Dataset columns:

  commit_id        string, lengths 40 to 40
  project          string, 11 distinct values
  commit_message   string, lengths 3 to 3.04k
  type             string, 3 distinct values
  url              string, 11 distinct values
  git_diff         string, lengths 555 to 691k
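For orientation, a minimal sketch of a row type mirroring the column list above. The class and its comments are illustrative only; the dataset itself ships no code, and the meaning of the `type` labels is not documented here, so only the observed values are noted.

```java
// Hypothetical container mirroring the dataset's columns; not part of the dataset itself.
public final class CommitRecord {
    public final String commitId;       // 40-char commit SHA
    public final String project;        // one of 11 project names
    public final String commitMessage;  // 3 chars up to ~3.04k chars
    public final String type;           // one of 3 labels; observed below as "p", "c", "a"
    public final String url;            // one of 11 repository URLs
    public final String gitDiff;        // unified diff, 555 chars up to ~691k chars

    public CommitRecord(String commitId, String project, String commitMessage,
                        String type, String url, String gitDiff) {
        this.commitId = commitId;
        this.project = project;
        this.commitMessage = commitMessage;
        this.type = type;
        this.url = url;
        this.gitDiff = gitDiff;
    }
}
```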
d479611dc5e989d79308e3b10a5e9b3952f730d0
elasticsearch
Tests: Use all found index files instead of static list for static bwc tests

It is a pain to add a new static index, and then have to update the old-index test. This removes the need for the latter step.

Closes #9854
p
https://github.com/elastic/elasticsearch
diff --git a/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityTests.java b/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityTests.java index 3c9fcf85ae0f9..5a6d0e8eb92bc 100644 --- a/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityTests.java +++ b/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityTests.java @@ -36,9 +36,21 @@ import org.elasticsearch.test.index.merge.NoMergePolicyProvider; import org.elasticsearch.test.rest.client.http.HttpRequestBuilder; import org.hamcrest.Matchers; +import org.junit.BeforeClass; +import java.io.IOException; import java.lang.reflect.Modifier; -import java.util.*; +import java.net.URL; +import java.nio.file.DirectoryStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.SortedSet; +import java.util.TreeSet; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -46,57 +58,20 @@ public class OldIndexBackwardsCompatibilityTests extends StaticIndexBackwardComp // TODO: test for proper exception on unsupported indexes (maybe via separate test?) // We have a 0.20.6.zip etc for this. - List<String> indexes = Arrays.asList( - "index-0.90.0.Beta1.zip", - "index-0.90.0.RC1.zip", - "index-0.90.0.RC2.zip", - "index-0.90.0.zip", - "index-0.90.1.zip", - "index-0.90.2.zip", - "index-0.90.3.zip", - "index-0.90.4.zip", - "index-0.90.5.zip", - "index-0.90.6.zip", - "index-0.90.7.zip", - "index-0.90.8.zip", - "index-0.90.9.zip", - "index-0.90.10.zip", - "index-0.90.11.zip", - "index-0.90.12.zip", - "index-0.90.13.zip", - "index-1.0.0.Beta1.zip", - "index-1.0.0.Beta2.zip", - "index-1.0.0.RC1.zip", - "index-1.0.0.RC2.zip", - "index-1.0.0.zip", - "index-1.0.1.zip", - "index-1.0.2.zip", - "index-1.0.3.zip", - "index-1.1.0.zip", - "index-1.1.1.zip", - "index-1.1.2.zip", - "index-1.2.0.zip", - "index-1.2.1.zip", - "index-1.2.2.zip", - "index-1.2.3.zip", - "index-1.2.4.zip", - "index-1.3.0.zip", - "index-1.3.1.zip", - "index-1.3.2.zip", - "index-1.3.3.zip", - "index-1.3.4.zip", - "index-1.3.5.zip", - "index-1.3.6.zip", - "index-1.3.7.zip", - "index-1.3.8.zip", - "index-1.3.9.zip", - "index-1.4.0.Beta1.zip", - "index-1.4.0.zip", - "index-1.4.1.zip", - "index-1.4.2.zip", - "index-1.4.3.zip", - "index-1.4.4.zip" - ); + static List<String> indexes; + + @BeforeClass + public static void initIndexes() throws Exception { + indexes = new ArrayList<>(); + URL dirUrl = OldIndexBackwardsCompatibilityTests.class.getResource("."); + Path dir = Paths.get(dirUrl.toURI()); + try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir, "index-*.zip")) { + for (Path path : stream) { + indexes.add(path.getFileName().toString()); + } + } + Collections.sort(indexes); + } public void testAllVersionsTested() throws Exception { SortedSet<String> expectedVersions = new TreeSet<>(); diff --git a/src/test/resources/org/elasticsearch/bwcompat/index-0.20.6.zip b/src/test/resources/org/elasticsearch/bwcompat/unsupported-0.20.6.zip similarity index 100% rename from src/test/resources/org/elasticsearch/bwcompat/index-0.20.6.zip rename to src/test/resources/org/elasticsearch/bwcompat/unsupported-0.20.6.zip
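The reusable pattern in this diff — discovering test fixtures with a filesystem glob instead of maintaining a hardcoded list — can be isolated as below. A minimal sketch: the directory passed in main is a placeholder, whereas the real test resolves it from the class's own resource URL.

```java
import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class IndexDiscovery {
    // Collect every file matching the glob, so newly added fixtures are picked up automatically.
    static List<String> findIndexes(Path dir) throws IOException {
        List<String> indexes = new ArrayList<>();
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir, "index-*.zip")) {
            for (Path path : stream) {
                indexes.add(path.getFileName().toString());
            }
        }
        Collections.sort(indexes); // deterministic order across filesystems
        return indexes;
    }

    public static void main(String[] args) throws IOException {
        // Placeholder directory; the test derives it from getClass().getResource(".").
        System.out.println(findIndexes(Paths.get("src/test/resources/org/elasticsearch/bwcompat")));
    }
}
```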
b02e572bdb781485ce87181f70b426454ab8abcb
ReactiveX-RxJava
Tidying up AbstractOnSubscribe javadocs
p
https://github.com/ReactiveX/RxJava
diff --git a/src/main/java/rx/observables/AbstractOnSubscribe.java b/src/main/java/rx/observables/AbstractOnSubscribe.java index 7cbb46b8a1..6dbe7ad44a 100644 --- a/src/main/java/rx/observables/AbstractOnSubscribe.java +++ b/src/main/java/rx/observables/AbstractOnSubscribe.java @@ -26,58 +26,65 @@ import rx.functions.*; /** - * Abstract base class for the OnSubscribe interface that helps building - * observable sources one onNext at a time and automatically supports - * unsubscription and backpressure. + * Abstract base class for the {@link OnSubscribe} interface that helps you build Observable sources one + * {@code onNext} at a time, and automatically supports unsubscription and backpressure. * <p> * <h1>Usage rules</h1> - * Implementors of the {@code next()} method + * When you implement the {@code next()} method, you * <ul> - * <li>should either + * <li>should either * <ul> - * <li>create the next value and signal it via {@code state.onNext()},</li> - * <li>signal a terminal condition via {@code state.onError()} or {@code state.onCompleted()} or</li> - * <li>signal a stop condition via {@code state.stop()} indicating no further values will be sent.</li> + * <li>create the next value and signal it via {@link SubscriptionState#onNext state.onNext()},</li> + * <li>signal a terminal condition via {@link SubscriptionState#onError state.onError()}, or + * {@link SubscriptionState#onCompleted state.onCompleted()}, or</li> + * <li>signal a stop condition via {@link SubscriptionState#stop state.stop()} indicating no further values + * will be sent.</li> * </ul> - * </li> - * <li>may + * </li> + * <li>may * <ul> - * <li>call {@code state.onNext()} and either {@code state.onError()} or {@code state.onCompleted()} together and + * <li>call {@link SubscriptionState#onNext state.onNext()} and either + * {@link SubscriptionState#onError state.onError()} or + * {@link SubscriptionState#onCompleted state.onCompleted()} together, and * <li>block or sleep. * </ul> - * </li> - * <li>should not + * </li> + * <li>should not * <ul> * <li>do nothing or do async work and not produce any event or request stopping. If neither of - * the methods are called, an {@code IllegalStateException} is forwarded to the {@code Subscriber} and - * the Observable is terminated;</li> - * <li>call the {@code state.onXYZ} methods more than once (yields {@code IllegalStateException}).</li> + * the methods are called, an {@link IllegalStateException} is forwarded to the {@code Subscriber} and + * the Observable is terminated;</li> + * <li>call the {@code state.on}<i>foo</i>() methods more than once (yields + * {@link IllegalStateException}).</li> * </ul> - * </li> + * </li> * </ul> * - * The {@code SubscriptionState} object features counters that may help implement a state machine: + * The {@link SubscriptionState} object features counters that may help implement a state machine: * <ul> - * <li>A call counter, accessible via {@code state.calls()} that tells how many times - * the {@code next()} was run (zero based). - * <li>A phase counter, accessible via {@code state.phase()} that helps track the current emission - * phase and may be used in a {@code switch ()} statement to implement the state machine. 
- * (It was named phase to avoid confusion with the per-subscriber state.)</li> - * <li>The current phase can be arbitrarily changed via {@code state.advancePhase()}, - * {@code state.advancePhaseBy(int)} and {@code state.phase(int)}.</li> - * + * <li>A call counter, accessible via {@link SubscriptionState#calls state.calls()} tells how many times the + * {@code next()} was run (zero based).</li> + * <li>You can use a phase counter, accessible via {@link SubscriptionState#phase state.phase}, that helps track + * the current emission phase, in a {@code switch()} statement to implement the state machine. (It is named + * {@code phase} to avoid confusion with the per-subscriber state.)</li> + * <li>You can arbitrarily change the current phase with + * {@link SubscriptionState#advancePhase state.advancePhase()}, + * {@link SubscriptionState#advancePhaseBy(int) state.advancedPhaseBy(int)} and + * {@link SubscriptionState#phase(int) state.phase(int)}.</li> * </ul> * <p> - * The implementors of the {@code AbstractOnSubscribe} may override the {@code onSubscribe} to perform - * special actions (such as registering {@code Subscription}s with {@code Subscriber.add()}) and return additional state for each subscriber subscribing. This custom state is - * accessible through the {@code state.state()} method. If the custom state requires some form of cleanup, - * the {@code onTerminated} method can be overridden. + * When you implement {@code AbstractOnSubscribe}, you may override {@link AbstractOnSubscribe#onSubscribe} to + * perform special actions (such as registering {@code Subscription}s with {@code Subscriber.add()}) and return + * additional state for each subscriber subscribing. You can access this custom state with the + * {@link SubscriptionState#state state.state()} method. If you need to do some cleanup, you can override the + * {@link #onTerminated} method. * <p> - * For convenience, lambda-accepting static factory methods, named {@code create()}, are available. Another - * convenience is the {@code toObservable} which turns an {@code AbstractOnSubscribe} instance into an {@code Observable} fluently. + * For convenience, a lambda-accepting static factory method, {@link #create}, is available. + * Another convenience is {@link #toObservable} which turns an {@code AbstractOnSubscribe} + * instance into an {@code Observable} fluently. * * <h1>Examples</h1> - * Note: the examples use the lambda-helper factories to avoid boilerplane. + * Note: these examples use the lambda-helper factories to avoid boilerplane. * * <h3>Implement: just</h3> * <pre><code> @@ -100,7 +107,7 @@ * } * }, u -> iterable.iterator()).subscribe(System.out::println); * </code></pre> - + * * <h3>Implement source that fails a number of times before succeeding</h3> * <pre><code> * AtomicInteger fails = new AtomicInteger(); @@ -136,37 +143,43 @@ * .timeout(1, TimeUnit.SECONDS) * .subscribe(System.out::println, Throwable::printStacktrace, () -> System.out.println("Done")); * </code></pre> - + * * @param <T> the value type * @param <S> the per-subscriber user-defined state type + * @since (if this graduates from Experimental/Beta to supported, replace this parenthetical with the release number) + * @Experimental */ @Experimental public abstract class AbstractOnSubscribe<T, S> implements OnSubscribe<T> { /** - * Called when a Subscriber subscribes and let's the implementor - * create a per-subscriber custom state. + * Called when a Subscriber subscribes and lets the implementor create a per-subscriber custom state. 
* <p> - * Override this method to have custom state per-subscriber. - * The default implementation returns {@code null}. + * Override this method to have custom state per-subscriber. The default implementation returns + * {@code null}. + * * @param subscriber the subscriber who is subscribing * @return the custom state */ protected S onSubscribe(Subscriber<? super T> subscriber) { return null; } + /** * Called after the terminal emission or when the downstream unsubscribes. * <p> - * This is called only once and it is made sure no onNext call runs concurrently with it. - * The default implementation does nothing. + * This is called only once and no {@code onNext} call will run concurrently with it. The default + * implementation does nothing. + * * @param state the user-provided state */ protected void onTerminated(S state) { } + /** - * Override this method and create an emission state-machine. - * @param state the per-subscriber subscription state. + * Override this method to create an emission state-machine. + * + * @param state the per-subscriber subscription state */ protected abstract void next(SubscriptionState<T, S> state); @@ -179,7 +192,8 @@ public final void call(final Subscriber<? super T> subscriber) { } /** - * Convenience method to create an observable from the implemented instance + * Convenience method to create an Observable from this implemented instance. + * * @return the created observable */ public final Observable<T> toObservable() { @@ -195,14 +209,15 @@ public Object call(Object t1) { }; /** - * Creates an AbstractOnSubscribe instance which calls the provided {@code next} action. + * Creates an {@code AbstractOnSubscribe} instance which calls the provided {@code next} action. * <p> - * This is a convenience method to help create AbstractOnSubscribe instances with the - * help of lambdas. + * This is a convenience method to help create {@code AbstractOnSubscribe} instances with the help of + * lambdas. + * * @param <T> the value type * @param <S> the per-subscriber user-defined state type * @param next the next action to call - * @return an AbstractOnSubscribe instance + * @return an {@code AbstractOnSubscribe} instance */ public static <T, S> AbstractOnSubscribe<T, S> create(Action1<SubscriptionState<T, S>> next) { @SuppressWarnings("unchecked") @@ -210,42 +225,49 @@ public static <T, S> AbstractOnSubscribe<T, S> create(Action1<SubscriptionState< (Func1<? super Subscriber<? super T>, ? extends S>)NULL_FUNC1; return create(next, nullFunc, Actions.empty()); } + /** - * Creates an AbstractOnSubscribe instance which creates a custom state with the - * {@code onSubscribe} function and calls the provided {@code next} action. + * Creates an {@code AbstractOnSubscribe} instance which creates a custom state with the {@code onSubscribe} + * function and calls the provided {@code next} action. * <p> - * This is a convenience method to help create AbstractOnSubscribe instances with the - * help of lambdas. + * This is a convenience method to help create {@code AbstractOnSubscribe} instances with the help of + * lambdas. 
+ * * @param <T> the value type * @param <S> the per-subscriber user-defined state type * @param next the next action to call - * @param onSubscribe the function that returns a per-subscriber state to be used by next - * @return an AbstractOnSubscribe instance + * @param onSubscribe the function that returns a per-subscriber state to be used by {@code next} + * @return an {@code AbstractOnSubscribe} instance */ public static <T, S> AbstractOnSubscribe<T, S> create(Action1<SubscriptionState<T, S>> next, Func1<? super Subscriber<? super T>, ? extends S> onSubscribe) { return create(next, onSubscribe, Actions.empty()); } + /** - * Creates an AbstractOnSubscribe instance which creates a custom state with the - * {@code onSubscribe} function, calls the provided {@code next} action and - * calls the {@code onTerminated} action to release the state when its no longer needed. + * Creates an {@code AbstractOnSubscribe} instance which creates a custom state with the {@code onSubscribe} + * function, calls the provided {@code next} action and calls the {@code onTerminated} action to release the + * state when its no longer needed. * <p> - * This is a convenience method to help create AbstractOnSubscribe instances with the - * help of lambdas. + * This is a convenience method to help create {@code AbstractOnSubscribe} instances with the help of + * lambdas. + * * @param <T> the value type * @param <S> the per-subscriber user-defined state type * @param next the next action to call - * @param onSubscribe the function that returns a per-subscriber state to be used by next - * @param onTerminated the action to call to release the state created by the onSubscribe function - * @return an AbstractOnSubscribe instance + * @param onSubscribe the function that returns a per-subscriber state to be used by {@code next} + * @param onTerminated the action to call to release the state created by the {@code onSubscribe} function + * @return an {@code AbstractOnSubscribe} instance */ public static <T, S> AbstractOnSubscribe<T, S> create(Action1<SubscriptionState<T, S>> next, Func1<? super Subscriber<? super T>, ? extends S> onSubscribe, Action1<? super S> onTerminated) { return new LambdaOnSubscribe<T, S>(next, onSubscribe, onTerminated); } + /** - * An implementation that forwards the 3 main methods to functional callbacks. + * An implementation that forwards the three main methods ({@code next}, {@code onSubscribe}, and + * {@code onTermianted}) to functional callbacks. + * * @param <T> the value type * @param <S> the per-subscriber user-defined state type */ @@ -272,13 +294,14 @@ protected void next(SubscriptionState<T, S> state) { next.call(state); } } + /** * Manages unsubscription of the state. + * * @param <T> the value type * @param <S> the per-subscriber user-defined state type */ private static final class SubscriptionCompleter<T, S> extends AtomicBoolean implements Subscription { - /** */ private static final long serialVersionUID = 7993888274897325004L; private final SubscriptionState<T, S> state; private SubscriptionCompleter(SubscriptionState<T, S> state) { @@ -298,6 +321,7 @@ public void unsubscribe() { } /** * Contains the producer loop that reacts to downstream requests of work. + * * @param <T> the value type * @param <S> the per-subscriber user-defined state type */ @@ -325,9 +349,11 @@ public void request(long n) { } } } + /** * Executes the user-overridden next() method and performs state bookkeeping and * verification. 
+ * * @return true if the outer loop may continue */ protected boolean doNext() { @@ -355,12 +381,15 @@ protected boolean doNext() { return false; } } + /** - * Represents a per-subscription state for the AbstractOnSubscribe operation. - * It supports phasing and counts the number of times a value was requested - * by the downstream. + * Represents a per-subscription state for the {@code AbstractOnSubscribe} operation. It supports phasing + * and counts the number of times a value was requested by the downstream. + * * @param <T> the value type * @param <S> the per-subscriber user-defined state type + * @since (if this graduates from Experimental/Beta to supported, replace this parenthetical with the release number) + * @Experimental */ public static final class SubscriptionState<T, S> { private final AbstractOnSubscribe<T, S> parent; @@ -382,51 +411,61 @@ private SubscriptionState(AbstractOnSubscribe<T, S> parent, Subscriber<? super T this.requestCount = new AtomicLong(); this.inUse = new AtomicInteger(1); } + /** - * @return the per-subscriber specific user-defined state created via AbstractOnSubscribe.onSubscribe. + * @return the per-subscriber specific user-defined state created via + * {@link AbstractOnSubscribe#onSubscribe} */ public S state() { return state; } + /** * @return the current phase value */ public int phase() { return phase; } + /** * Sets a new phase value. + * * @param newPhase */ public void phase(int newPhase) { phase = newPhase; } + /** * Advance the current phase by 1. */ public void advancePhase() { advancePhaseBy(1); } + /** * Advance the current phase by the given amount (can be negative). + * * @param amount the amount to advance the phase */ public void advancePhaseBy(int amount) { phase += amount; } + /** - * @return the number of times AbstractOnSubscribe.next was called so far, starting at 0 - * for the very first call. + * @return the number of times {@link AbstractOnSubscribe#next} was called so far, starting at 0 for the + * very first call */ public long calls() { return calls; } + /** - * Call this method to offer the next onNext value for the subscriber. - * <p> - * Throws IllegalStateException if there is a value already offered but not taken or - * a terminal state is reached. - * @param value the value to onNext + * Call this method to offer the next {@code onNext} value for the subscriber. + * + * @param value the value to {@code onNext} + * @throws IllegalStateException if there is a value already offered but not taken or a terminal state + * is reached */ public void onNext(T value) { if (hasOnNext) { @@ -438,13 +477,14 @@ public void onNext(T value) { theValue = value; hasOnNext = true; } + /** - * Call this method to send an onError to the subscriber and terminate - * all further activities. If there is an onNext even not taken, that - * value is emitted to the subscriber followed by this exception. - * <p> - * Throws IllegalStateException if the terminal state has been reached already. + * Call this method to send an {@code onError} to the subscriber and terminate all further activities. + * If there is a pending {@code onNext}, that value is emitted to the subscriber followed by this + * exception. 
+ * * @param e the exception to deliver to the client + * @throws IllegalStateException if the terminal state has been reached already */ public void onError(Throwable e) { if (e == null) { @@ -456,13 +496,13 @@ public void onError(Throwable e) { theException = e; hasCompleted = true; } + /** - * Call this method to send an onCompleted to the subscriber and terminate - * all further activities. If there is an onNext even not taken, that - * value is emitted to the subscriber followed by this exception. - * <p> - * Throws IllegalStateException if the terminal state has been reached already. - * @param e the exception to deliver to the client + * Call this method to send an {@code onCompleted} to the subscriber and terminate all further + * activities. If there is a pending {@code onNext}, that value is emitted to the subscriber followed by + * this exception. + * + * @throws IllegalStateException if the terminal state has been reached already */ public void onCompleted() { if (hasCompleted) { @@ -470,15 +510,18 @@ public void onCompleted() { } hasCompleted = true; } + /** * Signals that there won't be any further events. */ public void stop() { stopRequested = true; } + /** - * Emits the onNextValue and/or the terminal value to the actual subscriber. - * @return true if the event was a terminal event + * Emits the {@code onNext} and/or the terminal value to the actual subscriber. + * + * @return {@code true} if the event was a terminal event */ protected boolean accept() { if (hasOnNext) { @@ -513,21 +556,28 @@ protected boolean accept() { } return false; } + /** - * Verify if the next() generated an event or requested a stop. + * Verify if the {@code next()} generated an event or requested a stop. + * * @return true if either event was generated or stop was requested */ protected boolean verify() { return hasOnNext || hasCompleted || stopRequested; } - /** @returns true if the next() requested a stop. */ + + /** @return true if the {@code next()} requested a stop */ protected boolean stopRequested() { return stopRequested; } + /** - * Request the state to be used by onNext or returns false if - * the downstream has unsubscribed. - * @return true if the state can be used exclusively + * Request the state to be used by {@code onNext} or returns {@code false} if the downstream has + * unsubscribed. + * + * @return {@code true} if the state can be used exclusively + * @throws IllegalStateEception + * @warn "throws" section incomplete */ protected boolean use() { int i = inUse.get(); @@ -539,9 +589,9 @@ protected boolean use() { } throw new IllegalStateException("This is not reentrant nor threadsafe!"); } + /** - * Release the state if there are no more interest in it and - * is not in use. + * Release the state if there are no more interest in it and it is not in use. */ protected void free() { int i = inUse.get(); @@ -552,9 +602,10 @@ protected void free() { parent.onTerminated(state); } } + /** - * Terminates the state immediately and calls - * onTerminated with the custom state. + * Terminates the state immediately and calls {@link AbstractOnSubscribe#onTerminated} with the custom + * state. */ protected void terminate() { for (;;) {
ad2358bba102bd4e9876028cf30341ec48aabe4f
ReactiveX-RxJava
GroupBy GroupedObservables should not re-subscribe to parent sequence

https://github.com/Netflix/RxJava/issues/282

Refactored to maintain a single subscription that propagates events to the correct child GroupedObservables.
c
https://github.com/ReactiveX/RxJava
diff --git a/rxjava-core/src/main/java/rx/operators/OperationGroupBy.java b/rxjava-core/src/main/java/rx/operators/OperationGroupBy.java index 1c2e6e969c..edd4ef7ae4 100644 --- a/rxjava-core/src/main/java/rx/operators/OperationGroupBy.java +++ b/rxjava-core/src/main/java/rx/operators/OperationGroupBy.java @@ -17,12 +17,15 @@ import static org.junit.Assert.*; -import java.util.ArrayList; import java.util.Arrays; -import java.util.HashMap; -import java.util.List; +import java.util.Collection; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; import org.junit.Test; @@ -30,6 +33,8 @@ import rx.Observer; import rx.Subscription; import rx.observables.GroupedObservable; +import rx.subscriptions.Subscriptions; +import rx.util.functions.Action1; import rx.util.functions.Func1; import rx.util.functions.Functions; @@ -55,7 +60,9 @@ public static <K, T> Func1<Observer<GroupedObservable<K, T>>, Subscription> grou } private static class GroupBy<K, V> implements Func1<Observer<GroupedObservable<K, V>>, Subscription> { + private final Observable<KeyValue<K, V>> source; + private final ConcurrentHashMap<K, GroupedSubject<K, V>> groupedObservables = new ConcurrentHashMap<K, GroupedSubject<K, V>>(); private GroupBy(Observable<KeyValue<K, V>> source) { this.source = source; @@ -63,61 +70,127 @@ private GroupBy(Observable<KeyValue<K, V>> source) { @Override public Subscription call(final Observer<GroupedObservable<K, V>> observer) { - return source.subscribe(new GroupByObserver(observer)); + return source.subscribe(new Observer<KeyValue<K, V>>() { + + @Override + public void onCompleted() { + // we need to propagate to all children I imagine ... we can't just leave all of those Observable/Observers hanging + for (GroupedSubject<K, V> o : groupedObservables.values()) { + o.onCompleted(); + } + // now the parent + observer.onCompleted(); + } + + @Override + public void onError(Exception e) { + // we need to propagate to all children I imagine ... we can't just leave all of those Observable/Observers hanging + for (GroupedSubject<K, V> o : groupedObservables.values()) { + o.onError(e); + } + // now the parent + observer.onError(e); + } + + @Override + public void onNext(KeyValue<K, V> value) { + GroupedSubject<K, V> gs = groupedObservables.get(value.key); + if (gs == null) { + /* + * Technically the source should be single-threaded so we shouldn't need to do this but I am + * programming defensively as most operators are so this can work with a concurrent sequence + * if it ends up receiving one. 
+ */ + GroupedSubject<K, V> newGs = GroupedSubject.<K, V> create(value.key); + GroupedSubject<K, V> existing = groupedObservables.putIfAbsent(value.key, newGs); + if (existing == null) { + // we won so use the one we created + gs = newGs; + // since we won the creation we emit this new GroupedObservable + observer.onNext(gs); + } else { + // another thread beat us so use the existing one + gs = existing; + } + } + gs.onNext(value.value); + } + }); } + } - private class GroupByObserver implements Observer<KeyValue<K, V>> { - private final Observer<GroupedObservable<K, V>> underlying; + private static class GroupedSubject<K, T> extends GroupedObservable<K, T> implements Observer<T> { - private final ConcurrentHashMap<K, Boolean> keys = new ConcurrentHashMap<K, Boolean>(); + static <K, T> GroupedSubject<K, T> create(K key) { + @SuppressWarnings("unchecked") + final AtomicReference<Observer<T>> subscribedObserver = new AtomicReference<Observer<T>>(EMPTY_OBSERVER); - private GroupByObserver(Observer<GroupedObservable<K, V>> underlying) { - this.underlying = underlying; - } + return new GroupedSubject<K, T>(key, new Func1<Observer<T>, Subscription>() { - @Override - public void onCompleted() { - underlying.onCompleted(); - } + @Override + public Subscription call(Observer<T> observer) { + // register Observer + subscribedObserver.set(observer); - @Override - public void onError(Exception e) { - underlying.onError(e); - } + return new Subscription() { - @Override - public void onNext(final KeyValue<K, V> args) { - K key = args.key; - boolean newGroup = keys.putIfAbsent(key, true) == null; - if (newGroup) { - underlying.onNext(buildObservableFor(source, key)); + @SuppressWarnings("unchecked") + @Override + public void unsubscribe() { + // we remove the Observer so we stop emitting further events (they will be ignored if parent continues to send) + subscribedObserver.set(EMPTY_OBSERVER); + // I don't believe we need to worry about the parent here as it's a separate sequence that would + // be unsubscribed to directly if that needs to happen. 
+ } + }; } - } + }, subscribedObserver); } - } - private static <K, R> GroupedObservable<K, R> buildObservableFor(Observable<KeyValue<K, R>> source, final K key) { - final Observable<R> observable = source.filter(new Func1<KeyValue<K, R>, Boolean>() { - @Override - public Boolean call(KeyValue<K, R> pair) { - return key.equals(pair.key); - } - }).map(new Func1<KeyValue<K, R>, R>() { - @Override - public R call(KeyValue<K, R> pair) { - return pair.value; - } - }); - return new GroupedObservable<K, R>(key, new Func1<Observer<R>, Subscription>() { + private final AtomicReference<Observer<T>> subscribedObserver; - @Override - public Subscription call(Observer<R> observer) { - return observable.subscribe(observer); - } + public GroupedSubject(K key, Func1<Observer<T>, Subscription> onSubscribe, AtomicReference<Observer<T>> subscribedObserver) { + super(key, onSubscribe); + this.subscribedObserver = subscribedObserver; + } + + @Override + public void onCompleted() { + subscribedObserver.get().onCompleted(); + } + + @Override + public void onError(Exception e) { + subscribedObserver.get().onError(e); + } + + @Override + public void onNext(T v) { + subscribedObserver.get().onNext(v); + } - }); } + @SuppressWarnings("rawtypes") + private static Observer EMPTY_OBSERVER = new Observer() { + + @Override + public void onCompleted() { + // do nothing + } + + @Override + public void onError(Exception e) { + // do nothing + } + + @Override + public void onNext(Object args) { + // do nothing + } + + }; + private static class KeyValue<K, V> { private final K key; private final V value; @@ -141,13 +214,12 @@ public void testGroupBy() { Observable<String> source = Observable.from("one", "two", "three", "four", "five", "six"); Observable<GroupedObservable<Integer, String>> grouped = Observable.create(groupBy(source, length)); - Map<Integer, List<String>> map = toMap(grouped); + Map<Integer, Collection<String>> map = toMap(grouped); assertEquals(3, map.size()); - assertEquals(Arrays.asList("one", "two", "six"), map.get(3)); - assertEquals(Arrays.asList("four", "five"), map.get(4)); - assertEquals(Arrays.asList("three"), map.get(5)); - + assertArrayEquals(Arrays.asList("one", "two", "six").toArray(), map.get(3).toArray()); + assertArrayEquals(Arrays.asList("four", "five").toArray(), map.get(4).toArray()); + assertArrayEquals(Arrays.asList("three").toArray(), map.get(5).toArray()); } @Test @@ -155,31 +227,133 @@ public void testEmpty() { Observable<String> source = Observable.from(); Observable<GroupedObservable<Integer, String>> grouped = Observable.create(groupBy(source, length)); - Map<Integer, List<String>> map = toMap(grouped); + Map<Integer, Collection<String>> map = toMap(grouped); assertTrue(map.isEmpty()); } - private static <K, V> Map<K, List<V>> toMap(Observable<GroupedObservable<K, V>> observable) { - Map<K, List<V>> result = new HashMap<K, List<V>>(); - for (GroupedObservable<K, V> g : observable.toBlockingObservable().toIterable()) { - K key = g.getKey(); + private static <K, V> Map<K, Collection<V>> toMap(Observable<GroupedObservable<K, V>> observable) { - for (V value : g.toBlockingObservable().toIterable()) { - List<V> values = result.get(key); - if (values == null) { - values = new ArrayList<V>(); - result.put(key, values); - } + final ConcurrentHashMap<K, Collection<V>> result = new ConcurrentHashMap<K, Collection<V>>(); - values.add(value); - } + observable.forEach(new Action1<GroupedObservable<K, V>>() { - } + @Override + public void call(final GroupedObservable<K, V> o) { + 
result.put(o.getKey(), new ConcurrentLinkedQueue<V>()); + o.subscribe(new Action1<V>() { + + @Override + public void call(V v) { + result.get(o.getKey()).add(v); + } + + }); + } + }); return result; } + /** + * Assert that only a single subscription to a stream occurs and that all events are received. + * + * @throws Exception + */ + @Test + public void testGroupedEventStream() throws Exception { + + final AtomicInteger eventCounter = new AtomicInteger(); + final AtomicInteger subscribeCounter = new AtomicInteger(); + final AtomicInteger groupCounter = new AtomicInteger(); + final CountDownLatch latch = new CountDownLatch(1); + final int count = 100; + final int groupCount = 2; + + Observable<Event> es = Observable.create(new Func1<Observer<Event>, Subscription>() { + + @Override + public Subscription call(final Observer<Event> observer) { + System.out.println("*** Subscribing to EventStream ***"); + subscribeCounter.incrementAndGet(); + new Thread(new Runnable() { + + @Override + public void run() { + for (int i = 0; i < count; i++) { + Event e = new Event(); + e.source = i % groupCount; + e.message = "Event-" + i; + observer.onNext(e); + } + observer.onCompleted(); + } + + }).start(); + return Subscriptions.empty(); + } + + }); + + es.groupBy(new Func1<Event, Integer>() { + + @Override + public Integer call(Event e) { + return e.source; + } + }).mapMany(new Func1<GroupedObservable<Integer, Event>, Observable<String>>() { + + @Override + public Observable<String> call(GroupedObservable<Integer, Event> eventGroupedObservable) { + System.out.println("GroupedObservable Key: " + eventGroupedObservable.getKey()); + groupCounter.incrementAndGet(); + + return eventGroupedObservable.map(new Func1<Event, String>() { + + @Override + public String call(Event event) { + return "Source: " + event.source + " Message: " + event.message; + } + }); + + }; + }).subscribe(new Observer<String>() { + + @Override + public void onCompleted() { + latch.countDown(); + } + + @Override + public void onError(Exception e) { + e.printStackTrace(); + latch.countDown(); + } + + @Override + public void onNext(String outputMessage) { + System.out.println(outputMessage); + eventCounter.incrementAndGet(); + } + }); + + latch.await(5000, TimeUnit.MILLISECONDS); + assertEquals(1, subscribeCounter.get()); + assertEquals(groupCount, groupCounter.get()); + assertEquals(count, eventCounter.get()); + + } + + private static class Event { + int source; + String message; + + @Override + public String toString() { + return "Event => source: " + source + " message: " + message; + } + } + } }
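The concurrency idiom at the heart of this refactor — create-on-first-use with ConcurrentHashMap.putIfAbsent, where the winning thread announces the new group and losing threads adopt the existing one — stands alone. A minimal sketch with illustrative names, not RxJava's own types:

```java
import java.util.concurrent.ConcurrentHashMap;

public class GroupRegistry<K, G> {
    private final ConcurrentHashMap<K, G> groups = new ConcurrentHashMap<K, G>();

    interface Factory<K, G> { G create(K key); }
    interface OnNewGroup<G> { void emit(G group); }

    // Returns the group for the key, creating and announcing it exactly once
    // even if several threads race on the same key.
    G getOrCreate(K key, Factory<K, G> factory, OnNewGroup<G> listener) {
        G group = groups.get(key);
        if (group == null) {
            G candidate = factory.create(key);
            G existing = groups.putIfAbsent(key, candidate);
            if (existing == null) {
                group = candidate;   // we won the race: announce the new group once
                listener.emit(group);
            } else {
                group = existing;    // another thread beat us: reuse its group
            }
        }
        return group;
    }
}
```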
59d1720a5d1020d1167bea1a31b9616a685398a2
orientdb
Fixed bug on OIdentifiable (but not ODocument) fetching
c
https://github.com/orientechnologies/orientdb
diff --git a/core/src/main/java/com/orientechnologies/orient/core/fetch/OFetchHelper.java b/core/src/main/java/com/orientechnologies/orient/core/fetch/OFetchHelper.java index d2d01458406..ecb2a3e5189 100644 --- a/core/src/main/java/com/orientechnologies/orient/core/fetch/OFetchHelper.java +++ b/core/src/main/java/com/orientechnologies/orient/core/fetch/OFetchHelper.java @@ -298,9 +298,9 @@ private static void processRecord(final ORecordSchemaAware<?> record, final Obje iContext.onAfterStandardField(fieldValue, fieldName, iUserObject); } else { try { - if (!(fieldValue instanceof ODocument - && (((ODocument) fieldValue).isEmbedded() || !((ODocument) fieldValue).getIdentity().isValid()) && iContext - .fetchEmbeddedDocuments()) && !iFetchPlan.containsKey(fieldPath) && depthLevel > -1 && iCurrentLevel > depthLevel) { + if (!(!(fieldValue instanceof ODocument) || (((ODocument) fieldValue).isEmbedded() || !((ODocument) fieldValue) + .getIdentity().isValid()) && iContext.fetchEmbeddedDocuments()) + && !iFetchPlan.containsKey(fieldPath) && depthLevel > -1 && iCurrentLevel > depthLevel) { // MAX DEPTH REACHED: STOP TO FETCH THIS FIELD continue; } @@ -329,13 +329,13 @@ private static void fetch(final ORecordSchemaAware<?> iRootRecord, final Object } if (fieldValue == null) { iListener.processStandardField(iRootRecord, null, fieldName, iContext); - } else if (fieldValue instanceof ODocument) { - if (((ODocument) fieldValue).getClassName() != null + } else if (fieldValue instanceof OIdentifiable) { + if (fieldValue instanceof ODocument && ((ODocument) fieldValue).getClassName() != null && ((ODocument) fieldValue).getClassName().equals(OMVRBTreeRIDSet.OCLASS_NAME)) { fetchCollection(iRootRecord, iUserObject, iFetchPlan, fieldValue, fieldName, currentLevel, iLevelFromRoot, fieldDepthLevel, parsedRecords, iFieldPathFromRoot, iListener, iContext); } else { - fetchDocument(iRootRecord, iUserObject, iFetchPlan, (ODocument) fieldValue, fieldName, currentLevel, iLevelFromRoot, + fetchDocument(iRootRecord, iUserObject, iFetchPlan, (OIdentifiable) fieldValue, fieldName, currentLevel, iLevelFromRoot, fieldDepthLevel, parsedRecords, iFieldPathFromRoot, iListener, iContext); } } else if (fieldValue instanceof Collection<?>) { @@ -443,7 +443,7 @@ private static void fetchDocument(final ORecordSchemaAware<?> iRootRecord, final final Map<String, Integer> iFetchPlan, final OIdentifiable fieldValue, final String fieldName, final int iCurrentLevel, final int iLevelFromRoot, final int iFieldDepthLevel, final Map<ORID, Integer> parsedRecords, final String iFieldPathFromRoot, final OFetchListener iListener, final OFetchContext iContext) throws IOException { - final Integer fieldDepthLevel = parsedRecords.get(((ODocument) fieldValue).getIdentity()); + final Integer fieldDepthLevel = parsedRecords.get(fieldValue.getIdentity()); if (!fieldValue.getIdentity().isValid() || (fieldDepthLevel != null && fieldDepthLevel.intValue() == iLevelFromRoot)) { removeParsedFromMap(parsedRecords, fieldValue); final ODocument linked = (ODocument) fieldValue; diff --git a/core/src/main/java/com/orientechnologies/orient/core/fetch/object/OObjectFetchListener.java b/core/src/main/java/com/orientechnologies/orient/core/fetch/object/OObjectFetchListener.java index 4fe65d8485b..11738dfb141 100644 --- a/core/src/main/java/com/orientechnologies/orient/core/fetch/object/OObjectFetchListener.java +++ b/core/src/main/java/com/orientechnologies/orient/core/fetch/object/OObjectFetchListener.java @@ -45,8 +45,8 @@ public void 
processStandardCollectionValue(Object iFieldValue, OFetchContext iCo public void parseLinked(final ORecordSchemaAware<?> iRootRecord, final OIdentifiable iLinked, final Object iUserObject, final String iFieldName, final OFetchContext iContext) throws OFetchException { final Class<?> type = OObjectSerializerHelper.getFieldType(iUserObject, iFieldName); - if (Set.class.isAssignableFrom(type) || Collection.class.isAssignableFrom(type) || Map.class.isAssignableFrom(type) - || type.isArray()) { + if (type == null || Set.class.isAssignableFrom(type) || Collection.class.isAssignableFrom(type) + || Map.class.isAssignableFrom(type) || type.isArray()) { return; } else if (iLinked instanceof ORecordSchemaAware && !(((OObjectFetchContext) iContext).getObj2RecHandler().existsUserObjectByRID(iLinked.getIdentity()))) {
af1d3891570f23ef391c4b7aed3f6dedbcf9277b
hadoop
YARN-2519. Credential Provider related unit tests failed on Windows. Contributed by Xiaoyu Yao.

(cherry picked from commit cbea1b10efd871d04c648af18449dc724685db74)
c
https://github.com/apache/hadoop
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt index cf9fe6e6f793c..83274b65b7236 100644 --- a/hadoop-yarn-project/CHANGES.txt +++ b/hadoop-yarn-project/CHANGES.txt @@ -264,6 +264,9 @@ Release 2.6.0 - UNRELEASED YARN-2431. NM restart: cgroup is not removed for reacquired containers (jlowe) + YARN-2519. Credential Provider related unit tests failed on Windows. + (Xiaoyu Yao via cnauroth) + Release 2.5.1 - UNRELEASED INCOMPATIBLE CHANGES diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/util/TestWebAppUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/util/TestWebAppUtils.java index 18600fdea6886..2bd91b4ac6d37 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/util/TestWebAppUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/util/TestWebAppUtils.java @@ -24,6 +24,7 @@ import java.io.File; import java.io.IOException; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.Path; import org.apache.hadoop.http.HttpServer2; import org.apache.hadoop.http.HttpServer2.Builder; import org.apache.hadoop.security.alias.CredentialProvider; @@ -74,8 +75,9 @@ protected Configuration provisionCredentialsForSSL() throws IOException, "target/test-dir")); Configuration conf = new Configuration(); + final Path jksPath = new Path(testDir.toString(), "test.jks"); final String ourUrl = - JavaKeyStoreProvider.SCHEME_NAME + "://file/" + testDir + "/test.jks"; + JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri(); File file = new File(testDir, "test.jks"); file.delete();
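The portability point behind this fix: splicing a raw java.io.File path into a URI string breaks on Windows, where the path carries a drive letter and backslashes, while routing it through Hadoop's Path and toUri() yields a valid URI on every platform. A minimal sketch of the before/after; the directory is a placeholder, and the scheme string assumes JavaKeyStoreProvider.SCHEME_NAME resolves to "jceks".

```java
import java.io.File;
import org.apache.hadoop.fs.Path;

public class JksUrl {
    public static void main(String[] args) {
        File testDir = new File("target/test-dir"); // placeholder directory
        String scheme = "jceks";                    // assumed value of JavaKeyStoreProvider.SCHEME_NAME

        // Fragile on Windows: testDir.toString() yields e.g. C:\...\target\test-dir,
        // which is not legal inside a URI.
        String fragile = scheme + "://file/" + testDir + "/test.jks";

        // Portable: Path normalizes separators and toUri() encodes as needed.
        Path jksPath = new Path(testDir.toString(), "test.jks");
        String portable = scheme + "://file" + jksPath.toUri();

        System.out.println(fragile);
        System.out.println(portable);
    }
}
```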
0e8a141437a6645deb85a730bc9adfb757bb27b8
hbase
HBASE-10010 eliminate the put latency spike on the new log file beginning

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1549384 13f79535-47bb-0310-9956-ffa450edef68
c
https://github.com/apache/hbase
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java index 7c32ce611f5c..1c69d0ff890c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java @@ -532,6 +532,13 @@ OutputStream getOutputStream() { FSDataOutputStream nextHdfsOut = null; if (nextWriter instanceof ProtobufLogWriter) { nextHdfsOut = ((ProtobufLogWriter)nextWriter).getStream(); + // perform the costly sync before we get the lock to roll writers. + try { + nextWriter.sync(); + } catch (IOException e) { + // optimization failed, no need to abort here. + LOG.warn("pre-sync failed", e); + } } Path oldFile = null; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationHLogReaderManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationHLogReaderManager.java index 705b3ae9ab64..dfc89bc32df5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationHLogReaderManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationHLogReaderManager.java @@ -141,12 +141,6 @@ public void test() throws Exception { // Grab the path that was generated when the log rolled as part of its creation Path path = pathWatcher.currentPath; - // open it, it's empty so it fails - try { - logManager.openReader(path); - fail("Shouldn't be able to open an empty file"); - } catch (EOFException ex) {} - assertEquals(0, logManager.getPosition()); appendToLog(); @@ -184,12 +178,6 @@ public void test() throws Exception { path = pathWatcher.currentPath; - // Finally we have a new empty log, which should still give us EOFs - try { - logManager.openReader(path); - fail(); - } catch (EOFException ex) {} - for (int i = 0; i < nbRows; i++) { appendToLogPlus(walEditKVs); } log.rollWriter(); logManager.openReader(path);
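The optimization generalizes: pay a new resource's expensive first-write cost before entering the critical section, so the lock is held only for the cheap swap, and tolerate failure of the warm-up since it is only an optimization. A minimal sketch of that shape; the interface and names are illustrative, not HBase's:

```java
import java.io.IOException;

public class WriterRoller {
    interface Writer {
        void sync() throws IOException;
    }

    private final Object rollLock = new Object();
    private Writer current;

    void rollWriter(Writer nextWriter) {
        // Perform the costly first sync before taking the lock: if it fails we
        // merely lose the warm-up, so log and continue rather than abort.
        try {
            nextWriter.sync();
        } catch (IOException e) {
            System.err.println("pre-sync failed: " + e);
        }
        synchronized (rollLock) {
            current = nextWriter; // the swap is cheap now that the sync already happened
        }
    }
}
```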
c9d4924dcf129512dadd22dcd6fe0046cbcded43
drools
BZ-1039639 - GRE doesn't recognize MVEL inline lists when opening rule
c
https://github.com/kiegroup/drools
diff --git a/drools-workbench-models/drools-workbench-models-commons/src/test/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceUnmarshallingTest.java b/drools-workbench-models/drools-workbench-models-commons/src/test/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceUnmarshallingTest.java index 05b7b836bf1..9da1dd93e78 100644 --- a/drools-workbench-models/drools-workbench-models-commons/src/test/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceUnmarshallingTest.java +++ b/drools-workbench-models/drools-workbench-models-commons/src/test/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceUnmarshallingTest.java @@ -26,7 +26,9 @@ import org.drools.workbench.models.datamodel.oracle.ModelField; import org.drools.workbench.models.datamodel.oracle.PackageDataModelOracle; import org.drools.workbench.models.datamodel.rule.ActionCallMethod; +import org.drools.workbench.models.datamodel.rule.ActionFieldValue; import org.drools.workbench.models.datamodel.rule.ActionGlobalCollectionAdd; +import org.drools.workbench.models.datamodel.rule.ActionSetField; import org.drools.workbench.models.datamodel.rule.BaseSingleFieldConstraint; import org.drools.workbench.models.datamodel.rule.CEPWindow; import org.drools.workbench.models.datamodel.rule.CompositeFactPattern; @@ -36,6 +38,8 @@ import org.drools.workbench.models.datamodel.rule.ExpressionVariable; import org.drools.workbench.models.datamodel.rule.FactPattern; import org.drools.workbench.models.datamodel.rule.FieldConstraint; +import org.drools.workbench.models.datamodel.rule.FieldNature; +import org.drools.workbench.models.datamodel.rule.FieldNatureType; import org.drools.workbench.models.datamodel.rule.FreeFormLine; import org.drools.workbench.models.datamodel.rule.IPattern; import org.drools.workbench.models.datamodel.rule.RuleModel; @@ -1982,6 +1986,43 @@ public void testExpressionWithListSize() throws Exception { assertEquals(1,constraint.getConstraintValueType()); } + @Test + @Ignore("https://bugzilla.redhat.com/show_bug.cgi?id=1039639 - GRE doesn't recognize MVEL inline lists when opening rule") + public void testMVELInlineList() throws Exception { + String drl = "" + + "rule \"Borked\"\n" + + " dialect \"mvel\"\n" + + " when\n" + + " c : Company( )\n" + + " then\n" + + " c.setEmps( [\"item1\", \"item2\"] );\n" + + "end"; + + addModelField("Company", + "emps", + "java.util.List", + "List"); + + RuleModel m = RuleModelDRLPersistenceImpl.getInstance().unmarshal( drl, + dmo ); + assertEquals( 1, + m.rhs.length ); + assertTrue( m.rhs[0] instanceof ActionSetField); + ActionSetField actionSetField = (ActionSetField) m.rhs[0]; + + assertEquals("c", actionSetField.getVariable()); + + assertEquals(1, actionSetField.getFieldValues().length); + + ActionFieldValue actionFieldValue = actionSetField.getFieldValues()[0]; + + assertEquals("[\"item1\", \"item2\"]",actionFieldValue.getValue()); + assertEquals("emps",actionFieldValue.getField()); + assertEquals(FieldNatureType.TYPE_FORMULA, actionFieldValue.getNature()); + assertEquals("Collection",actionFieldValue.getType()); + + } + private void assertEqualsIgnoreWhitespace( final String expected, final String actual ) { final String cleanExpected = expected.replaceAll( "\\s+",
c335b3a1762080f8cb5b2694985d93f17978474e
elasticsearch
Fix AckClusterUpdateSettingsIT.testClusterUpdateSettingsAcknowledgement() after changes in #14259

Closes #14278
c
https://github.com/elastic/elasticsearch
diff --git a/core/src/test/java/org/elasticsearch/cluster/ack/AckClusterUpdateSettingsIT.java b/core/src/test/java/org/elasticsearch/cluster/ack/AckClusterUpdateSettingsIT.java index 12147ffb783b5..81de8b1a43c22 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ack/AckClusterUpdateSettingsIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ack/AckClusterUpdateSettingsIT.java @@ -29,6 +29,7 @@ import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.allocation.decider.ConcurrentRebalanceAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.DiscoverySettings; @@ -50,6 +51,7 @@ protected Settings nodeSettings(int nodeOrdinal) { //make sure that enough concurrent reroutes can happen at the same time //we have a minimum of 2 nodes, and a maximum of 10 shards, thus 5 should be enough .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES, 5) + .put(ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE, 10) .build(); }
34805565ec1a5002515aa1ae544956ad5b8182fa
drools
[DROOLS-389] Improve support for @Traitable POJOs and @Trait interfaces
a
https://github.com/kiegroup/drools
diff --git a/drools-compiler/src/test/java/org/drools/compiler/factmodel/traits/TraitTest.java b/drools-compiler/src/test/java/org/drools/compiler/factmodel/traits/TraitTest.java index 0225efce84f..658bbfc1481 100644 --- a/drools-compiler/src/test/java/org/drools/compiler/factmodel/traits/TraitTest.java +++ b/drools-compiler/src/test/java/org/drools/compiler/factmodel/traits/TraitTest.java @@ -26,6 +26,7 @@ import org.drools.core.factmodel.traits.LogicalTypeInconsistencyException; import org.drools.core.factmodel.traits.MapWrapper; import org.drools.core.factmodel.traits.Thing; +import org.drools.core.factmodel.traits.Trait; import org.drools.core.factmodel.traits.TraitFactory; import org.drools.core.factmodel.traits.TraitProxy; import org.drools.core.factmodel.traits.TraitRegistry; @@ -4909,4 +4910,72 @@ public void testTraitImplicitInsertionExceptionOnNonTraitable() throws Interrupt } + + @Trait + public static interface SomeTrait<K> extends Thing<K> { + public String getFoo(); + public void setFoo( String foo ); + } + + @Test + public void testTraitLegacyTraitableWithLegacyTrait() { + final String s1 = "package org.drools.compiler.factmodel.traits;\n" + + "import " + TraitTest.class.getName() + ".SomeTrait; \n" + + "import org.drools.core.factmodel.traits.*; \n" + + "global java.util.List list;\n" + + "" + + "rule \"Don ItemStyle\"\n" + + " when\n" + + " then\n" + + " don( new StudentImpl(), SomeTrait.class );\n" + + "end\n"; + + final KnowledgeBase kbase = getKieBaseFromString(s1); + TraitFactory.setMode( mode, kbase ); + ArrayList list = new ArrayList(); + + StatefulKnowledgeSession knowledgeSession = kbase.newStatefulKnowledgeSession(); + knowledgeSession.setGlobal( "list", list ); + + knowledgeSession.fireAllRules(); + + assertEquals( 2, knowledgeSession.getObjects().size() ); + } + + @Test + public void testIsALegacyTrait() { + final String s1 = "package org.drools.compiler.factmodel.traits;\n" + + "import " + TraitTest.class.getName() + ".SomeTrait; \n" + + "import org.drools.core.factmodel.traits.*; \n" + + "global java.util.List list;\n" + + "" + + "declare trait IStudent end \n" + + "" + + "rule \"Don ItemStyle\"\n" + + " when\n" + + " then\n" + + " insert( new StudentImpl() );\n" + + " don( new Entity(), IStudent.class );\n" + + "end\n" + + "" + + "rule Check " + + " when " + + " $s : StudentImpl() " + + " $e : Entity( this isA $s ) " + + " then " + + " list.add( 1 ); " + + " end "; + + final KnowledgeBase kbase = getKieBaseFromString(s1); + TraitFactory.setMode( mode, kbase ); + ArrayList list = new ArrayList(); + + StatefulKnowledgeSession knowledgeSession = kbase.newStatefulKnowledgeSession(); + knowledgeSession.setGlobal( "list", list ); + + knowledgeSession.fireAllRules(); + + assertEquals( Arrays.asList( 1 ), list ); + } + } diff --git a/drools-core/src/main/java/org/drools/core/base/evaluators/IsAEvaluatorDefinition.java b/drools-core/src/main/java/org/drools/core/base/evaluators/IsAEvaluatorDefinition.java index afbd9839541..15b0123aec9 100644 --- a/drools-core/src/main/java/org/drools/core/base/evaluators/IsAEvaluatorDefinition.java +++ b/drools-core/src/main/java/org/drools/core/base/evaluators/IsAEvaluatorDefinition.java @@ -334,6 +334,10 @@ private boolean compare( Object source, Object target, InternalWorkingMemory wor targetTraits = x.getCode( target ); } else if ( target instanceof Thing ) { targetTraits = ((TraitableBean) ((Thing) target).getCore()).getCurrentTypeCode(); + if ( targetTraits == null && target instanceof TraitType ) { + CodedHierarchy x = 
((ReteooRuleBase) workingMemory.getRuleBase()).getConfiguration().getComponentFactory().getTraitRegistry().getHierarchy(); + targetTraits = x.getCode( ((TraitType)target).getTraitName() ); + } } else if ( target instanceof TraitableBean ) { targetTraits = ((TraitableBean) target).getCurrentTypeCode(); } else { diff --git a/drools-core/src/main/java/org/drools/core/factmodel/traits/TraitFactory.java b/drools-core/src/main/java/org/drools/core/factmodel/traits/TraitFactory.java index ad9cb783a35..5edbd2b7d52 100644 --- a/drools-core/src/main/java/org/drools/core/factmodel/traits/TraitFactory.java +++ b/drools-core/src/main/java/org/drools/core/factmodel/traits/TraitFactory.java @@ -226,10 +226,32 @@ private Class<T> buildProxyClass( String key, K core, Class<?> trait ) { ClassDefinition cdef = ruleBase.getTraitRegistry().getTraitable( coreKlass.getName() ); if ( tdef == null ) { - throw new RuntimeDroolsException( "Unable to find Trait definition for class " + trait.getName() + ". It should have been DECLARED as a trait" ); - } + if ( trait.getAnnotation( Trait.class ) != null ) { + try { + if ( Thing.class.isAssignableFrom( trait ) ) { + tdef = buildClassDefinition( trait, null ); + } else { + throw new RuntimeDroolsException( "Unable to create definition for class " + trait + + " : trait interfaces should extend " + Thing.class.getName() + " or be DECLARED as traits explicitly" ); + } + } catch ( IOException e ) { + throw new RuntimeDroolsException( "Unable to create definition for class " + trait + " : " + e.getMessage() ); + } + ruleBase.getTraitRegistry().addTrait( tdef ); + } else { + throw new RuntimeDroolsException( "Unable to find Trait definition for class " + trait.getName() + ". It should have been DECLARED as a trait" ); + } } if ( cdef == null ) { - throw new RuntimeDroolsException( "Unable to find Core class definition for class " + coreKlass.getName() + ". It should have been DECLARED as a trait" ); + if ( core.getClass().getAnnotation( Traitable.class ) != null ) { + try { + cdef = buildClassDefinition( core.getClass(), core.getClass() ); + } catch ( IOException e ) { + throw new RuntimeDroolsException( "Unable to create definition for class " + coreKlass.getName() + " : " + e.getMessage() ); + } + ruleBase.getTraitRegistry().addTraitable( cdef ); + } else { + throw new RuntimeDroolsException( "Unable to find Core class definition for class " + coreKlass.getName() + ". It should have been DECLARED as a trait" ); + } } String proxyName = getProxyName( tdef, cdef ); @@ -319,7 +341,7 @@ public synchronized CoreWrapper<K> getCoreWrapper( Class<K> coreKlazz , ClassDef } try { - ruleBase.getTraitRegistry().addTraitable( buildWrapperClassDefinition( coreKlazz, wrapperClass ) ); + ruleBase.getTraitRegistry().addTraitable( buildClassDefinition( coreKlazz, wrapperClass ) ); return wrapperClass != null ? wrapperClass.newInstance() : null; } catch (InstantiationException e) { return null; @@ -331,8 +353,8 @@ public synchronized CoreWrapper<K> getCoreWrapper( Class<K> coreKlazz , ClassDef } - private ClassDefinition buildWrapperClassDefinition(Class<K> coreKlazz, Class<? 
extends CoreWrapper<K>> wrapperClass) throws IOException { - ClassFieldInspector inspector = new ClassFieldInspector( coreKlazz ); + private ClassDefinition buildClassDefinition(Class<?> klazz, Class<?> wrapperClass) throws IOException { + ClassFieldInspector inspector = new ClassFieldInspector( klazz ); Package traitPackage = ruleBase.getPackagesMap().get( pack ); if ( traitPackage == null ) { @@ -342,14 +364,26 @@ private ClassDefinition buildWrapperClassDefinition(Class<K> coreKlazz, Class<? } ClassFieldAccessorStore store = traitPackage.getClassFieldAccessorStore(); - String className = coreKlazz.getName() + "Wrapper"; - String superClass = coreKlazz.getName(); - String[] interfaces = new String[] {CoreWrapper.class.getName()}; - ClassDefinition def = new ClassDefinition( className, superClass, interfaces ); - Traitable tbl = wrapperClass.getAnnotation( Traitable.class ); - def.setTraitable( true, tbl != null && tbl.logical() ); - def.setDefinedClass( wrapperClass ); + ClassDefinition def; + if ( ! klazz.isInterface() ) { + String className = wrapperClass.getName(); + String superClass = wrapperClass != klazz ? klazz.getName() : klazz.getSuperclass().getName(); + String[] interfaces = new String[] {CoreWrapper.class.getName()}; + def = new ClassDefinition( className, superClass, interfaces ); + def.setDefinedClass( wrapperClass ); + Traitable tbl = wrapperClass.getAnnotation( Traitable.class ); + def.setTraitable( true, tbl != null && tbl.logical() ); + } else { + String className = klazz.getName(); + String superClass = Object.class.getName(); + String[] interfaces = new String[ klazz.getInterfaces().length ]; + for ( int j = 0; j < klazz.getInterfaces().length; j++ ) { + interfaces[ j ] = klazz.getInterfaces()[ j ].getName(); + } + def = new ClassDefinition( className, superClass, interfaces ); + def.setDefinedClass( klazz ); + } Map<String, Field> fields = inspector.getFieldTypesField(); for ( Field f : fields.values() ) { if ( f != null ) {
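The shape of this change: rather than failing when a trait or core class was never DECLAREd, the factory now builds a definition on the fly whenever the class carries the expected annotation. A minimal sketch of that annotation-gated fallback, with stand-in types instead of Drools' ClassDefinition machinery:

```java
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class DefinitionRegistry {
    @Retention(RetentionPolicy.RUNTIME)
    public @interface Trait {}

    private final Map<String, Object> definitions = new ConcurrentHashMap<>();

    // Look up a registered definition; if absent, accept the class only when it is
    // explicitly annotated, mirroring the @Trait / @Traitable fallback in this commit.
    Object resolve(Class<?> klass) {
        Object def = definitions.get(klass.getName());
        if (def == null) {
            if (klass.getAnnotation(Trait.class) != null) {
                def = buildDefinition(klass);          // build lazily from the annotated class
                definitions.put(klass.getName(), def); // register for later lookups
            } else {
                throw new IllegalStateException(
                        klass.getName() + " should have been DECLARED as a trait");
            }
        }
        return def;
    }

    private Object buildDefinition(Class<?> klass) {
        return klass.getName() + "-definition"; // placeholder for real ClassDefinition building
    }
}
```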
2a590b78e4c9e814faa61457a04ec4f5f2c9a176
orientdb
Distributed: fixed issue #2008 on deploying database
c
https://github.com/orientechnologies/orientdb
diff --git a/distributed/src/main/java/com/orientechnologies/orient/server/hazelcast/OHazelcastDistributedDatabase.java b/distributed/src/main/java/com/orientechnologies/orient/server/hazelcast/OHazelcastDistributedDatabase.java index 693ea430372..756b5bf021b 100644 --- a/distributed/src/main/java/com/orientechnologies/orient/server/hazelcast/OHazelcastDistributedDatabase.java +++ b/distributed/src/main/java/com/orientechnologies/orient/server/hazelcast/OHazelcastDistributedDatabase.java @@ -23,7 +23,6 @@ import com.orientechnologies.orient.core.db.OScenarioThreadLocal; import com.orientechnologies.orient.core.db.OScenarioThreadLocal.RUN_MODE; import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx; -import com.orientechnologies.orient.core.record.impl.ODocument; import com.orientechnologies.orient.server.config.OServerUserConfiguration; import com.orientechnologies.orient.server.distributed.*; import com.orientechnologies.orient.server.distributed.ODistributedRequest.EXECUTION_MODE; @@ -533,9 +532,7 @@ protected void checkLocalNodeInConfiguration() { } if (dirty) { - final ODocument doc = cfg.serialize(); - manager.updateCachedDatabaseConfiguration(databaseName, doc); - manager.getConfigurationMap().put(OHazelcastPlugin.CONFIG_DATABASE_PREFIX + databaseName, doc); + manager.updateCachedDatabaseConfiguration(databaseName, cfg.serialize(), true, true); } } @@ -573,9 +570,7 @@ protected void removeNodeInConfiguration(final String iNode, final boolean iForc } if (dirty) { - final ODocument doc = cfg.serialize(); - manager.updateCachedDatabaseConfiguration(databaseName, doc); - manager.getConfigurationMap().put(OHazelcastPlugin.CONFIG_DATABASE_PREFIX + databaseName, doc); + manager.updateCachedDatabaseConfiguration(databaseName, cfg.serialize(), true, true); } } diff --git a/distributed/src/main/java/com/orientechnologies/orient/server/hazelcast/OHazelcastPlugin.java b/distributed/src/main/java/com/orientechnologies/orient/server/hazelcast/OHazelcastPlugin.java index e6d399d7a38..39964738933 100755 --- a/distributed/src/main/java/com/orientechnologies/orient/server/hazelcast/OHazelcastPlugin.java +++ b/distributed/src/main/java/com/orientechnologies/orient/server/hazelcast/OHazelcastPlugin.java @@ -470,7 +470,7 @@ public void entryAdded(EntryEvent<String, Object> iEvent) { } } else if (key.startsWith(CONFIG_DATABASE_PREFIX)) { - saveDatabaseConfiguration(key.substring(CONFIG_DATABASE_PREFIX.length()), (ODocument) iEvent.getValue()); + updateCachedDatabaseConfiguration(key.substring(CONFIG_DATABASE_PREFIX.length()), (ODocument) iEvent.getValue(), true, false); OClientConnectionManager.instance().pushDistribCfg2Clients(getClusterConfiguration()); } } @@ -493,7 +493,7 @@ public void entryUpdated(EntryEvent<String, Object> iEvent) { getNodeName(iEvent.getMember())); installNewDatabases(false); - saveDatabaseConfiguration(dbName, (ODocument) iEvent.getValue()); + updateCachedDatabaseConfiguration(dbName, (ODocument) iEvent.getValue(), true, false); OClientConnectionManager.instance().pushDistribCfg2Clients(getClusterConfiguration()); } } @@ -671,15 +671,20 @@ protected void installNewDatabases(final boolean iStartup) { ODistributedDatabaseChunk chunk = (ODistributedDatabaseChunk) value; final String fileName = System.getProperty("java.io.tmpdir") + "/orientdb/install_" + databaseName + ".zip"; + + ODistributedServerLog.warn(this, getLocalNodeName(), r.getKey(), DIRECTION.NONE, + "copying remote database '%s' to: %s", databaseName, fileName); + final File file = new File(fileName); 
if (file.exists()) file.delete(); + FileOutputStream out = null; try { - final FileOutputStream out = new FileOutputStream(fileName, false); + out = new FileOutputStream(fileName, false); - out.write(chunk.buffer); - for (int chunkNum = 1; !chunk.last; chunkNum++) { + long fileSize = writeDatabaseChunk(1, chunk, out); + for (int chunkNum = 2; !chunk.last; chunkNum++) { distrDatabase.setWaitForTaskType(OCopyDatabaseChunkTask.class); final Map<String, Object> result = (Map<String, Object>) sendRequest(databaseName, null, @@ -691,14 +696,25 @@ protected void installNewDatabases(final boolean iStartup) { continue; else { chunk = (ODistributedDatabaseChunk) res.getValue(); - out.write(chunk.buffer); + fileSize += writeDatabaseChunk(chunkNum, chunk, out); } } } + ODistributedServerLog.warn(this, getLocalNodeName(), null, DIRECTION.NONE, + "database copied correctly, size=%s", OFileUtils.getSizeAsString(fileSize)); + } catch (Exception e) { ODistributedServerLog.error(this, getLocalNodeName(), null, DIRECTION.NONE, "error on transferring database '%s' to '%s'", e, databaseName, fileName); + } finally { + try { + if (out != null) { + out.flush(); + out.close(); + } + } catch (IOException e) { + } } installDatabase(distrDatabase, databaseName, dbPath, r.getKey(), fileName); @@ -720,29 +736,51 @@ protected void installNewDatabases(final boolean iStartup) { } } - private void installDatabase(final OHazelcastDistributedDatabase distrDatabase, final String databaseName, final String dbPath, + public void updateCachedDatabaseConfiguration(String iDatabaseName, ODocument cfg, boolean iSaveToDisk, boolean iDeployToCluster) { + super.updateCachedDatabaseConfiguration(iDatabaseName, cfg, iSaveToDisk); + + if (iDeployToCluster) + // DEPLOY THE CONFIGURATION TO THE CLUSTER + getConfigurationMap().put(OHazelcastPlugin.CONFIG_DATABASE_PREFIX + iDatabaseName, cfg); + } + + protected long writeDatabaseChunk(final int iChunkId, final ODistributedDatabaseChunk chunk, final FileOutputStream out) + throws IOException { + + ODistributedServerLog.warn(this, getLocalNodeName(), null, DIRECTION.NONE, "- writing chunk #%d offset=%d size=%s", iChunkId, + chunk.offset, OFileUtils.getSizeAsString(chunk.buffer.length)); + out.write(chunk.buffer); + + return chunk.buffer.length; + } + + protected void installDatabase(final OHazelcastDistributedDatabase distrDatabase, final String databaseName, final String dbPath, final String iNode, final String iDatabaseCompressedFile) { - ODistributedServerLog.warn(this, getLocalNodeName(), iNode, DIRECTION.IN, "installing database %s in %s...", databaseName, + ODistributedServerLog.warn(this, getLocalNodeName(), iNode, DIRECTION.IN, "installing database '%s' to: %s...", databaseName, dbPath); try { - final FileInputStream in = new FileInputStream(iDatabaseCompressedFile); + File f = new File(iDatabaseCompressedFile); new File(dbPath).mkdirs(); final ODatabaseDocumentTx db = new ODatabaseDocumentTx("local:" + dbPath); - db.restore(in, null, null); - in.close(); + final FileInputStream in = new FileInputStream(f); + try { + db.restore(in, null, null); + } finally { + in.close(); + } db.close(); Orient.instance().unregisterStorageByName(db.getName()); - ODistributedServerLog.warn(this, getLocalNodeName(), null, DIRECTION.NONE, - "installed database %s in %s, setting it online...", databaseName, dbPath); + ODistributedServerLog.warn(this, getLocalNodeName(), null, DIRECTION.NONE, "installed database '%s', setting it online...", + databaseName); distrDatabase.setOnline(); - 
ODistributedServerLog.warn(this, getLocalNodeName(), null, DIRECTION.NONE, "database %s is online", databaseName); + ODistributedServerLog.warn(this, getLocalNodeName(), null, DIRECTION.NONE, "database '%s' is online", databaseName); } catch (IOException e) { ODistributedServerLog.warn(this, getLocalNodeName(), null, DIRECTION.IN, "error on copying database '%s' on local server", e, @@ -759,7 +797,7 @@ protected ODocument loadDatabaseConfiguration(final String iDatabaseName, final ODistributedServerLog.info(this, getLocalNodeName(), null, DIRECTION.NONE, "loaded database configuration from active cluster"); - updateCachedDatabaseConfiguration(iDatabaseName, cfg); + updateCachedDatabaseConfiguration(iDatabaseName, cfg, false, false); return cfg; } } diff --git a/server/src/main/java/com/orientechnologies/orient/server/distributed/ODistributedAbstractPlugin.java b/server/src/main/java/com/orientechnologies/orient/server/distributed/ODistributedAbstractPlugin.java index 3c3b2797e7b..4282649d130 100755 --- a/server/src/main/java/com/orientechnologies/orient/server/distributed/ODistributedAbstractPlugin.java +++ b/server/src/main/java/com/orientechnologies/orient/server/distributed/ODistributedAbstractPlugin.java @@ -15,14 +15,6 @@ */ package com.orientechnologies.orient.server.distributed; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; - import com.orientechnologies.common.log.OLogManager; import com.orientechnologies.common.parser.OSystemVariableResolver; import com.orientechnologies.orient.core.Orient; @@ -38,6 +30,14 @@ import com.orientechnologies.orient.server.distributed.conflict.OReplicationConflictResolver; import com.orientechnologies.orient.server.plugin.OServerPluginAbstract; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; + /** * Abstract plugin to manage the distributed environment. 
* @@ -197,7 +197,7 @@ protected ODocument loadDatabaseConfiguration(final String iDatabaseName, final f.read(buffer); final ODocument doc = (ODocument) new ODocument().fromJSON(new String(buffer), "noMap"); - updateCachedDatabaseConfiguration(iDatabaseName, doc); + updateCachedDatabaseConfiguration(iDatabaseName, doc, false); return doc; } catch (Exception e) { @@ -211,12 +211,51 @@ protected ODocument loadDatabaseConfiguration(final String iDatabaseName, final return null; } - public void updateCachedDatabaseConfiguration(final String iDatabaseName, final ODocument cfg) { + public void updateCachedDatabaseConfiguration(final String iDatabaseName, final ODocument cfg, final boolean iSaveToDisk) { synchronized (cachedDatabaseConfiguration) { + final ODocument oldCfg = cachedDatabaseConfiguration.get(iDatabaseName); + if (oldCfg != null && (oldCfg == cfg || Arrays.equals(oldCfg.toStream(), cfg.toStream()))) + // NO CHANGE, SKIP IT + return; + + // INCREMENT VERSION + Integer oldVersion = cfg.field("version"); + if (oldVersion == null) + oldVersion = 0; + cfg.field("version", oldVersion.intValue() + 1); + + // SAVE IN NODE'S LOCAL RAM cachedDatabaseConfiguration.put(iDatabaseName, cfg); + // PRINT THE NEW CONFIGURATION OLogManager.instance().info(this, "updated distributed configuration for database: %s:\n----------\n%s\n----------", iDatabaseName, cfg.toJSON("prettyPrint")); + + if (iSaveToDisk) { + // SAVE THE CONFIGURATION TO DISK + FileOutputStream f = null; + try { + File file = getDistributedConfigFile(iDatabaseName); + + OLogManager.instance().info(this, "Saving distributed configuration file for database '%s' to: %s", iDatabaseName, file); + + if (!file.exists()) + file.createNewFile(); + + f = new FileOutputStream(file); + f.write(cfg.toJSON().getBytes()); + f.flush(); + } catch (Exception e) { + OLogManager.instance().error(this, "Error on saving distributed configuration file", e); + + } finally { + if (f != null) + try { + f.close(); + } catch (IOException e) { + } + } + } } } @@ -235,42 +274,6 @@ public ODistributedConfiguration getDatabaseConfiguration(final String iDatabase } } - protected void saveDatabaseConfiguration(final String iDatabaseName, final ODocument cfg) { - synchronized (cachedDatabaseConfiguration) { - final ODocument oldCfg = cachedDatabaseConfiguration.get(iDatabaseName); - if (oldCfg != null && Arrays.equals(oldCfg.toStream(), cfg.toStream())) - // NO CHANGE, SKIP IT - return; - } - - // INCREMENT VERSION - Integer oldVersion = cfg.field("version"); - if (oldVersion == null) - oldVersion = 0; - cfg.field("version", oldVersion.intValue() + 1); - - updateCachedDatabaseConfiguration(iDatabaseName, cfg); - - FileOutputStream f = null; - try { - File file = getDistributedConfigFile(iDatabaseName); - - OLogManager.instance().config(this, "Saving distributed configuration file for database '%s' in: %s", iDatabaseName, file); - - f = new FileOutputStream(file); - f.write(cfg.toJSON().getBytes()); - } catch (Exception e) { - OLogManager.instance().error(this, "Error on saving distributed configuration file", e); - - } finally { - if (f != null) - try { - f.close(); - } catch (IOException e) { - } - } - } - public File getDistributedConfigFile(final String iDatabaseName) { return new File(serverInstance.getDatabaseDirectory() + iDatabaseName + "/" + FILE_DISTRIBUTED_DB_CONFIG); } diff --git a/server/src/main/java/com/orientechnologies/orient/server/distributed/task/ODeployDatabaseTask.java 
b/server/src/main/java/com/orientechnologies/orient/server/distributed/task/ODeployDatabaseTask.java index 99ead1c84eb..13b9c886960 100644 --- a/server/src/main/java/com/orientechnologies/orient/server/distributed/task/ODeployDatabaseTask.java +++ b/server/src/main/java/com/orientechnologies/orient/server/distributed/task/ODeployDatabaseTask.java @@ -60,18 +60,28 @@ public Object execute(final OServer iServer, ODistributedServerManager iManager, ODistributedServerLog.warn(this, iManager.getLocalNodeName(), getNodeSource(), DIRECTION.OUT, "deploying database %s...", databaseName); - final File f = new File(BACKUP_DIRECTORY + "/" + database.getName()); + final File f = new File(BACKUP_DIRECTORY + "/backup_" + database.getName() + ".zip"); if (f.exists()) f.delete(); + else + f.getParentFile().mkdirs(); f.createNewFile(); + ODistributedServerLog.warn(this, iManager.getLocalNodeName(), getNodeSource(), DIRECTION.OUT, + "creating backup of database '%s' in directory: %s...", databaseName, f.getAbsolutePath()); + database.backup(new FileOutputStream(f), null, null); ODistributedServerLog.warn(this, iManager.getLocalNodeName(), getNodeSource(), DIRECTION.OUT, "sending the compressed database '%s' over the network to node '%s', size=%s...", databaseName, getNodeSource(), OFileUtils.getSizeAsString(f.length())); - return new ODistributedDatabaseChunk(f, 0, CHUNK_MAX_SIZE); + final ODistributedDatabaseChunk chunk = new ODistributedDatabaseChunk(f, 0, CHUNK_MAX_SIZE); + + ODistributedServerLog.warn(this, iManager.getLocalNodeName(), getNodeSource(), ODistributedServerLog.DIRECTION.OUT, + "- transferring chunk #%d offset=%d size=%s...", 1, 0, OFileUtils.getSizeAsNumber(chunk.buffer.length)); + + return chunk; } finally { lock.unlock();
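The deploy fix above does two things worth isolating: it logs and sizes each transferred chunk through writeDatabaseChunk, and it moves the flush/close of the output stream into a finally block so a failed transfer no longer leaks the file handle. A minimal sketch of that loop, assuming a hypothetical Chunk type in place of ODistributedDatabaseChunk and an Iterator standing in for the sendRequest round-trips:

import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Iterator;

class ChunkedCopy {
    static class Chunk {
        final byte[] buffer;
        final boolean last;
        Chunk(byte[] buffer, boolean last) { this.buffer = buffer; this.last = last; }
    }

    static long copy(Iterator<Chunk> chunks, String fileName) throws IOException {
        long fileSize = 0;
        FileOutputStream out = null;
        try {
            out = new FileOutputStream(fileName, false);
            int chunkNum = 1;
            Chunk chunk;
            do {
                chunk = chunks.next(); // stands in for the remote chunk request
                out.write(chunk.buffer);
                fileSize += chunk.buffer.length;
                System.out.printf("- writing chunk #%d size=%d%n", chunkNum++, chunk.buffer.length);
            } while (!chunk.last);
            return fileSize;
        } finally {
            if (out != null) {
                out.flush();
                out.close(); // never leak the stream, even if a chunk transfer failed
            }
        }
    }
}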
b6096079c17488b1232f7db942c529c7eb5f9843
ReactiveX-RxJava
Unlock in finally block
c
https://github.com/ReactiveX/RxJava
diff --git a/rxjava-core/src/main/java/rx/observers/SerializedObserverViaStateMachine.java b/rxjava-core/src/main/java/rx/observers/SerializedObserverViaStateMachine.java index fdd6e844ad..14c9612e07 100644 --- a/rxjava-core/src/main/java/rx/observers/SerializedObserverViaStateMachine.java +++ b/rxjava-core/src/main/java/rx/observers/SerializedObserverViaStateMachine.java @@ -63,25 +63,24 @@ public void onNext(T t) { } while (!state.compareAndSet(current, newState)); if (newState.shouldProcess()) { - if (newState == State.PROCESS_SELF) { - s.onNext(t); - - // finish processing to let this thread move on - do { - current = state.get(); - newState = current.finishProcessing(1); - } while (!state.compareAndSet(current, newState)); - } else { - // drain queue - Object[] items = newState.queue; - for (int i = 0; i < items.length; i++) { - s.onNext((T) items[i]); + int numItemsProcessed = 0; + try { + if (newState == State.PROCESS_SELF) { + s.onNext(t); + numItemsProcessed++; + } else { + // drain queue + Object[] items = newState.queue; + for (int i = 0; i < items.length; i++) { + s.onNext((T) items[i]); + numItemsProcessed++; + } } - + } finally { // finish processing to let this thread move on do { current = state.get(); - newState = current.finishProcessing(items.length); + newState = current.finishProcessing(numItemsProcessed); } while (!state.compareAndSet(current, newState)); } }
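The essence of the fix: counting items as they are delivered and performing the finishProcessing state transition in a finally block guarantees the emitter lock is released even when a downstream onNext throws. A minimal sketch of the pattern with an AtomicBoolean in place of RxJava's State machine (SerializedSink is a hypothetical name, not the RxJava type):

import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;

class SerializedSink<T> {
    private final AtomicBoolean emitting = new AtomicBoolean();

    void emit(T item, Consumer<T> downstream) {
        if (!emitting.compareAndSet(false, true)) {
            return; // sketch only: a real implementation would queue the item here
        }
        try {
            downstream.accept(item); // may throw
        } finally {
            emitting.set(false); // release even on error, mirroring finishProcessing()
        }
    }
}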
db14c09d6257c537ce0b77f004410ce1606db73f
ReactiveX-RxJava
1.x: Expose Single.lift()
a
https://github.com/ReactiveX/RxJava
diff --git a/src/main/java/rx/Single.java b/src/main/java/rx/Single.java index 3489f1a55c..b7e99671d2 100644 --- a/src/main/java/rx/Single.java +++ b/src/main/java/rx/Single.java @@ -163,9 +163,8 @@ public interface OnSubscribe<T> extends Action1<SingleSubscriber<? super T>> { * @return a Single that is the result of applying the lifted Operator to the source Single * @see <a href="https://github.com/ReactiveX/RxJava/wiki/Implementing-Your-Own-Operators">RxJava wiki: Implementing Your Own Operators</a> */ - private <R> Single<R> lift(final Operator<? extends R, ? super T> lift) { - // This method is private because not sure if we want to expose the Observable.Operator in this public API rather than a Single.Operator - + @Experimental + public final <R> Single<R> lift(final Operator<? extends R, ? super T> lift) { return new Single<R>(new Observable.OnSubscribe<R>() { @Override public void call(Subscriber<? super R> o) {
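Since lift is now public, callers can install a custom Operator directly on a Single. A hedged usage sketch against the 1.x API shown in the diff (LiftExample and the operator are illustrative, not from the commit):

import rx.Observable.Operator;
import rx.Single;
import rx.Subscriber;

public class LiftExample {
    public static void main(String[] args) {
        Single<Integer> source = Single.just(21);

        // An operator that doubles the value and renders it as text.
        Single<String> doubledAsText = source.lift(new Operator<String, Integer>() {
            @Override
            public Subscriber<? super Integer> call(final Subscriber<? super String> child) {
                return new Subscriber<Integer>(child) {
                    @Override public void onNext(Integer value) { child.onNext(String.valueOf(value * 2)); }
                    @Override public void onError(Throwable e) { child.onError(e); }
                    @Override public void onCompleted() { child.onCompleted(); }
                };
            }
        });

        doubledAsText.subscribe(System.out::println); // prints "42"
    }
}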
de8f0efe60233436431930447d7672f2a1dc8878
hadoop
MAPREDUCE-3121. NodeManager should handle disk-failures (Ravi Gummadi via mahadev) - Merging r1208131 from trunk. git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1208135 13f79535-47bb-0310-9956-ffa450edef68
a
https://github.com/apache/hadoop
diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt index 23fc209db4ee7..e349d206ff33f 100644 --- a/hadoop-mapreduce-project/CHANGES.txt +++ b/hadoop-mapreduce-project/CHANGES.txt @@ -6,6 +6,8 @@ Release 0.23.1 - Unreleased NEW FEATURES + MAPREDUCE-3121. NodeManager should handle disk-failures (Ravi Gummadi via mahadev) + IMPROVEMENTS MAPREDUCE-3375. [Gridmix] Memory Emulation system tests. (Vinay Thota via amarrk) diff --git a/hadoop-mapreduce-project/conf/container-executor.cfg b/hadoop-mapreduce-project/conf/container-executor.cfg index 1c11734b48949..fe1d680529650 100644 --- a/hadoop-mapreduce-project/conf/container-executor.cfg +++ b/hadoop-mapreduce-project/conf/container-executor.cfg @@ -1,3 +1,3 @@ -yarn.nodemanager.local-dirs=#configured value of yarn.nodemanager.local-dirs. It can be a list of comma separated paths. -yarn.nodemanager.log-dirs=#configured value of yarn.nodemanager.log-dirs. yarn.nodemanager.linux-container-executor.group=#configured value of yarn.nodemanager.linux-container-executor.group +banned.users=#comma separated list of users who can not run applications +min.user.id=1000#Prevent other super-users diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java index 19f558c67261b..14d8644e6e0a7 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java @@ -113,9 +113,10 @@ public void setup(JobConf conf) throws IOException { Map<LocalResource, Future<Path>> resourcesToPaths = Maps.newHashMap(); ExecutorService exec = Executors.newCachedThreadPool(); + Path destPath = localDirAllocator.getLocalPathForWrite(".", conf); for (LocalResource resource : localResources.values()) { Callable<Path> download = new FSDownload(localFSFileContext, ugi, conf, - localDirAllocator, resource, new Random()); + destPath, resource, new Random()); Future<Path> future = exec.submit(download); resourcesToPaths.put(resource, future); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java index 845d64f800b60..1120413eb7c0d 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java @@ -56,7 +56,7 @@ public MiniMRYarnCluster(String testName) { } public MiniMRYarnCluster(String testName, int noOfNMs) { - super(testName, noOfNMs); + super(testName, noOfNMs, 4, 4); //TODO: add the history server historyServerWrapper = new JobHistoryServerWrapper(); addService(historyServerWrapper); diff --git 
a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java index 472c959eda58d..fb99c1cc2270d 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java @@ -43,7 +43,8 @@ public class TestDistributedShell { public static void setup() throws InterruptedException, IOException { LOG.info("Starting up YARN cluster"); if (yarnCluster == null) { - yarnCluster = new MiniYARNCluster(TestDistributedShell.class.getName()); + yarnCluster = new MiniYARNCluster(TestDistributedShell.class.getName(), + 1, 1, 1); yarnCluster.init(conf); yarnCluster.start(); } diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java index 0779a5f7320a1..d4b8f9fc56c9e 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java @@ -351,13 +351,39 @@ public class YarnConfiguration extends Configuration { /** Class that calculates containers current resource utilization.*/ public static final String NM_CONTAINER_MON_RESOURCE_CALCULATOR = NM_PREFIX + "container-monitor.resource-calculator.class"; - + + /** + * Enable/Disable disks' health checker. Default is true. + * An expert level configuration property. + */ + public static final String NM_DISK_HEALTH_CHECK_ENABLE = + NM_PREFIX + "disk-health-checker.enable"; + /** Frequency of running disks' health checker.*/ + public static final String NM_DISK_HEALTH_CHECK_INTERVAL_MS = + NM_PREFIX + "disk-health-checker.interval-ms"; + /** By default, disks' health is checked every 2 minutes. */ + public static final long DEFAULT_NM_DISK_HEALTH_CHECK_INTERVAL_MS = + 2 * 60 * 1000; + + /** + * The minimum fraction of number of disks to be healthy for the nodemanager + * to launch new containers. This applies to nm-local-dirs and nm-log-dirs. + */ + public static final String NM_MIN_HEALTHY_DISKS_FRACTION = + NM_PREFIX + "disk-health-checker.min-healthy-disks"; + /** + * By default, at least 5% of disks are to be healthy to say that the node + * is healthy in terms of disks. 
+ */ + public static final float DEFAULT_NM_MIN_HEALTHY_DISKS_FRACTION + = 0.25F; + /** Frequency of running node health script.*/ public static final String NM_HEALTH_CHECK_INTERVAL_MS = NM_PREFIX + "health-checker.interval-ms"; public static final long DEFAULT_NM_HEALTH_CHECK_INTERVAL_MS = 10 * 60 * 1000; - - /** Script time out period.*/ + + /** Health check script time out period.*/ public static final String NM_HEALTH_CHECK_SCRIPT_TIMEOUT_MS = NM_PREFIX + "health-checker.script.timeout-ms"; public static final long DEFAULT_NM_HEALTH_CHECK_SCRIPT_TIMEOUT_MS = diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java index b49ecc784cc46..0845c446670c9 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java @@ -31,6 +31,7 @@ import java.security.PrivilegedExceptionAction; import java.util.EnumSet; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.Map.Entry; @@ -105,12 +106,12 @@ public String toString() { public static class LogValue { - private final String[] rootLogDirs; + private final List<String> rootLogDirs; private final ContainerId containerId; // TODO Maybe add a version string here. Instead of changing the version of // the entire k-v format - public LogValue(String[] rootLogDirs, ContainerId containerId) { + public LogValue(List<String> rootLogDirs, ContainerId containerId) { this.rootLogDirs = rootLogDirs; this.containerId = containerId; } diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java index cccb140d99b3a..24a23c8c0c25a 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java @@ -33,7 +33,6 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.fs.LocalDirAllocator; import org.apache.hadoop.fs.Options.Rename; import org.apache.hadoop.fs.Path; import org.apache.hadoop.security.UserGroupInformation; @@ -56,7 +55,10 @@ public class FSDownload implements Callable<Path> { private final UserGroupInformation userUgi; private Configuration conf; private LocalResource resource; - private LocalDirAllocator dirs; + + /** The local FS dir path under which this resource is to be localized to */ + private Path destDirPath; + private static final FsPermission cachePerms = new FsPermission( (short) 0755); static final FsPermission PUBLIC_FILE_PERMS = new FsPermission((short) 0555); @@ -65,10 +67,11 @@ public class FSDownload implements Callable<Path> { static final FsPermission PUBLIC_DIR_PERMS = new FsPermission((short) 0755); static final FsPermission PRIVATE_DIR_PERMS = new FsPermission((short) 0700); + public FSDownload(FileContext files, UserGroupInformation ugi, Configuration conf, - LocalDirAllocator dirs, LocalResource resource, 
Random rand) { + Path destDirPath, LocalResource resource, Random rand) { this.conf = conf; - this.dirs = dirs; + this.destDirPath = destDirPath; this.files = files; this.userUgi = ugi; this.resource = resource; @@ -136,15 +139,13 @@ public Path call() throws Exception { } Path tmp; - Path dst = - dirs.getLocalPathForWrite(".", getEstimatedSize(resource), - conf); do { - tmp = new Path(dst, String.valueOf(rand.nextLong())); + tmp = new Path(destDirPath, String.valueOf(rand.nextLong())); } while (files.util().exists(tmp)); - dst = tmp; - files.mkdir(dst, cachePerms, false); - final Path dst_work = new Path(dst + "_tmp"); + destDirPath = tmp; + + files.mkdir(destDirPath, cachePerms, false); + final Path dst_work = new Path(destDirPath + "_tmp"); files.mkdir(dst_work, cachePerms, false); Path dFinal = files.makeQualified(new Path(dst_work, sCopy.getName())); @@ -158,9 +159,9 @@ public Path run() throws Exception { }); unpack(new File(dTmp.toUri()), new File(dFinal.toUri())); changePermissions(dFinal.getFileSystem(conf), dFinal); - files.rename(dst_work, dst, Rename.OVERWRITE); + files.rename(dst_work, destDirPath, Rename.OVERWRITE); } catch (Exception e) { - try { files.delete(dst, true); } catch (IOException ignore) { } + try { files.delete(destDirPath, true); } catch (IOException ignore) { } throw e; } finally { try { @@ -170,9 +171,8 @@ public Path run() throws Exception { rand = null; conf = null; resource = null; - dirs = null; } - return files.makeQualified(new Path(dst, sCopy.getName())); + return files.makeQualified(new Path(destDirPath, sCopy.getName())); } /** @@ -221,17 +221,4 @@ public Void run() throws Exception { } } - private static long getEstimatedSize(LocalResource rsrc) { - if (rsrc.getSize() < 0) { - return -1; - } - switch (rsrc.getType()) { - case ARCHIVE: - return 5 * rsrc.getSize(); - case FILE: - default: - return rsrc.getSize(); - } - } - } diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestFSDownload.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestFSDownload.java index b7237bdefc2e7..fe1f3ac003125 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestFSDownload.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestFSDownload.java @@ -146,13 +146,14 @@ public void testDownload() throws IOException, URISyntaxException, vis = LocalResourceVisibility.APPLICATION; break; } - - LocalResource rsrc = createFile(files, new Path(basedir, "" + i), - sizes[i], rand, vis); + Path p = new Path(basedir, "" + i); + LocalResource rsrc = createFile(files, p, sizes[i], rand, vis); rsrcVis.put(rsrc, vis); + Path destPath = dirs.getLocalPathForWrite( + basedir.toString(), sizes[i], conf); FSDownload fsd = new FSDownload(files, UserGroupInformation.getCurrentUser(), conf, - dirs, rsrc, new Random(sharedSeed)); + destPath, rsrc, new Random(sharedSeed)); pending.put(rsrc, exec.submit(fsd)); } @@ -249,13 +250,15 @@ public void testDirDownload() throws IOException, InterruptedException { vis = LocalResourceVisibility.APPLICATION; break; } - - LocalResource rsrc = createJar(files, new Path(basedir, "dir" + i - + ".jar"), vis); + + Path p = new Path(basedir, "dir" + i + ".jar"); + LocalResource rsrc = createJar(files, p, vis); rsrcVis.put(rsrc, vis); + Path destPath = dirs.getLocalPathForWrite( + basedir.toString(), conf); FSDownload fsd = new 
FSDownload(files, UserGroupInformation.getCurrentUser(), conf, - dirs, rsrc, new Random(sharedSeed)); + destPath, rsrc, new Random(sharedSeed)); pending.put(rsrc, exec.submit(fsd)); } diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/resources/yarn-default.xml b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/resources/yarn-default.xml index ac6bce2dda355..fdb7cb6c5b7df 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/resources/yarn-default.xml +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/resources/yarn-default.xml @@ -388,6 +388,22 @@ <value></value> </property> + <property> + <description>Frequency of running disk health checker code.</description> + <name>yarn.nodemanager.disk-health-checker.interval-ms</name> + <value>120000</value> + </property> + + <property> + <description>The minimum fraction of number of disks to be healthy for the + nodemanager to launch new containers. This correspond to both + yarn-nodemanager.local-dirs and yarn.nodemanager.log-dirs. i.e. If there + are less number of healthy local-dirs (or log-dirs) available, then + new containers will not be launched on this node.</description> + <name>yarn.nodemanager.disk-health-checker.min-healthy-disks</name> + <value>0.25</value> + </property> + <property> <description>The path to the Linux container executor.</description> <name>yarn.nodemanager.linux-container-executor.path</name> diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java index 6c3667ae5f941..e6a47da89c971 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java @@ -45,6 +45,7 @@ public abstract class ContainerExecutor implements Configurable { FsPermission.createImmutable((short) 0700); private Configuration conf; + private ConcurrentMap<ContainerId, Path> pidFiles = new ConcurrentHashMap<ContainerId, Path>(); @@ -68,7 +69,7 @@ public Configuration getConf() { * @throws IOException */ public abstract void init() throws IOException; - + /** * Prepare the environment for containers in this application to execute. 
* For $x in local.dirs @@ -82,12 +83,14 @@ public Configuration getConf() { * @param appId id of the application * @param nmPrivateContainerTokens path to localized credentials, rsrc by NM * @param nmAddr RPC address to contact NM + * @param localDirs nm-local-dirs + * @param logDirs nm-log-dirs * @throws IOException For most application init failures * @throws InterruptedException If application init thread is halted by NM */ public abstract void startLocalizer(Path nmPrivateContainerTokens, InetSocketAddress nmAddr, String user, String appId, String locId, - List<Path> localDirs) + List<String> localDirs, List<String> logDirs) throws IOException, InterruptedException; @@ -100,12 +103,15 @@ public abstract void startLocalizer(Path nmPrivateContainerTokens, * @param user the user of the container * @param appId the appId of the container * @param containerWorkDir the work dir for the container + * @param localDirs nm-local-dirs to be used for this container + * @param logDirs nm-log-dirs to be used for this container * @return the return status of the launch * @throws IOException */ public abstract int launchContainer(Container container, Path nmPrivateContainerScriptPath, Path nmPrivateTokensPath, - String user, String appId, Path containerWorkDir) throws IOException; + String user, String appId, Path containerWorkDir, List<String> localDirs, + List<String> logDirs) throws IOException; public abstract boolean signalContainer(String user, String pid, Signal signal) @@ -116,7 +122,8 @@ public abstract void deleteAsUser(String user, Path subDir, Path... basedirs) public enum ExitCode { FORCE_KILLED(137), - TERMINATED(143); + TERMINATED(143), + DISKS_FAILED(-101); private final int code; private ExitCode(int exitCode) { diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java index 9c252b142d44a..bd953174aa0cc 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java @@ -26,6 +26,7 @@ import java.io.IOException; import java.io.PrintStream; import java.net.InetSocketAddress; +import java.util.ArrayList; import java.util.Arrays; import java.util.EnumSet; import java.util.List; @@ -39,7 +40,6 @@ import org.apache.hadoop.util.Shell.ExitCodeException; import org.apache.hadoop.util.Shell.ShellCommandExecutor; import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container; import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerDiagnosticsUpdateEvent; @@ -77,16 +77,17 @@ public void init() throws IOException { @Override public void startLocalizer(Path nmPrivateContainerTokensPath, InetSocketAddress nmAddr, String user, String appId, String locId, - List<Path> localDirs) throws IOException, InterruptedException { + List<String> localDirs, List<String> logDirs) + throws 
IOException, InterruptedException { ContainerLocalizer localizer = - new ContainerLocalizer(this.lfs, user, appId, locId, - localDirs, RecordFactoryProvider.getRecordFactory(getConf())); + new ContainerLocalizer(lfs, user, appId, locId, getPaths(localDirs), + RecordFactoryProvider.getRecordFactory(getConf())); createUserLocalDirs(localDirs, user); createUserCacheDirs(localDirs, user); createAppDirs(localDirs, user, appId); - createAppLogDirs(appId); + createAppLogDirs(appId, logDirs); // TODO: Why pick first app dir. The same in LCE why not random? Path appStorageDir = getFirstApplicationDir(localDirs, user, appId); @@ -104,8 +105,8 @@ public void startLocalizer(Path nmPrivateContainerTokensPath, @Override public int launchContainer(Container container, Path nmPrivateContainerScriptPath, Path nmPrivateTokensPath, - String userName, String appId, Path containerWorkDir) - throws IOException { + String userName, String appId, Path containerWorkDir, + List<String> localDirs, List<String> logDirs) throws IOException { ContainerId containerId = container.getContainerID(); @@ -115,10 +116,7 @@ public int launchContainer(Container container, ConverterUtils.toString( container.getContainerID().getApplicationAttemptId(). getApplicationId()); - String[] sLocalDirs = getConf().getStrings( - YarnConfiguration.NM_LOCAL_DIRS, - YarnConfiguration.DEFAULT_NM_LOCAL_DIRS); - for (String sLocalDir : sLocalDirs) { + for (String sLocalDir : localDirs) { Path usersdir = new Path(sLocalDir, ContainerLocalizer.USERCACHE); Path userdir = new Path(usersdir, userName); Path appCacheDir = new Path(userdir, ContainerLocalizer.APPCACHE); @@ -128,7 +126,7 @@ public int launchContainer(Container container, } // Create the container log-dirs on all disks - createContainerLogDirs(appIdStr, containerIdStr); + createContainerLogDirs(appIdStr, containerIdStr, logDirs); // copy launch script to work dir Path launchDst = @@ -299,9 +297,9 @@ public void deleteAsUser(String user, Path subDir, Path... 
baseDirs) * $logdir/$user/$appId */ private static final short LOGDIR_PERM = (short)0710; - private Path getFirstApplicationDir(List<Path> localDirs, String user, + private Path getFirstApplicationDir(List<String> localDirs, String user, String appId) { - return getApplicationDir(localDirs.get(0), user, appId); + return getApplicationDir(new Path(localDirs.get(0)), user, appId); } private Path getApplicationDir(Path base, String user, String appId) { @@ -328,14 +326,14 @@ private Path getFileCacheDir(Path base, String user) { * <li>$local.dir/usercache/$user</li> * </ul> */ - private void createUserLocalDirs(List<Path> localDirs, String user) + private void createUserLocalDirs(List<String> localDirs, String user) throws IOException { boolean userDirStatus = false; FsPermission userperms = new FsPermission(USER_PERM); - for (Path localDir : localDirs) { + for (String localDir : localDirs) { // create $local.dir/usercache/$user and its immediate parent try { - lfs.mkdir(getUserCacheDir(localDir, user), userperms, true); + lfs.mkdir(getUserCacheDir(new Path(localDir), user), userperms, true); } catch (IOException e) { LOG.warn("Unable to create the user directory : " + localDir, e); continue; @@ -357,7 +355,7 @@ private void createUserLocalDirs(List<Path> localDirs, String user) * <li>$local.dir/usercache/$user/filecache</li> * </ul> */ - private void createUserCacheDirs(List<Path> localDirs, String user) + private void createUserCacheDirs(List<String> localDirs, String user) throws IOException { LOG.info("Initializing user " + user); @@ -366,9 +364,10 @@ private void createUserCacheDirs(List<Path> localDirs, String user) FsPermission appCachePerms = new FsPermission(APPCACHE_PERM); FsPermission fileperms = new FsPermission(FILECACHE_PERM); - for (Path localDir : localDirs) { + for (String localDir : localDirs) { // create $local.dir/usercache/$user/appcache - final Path appDir = getAppcacheDir(localDir, user); + Path localDirPath = new Path(localDir); + final Path appDir = getAppcacheDir(localDirPath, user); try { lfs.mkdir(appDir, appCachePerms, true); appcacheDirStatus = true; @@ -376,7 +375,7 @@ private void createUserCacheDirs(List<Path> localDirs, String user) LOG.warn("Unable to create app cache directory : " + appDir, e); } // create $local.dir/usercache/$user/filecache - final Path distDir = getFileCacheDir(localDir, user); + final Path distDir = getFileCacheDir(localDirPath, user); try { lfs.mkdir(distDir, fileperms, true); distributedCacheDirStatus = true; @@ -403,12 +402,12 @@ private void createUserCacheDirs(List<Path> localDirs, String user) * </ul> * @param localDirs */ - private void createAppDirs(List<Path> localDirs, String user, String appId) + private void createAppDirs(List<String> localDirs, String user, String appId) throws IOException { boolean initAppDirStatus = false; FsPermission appperms = new FsPermission(APPDIR_PERM); - for (Path localDir : localDirs) { - Path fullAppDir = getApplicationDir(localDir, user, appId); + for (String localDir : localDirs) { + Path fullAppDir = getApplicationDir(new Path(localDir), user, appId); // create $local.dir/usercache/$user/appcache/$appId try { lfs.mkdir(fullAppDir, appperms, true); @@ -427,15 +426,12 @@ private void createAppDirs(List<Path> localDirs, String user, String appId) /** * Create application log directories on all disks. 
*/ - private void createAppLogDirs(String appId) + private void createAppLogDirs(String appId, List<String> logDirs) throws IOException { - String[] rootLogDirs = - getConf() - .getStrings(YarnConfiguration.NM_LOG_DIRS, YarnConfiguration.DEFAULT_NM_LOG_DIRS); - + boolean appLogDirStatus = false; FsPermission appLogDirPerms = new FsPermission(LOGDIR_PERM); - for (String rootLogDir : rootLogDirs) { + for (String rootLogDir : logDirs) { // create $log.dir/$appid Path appLogDir = new Path(rootLogDir, appId); try { @@ -455,15 +451,12 @@ private void createAppLogDirs(String appId) /** * Create application log directories on all disks. */ - private void createContainerLogDirs(String appId, String containerId) - throws IOException { - String[] rootLogDirs = - getConf() - .getStrings(YarnConfiguration.NM_LOG_DIRS, YarnConfiguration.DEFAULT_NM_LOG_DIRS); - + private void createContainerLogDirs(String appId, String containerId, + List<String> logDirs) throws IOException { + boolean containerLogDirStatus = false; FsPermission containerLogDirPerms = new FsPermission(LOGDIR_PERM); - for (String rootLogDir : rootLogDirs) { + for (String rootLogDir : logDirs) { // create $log.dir/$appid/$containerid Path appLogDir = new Path(rootLogDir, appId); Path containerLogDir = new Path(appLogDir, containerId); @@ -483,4 +476,15 @@ private void createContainerLogDirs(String appId, String containerId) + containerId); } } + + /** + * @return the list of paths of given local directories + */ + private static List<Path> getPaths(List<String> dirs) { + List<Path> paths = new ArrayList<Path>(dirs.size()); + for (int i = 0; i < dirs.size(); i++) { + paths.add(new Path(dirs.get(i))); + } + return paths; + } } diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DirectoryCollection.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DirectoryCollection.java new file mode 100644 index 0000000000000..67ed4618a0e4c --- /dev/null +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DirectoryCollection.java @@ -0,0 +1,96 @@ +/** +* Licensed to the Apache Software Foundation (ASF) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The ASF licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+*/ + +package org.apache.hadoop.yarn.server.nodemanager; + +import java.io.File; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.ListIterator; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.util.DiskChecker; +import org.apache.hadoop.util.DiskChecker.DiskErrorException; + +/** + * Manages a list of local storage directories. + */ +class DirectoryCollection { + private static final Log LOG = LogFactory.getLog(DirectoryCollection.class); + + // Good local storage directories + private List<String> localDirs; + private List<String> failedDirs; + private int numFailures; + + public DirectoryCollection(String[] dirs) { + localDirs = new ArrayList<String>(); + localDirs.addAll(Arrays.asList(dirs)); + failedDirs = new ArrayList<String>(); + } + + /** + * @return the current valid directories + */ + synchronized List<String> getGoodDirs() { + return localDirs; + } + + /** + * @return the failed directories + */ + synchronized List<String> getFailedDirs() { + return failedDirs; + } + + /** + * @return total the number of directory failures seen till now + */ + synchronized int getNumFailures() { + return numFailures; + } + + /** + * Check the health of current set of local directories, updating the list + * of valid directories if necessary. + * @return <em>true</em> if there is a new disk-failure identified in + * this checking. <em>false</em> otherwise. + */ + synchronized boolean checkDirs() { + int oldNumFailures = numFailures; + ListIterator<String> it = localDirs.listIterator(); + while (it.hasNext()) { + final String dir = it.next(); + try { + DiskChecker.checkDir(new File(dir)); + } catch (DiskErrorException de) { + LOG.warn("Directory " + dir + " error " + + de.getMessage() + ", removing from the list of valid directories."); + it.remove(); + failedDirs.add(dir); + numFailures++; + } + } + if (numFailures > oldNumFailures) { + return true; + } + return false; + } +} diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java index 2ecf2b302e351..28f1247bb32eb 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java @@ -126,13 +126,18 @@ public void init() throws IOException { @Override public void startLocalizer(Path nmPrivateContainerTokensPath, InetSocketAddress nmAddr, String user, String appId, String locId, - List<Path> localDirs) throws IOException, InterruptedException { + List<String> localDirs, List<String> logDirs) + throws IOException, InterruptedException { + List<String> command = new ArrayList<String>( Arrays.asList(containerExecutorExe, user, Integer.toString(Commands.INITIALIZE_CONTAINER.getValue()), appId, - nmPrivateContainerTokensPath.toUri().getPath().toString())); + nmPrivateContainerTokensPath.toUri().getPath().toString(), + StringUtils.join(",", localDirs), + StringUtils.join(",", logDirs))); + File jvm = // use same jvm as parent new File(new 
File(System.getProperty("java.home"), "bin"), "java"); command.add(jvm.toString()); @@ -148,8 +153,8 @@ public void startLocalizer(Path nmPrivateContainerTokensPath, command.add(locId); command.add(nmAddr.getHostName()); command.add(Integer.toString(nmAddr.getPort())); - for (Path p : localDirs) { - command.add(p.toUri().getPath().toString()); + for (String dir : localDirs) { + command.add(dir); } String[] commandArray = command.toArray(new String[command.size()]); ShellCommandExecutor shExec = new ShellCommandExecutor(commandArray); @@ -174,7 +179,8 @@ public void startLocalizer(Path nmPrivateContainerTokensPath, @Override public int launchContainer(Container container, Path nmPrivateCotainerScriptPath, Path nmPrivateTokensPath, - String user, String appId, Path containerWorkDir) throws IOException { + String user, String appId, Path containerWorkDir, + List<String> localDirs, List<String> logDirs) throws IOException { ContainerId containerId = container.getContainerID(); String containerIdStr = ConverterUtils.toString(containerId); @@ -189,8 +195,10 @@ public int launchContainer(Container container, .toString(Commands.LAUNCH_CONTAINER.getValue()), appId, containerIdStr, containerWorkDir.toString(), nmPrivateCotainerScriptPath.toUri().getPath().toString(), - nmPrivateTokensPath.toUri().getPath().toString(), pidFilePath - .toString())); + nmPrivateTokensPath.toUri().getPath().toString(), + pidFilePath.toString(), + StringUtils.join(",", localDirs), + StringUtils.join(",", logDirs))); String[] commandArray = command.toArray(new String[command.size()]); shExec = new ShellCommandExecutor(commandArray, null, // NM's cwd container.getLaunchContext().getEnvironment()); // sanitized env diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LocalDirsHandlerService.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LocalDirsHandlerService.java new file mode 100644 index 0000000000000..1e143f6676498 --- /dev/null +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LocalDirsHandlerService.java @@ -0,0 +1,297 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.server.nodemanager; + +import java.io.IOException; +import java.util.List; +import java.util.Timer; +import java.util.TimerTask; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.LocalDirAllocator; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.util.StringUtils; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.service.AbstractService; + +/** + * The class which provides functionality of checking the health of the local + * directories of a node. This specifically manages nodemanager-local-dirs and + * nodemanager-log-dirs by periodically checking their health. + */ +public class LocalDirsHandlerService extends AbstractService { + + private static Log LOG = LogFactory.getLog(LocalDirsHandlerService.class); + + /** Timer used to schedule disk health monitoring code execution */ + private Timer dirsHandlerScheduler; + private long diskHealthCheckInterval; + private boolean isDiskHealthCheckerEnabled; + /** + * Minimum fraction of disks to be healthy for the node to be healthy in + * terms of disks. This applies to nm-local-dirs and nm-log-dirs. + */ + private float minNeededHealthyDisksFactor; + + private MonitoringTimerTask monitoringTimerTask; + + /** Local dirs to store localized files in */ + private DirectoryCollection localDirs = null; + + /** storage for container logs*/ + private DirectoryCollection logDirs = null; + + /** + * Everybody should go through this LocalDirAllocator object for read/write + * of any local path corresponding to {@link YarnConfiguration#NM_LOCAL_DIRS} + * instead of creating his/her own LocalDirAllocator objects + */ + private LocalDirAllocator localDirsAllocator; + /** + * Everybody should go through this LocalDirAllocator object for read/write + * of any local path corresponding to {@link YarnConfiguration#NM_LOG_DIRS} + * instead of creating his/her own LocalDirAllocator objects + */ + private LocalDirAllocator logDirsAllocator; + + /** when disk health checking code was last run */ + private long lastDisksCheckTime; + + /** + * Class which is used by the {@link Timer} class to periodically execute the + * disks' health checker code. + */ + private final class MonitoringTimerTask extends TimerTask { + + public MonitoringTimerTask(Configuration conf) { + localDirs = new DirectoryCollection( + conf.getTrimmedStrings(YarnConfiguration.NM_LOCAL_DIRS)); + logDirs = new DirectoryCollection( + conf.getTrimmedStrings(YarnConfiguration.NM_LOG_DIRS)); + localDirsAllocator = + new LocalDirAllocator(YarnConfiguration.NM_LOCAL_DIRS); + logDirsAllocator = new LocalDirAllocator(YarnConfiguration.NM_LOG_DIRS); + } + + @Override + public void run() { + boolean newFailure = false; + if (localDirs.checkDirs()) { + newFailure = true; + } + if (logDirs.checkDirs()) { + newFailure = true; + } + + if (newFailure) { + LOG.info("Disk(s) failed. " + getDisksHealthReport()); + updateDirsInConfiguration(); + if (!areDisksHealthy()) { + // Just log. + LOG.error("Most of the disks failed. " + getDisksHealthReport()); + } + } + lastDisksCheckTime = System.currentTimeMillis(); + } + } + + public LocalDirsHandlerService() { + super(LocalDirsHandlerService.class.getName()); + } + + /** + * Method which initializes the timertask and its interval time. 
+ */ + @Override + public void init(Configuration config) { + // Clone the configuration as we may do modifications to dirs-list + Configuration conf = new Configuration(config); + diskHealthCheckInterval = conf.getLong( + YarnConfiguration.NM_DISK_HEALTH_CHECK_INTERVAL_MS, + YarnConfiguration.DEFAULT_NM_DISK_HEALTH_CHECK_INTERVAL_MS); + monitoringTimerTask = new MonitoringTimerTask(conf); + isDiskHealthCheckerEnabled = conf.getBoolean( + YarnConfiguration.NM_DISK_HEALTH_CHECK_ENABLE, true); + minNeededHealthyDisksFactor = conf.getFloat( + YarnConfiguration.NM_MIN_HEALTHY_DISKS_FRACTION, + YarnConfiguration.DEFAULT_NM_MIN_HEALTHY_DISKS_FRACTION); + lastDisksCheckTime = System.currentTimeMillis(); + super.init(conf); + } + + /** + * Method used to start the disk health monitoring, if enabled. + */ + @Override + public void start() { + if (isDiskHealthCheckerEnabled) { + dirsHandlerScheduler = new Timer("DiskHealthMonitor-Timer", true); + // Start the timer task for disk health checking immediately and + // then run periodically at interval time. + dirsHandlerScheduler.scheduleAtFixedRate(monitoringTimerTask, 0, + diskHealthCheckInterval); + } + super.start(); + } + + /** + * Method used to terminate the disk health monitoring service. + */ + @Override + public void stop() { + if (dirsHandlerScheduler != null) { + dirsHandlerScheduler.cancel(); + } + super.stop(); + } + + /** + * @return the good/valid local directories based on disks' health + */ + public List<String> getLocalDirs() { + return localDirs.getGoodDirs(); + } + + /** + * @return the good/valid log directories based on disks' health + */ + public List<String> getLogDirs() { + return logDirs.getGoodDirs(); + } + + /** + * @return the health report of nm-local-dirs and nm-log-dirs + */ + public String getDisksHealthReport() { + if (!isDiskHealthCheckerEnabled) { + return ""; + } + + StringBuilder report = new StringBuilder(); + List<String> failedLocalDirsList = localDirs.getFailedDirs(); + List<String> failedLogDirsList = logDirs.getFailedDirs(); + int numLocalDirs = localDirs.getGoodDirs().size() + + failedLocalDirsList.size(); + int numLogDirs = logDirs.getGoodDirs().size() + failedLogDirsList.size(); + if (!failedLocalDirsList.isEmpty()) { + report.append(failedLocalDirsList.size() + "/" + numLocalDirs + + " local-dirs turned bad: " + + StringUtils.join(",", failedLocalDirsList) + ";"); + } + if (!failedLogDirsList.isEmpty()) { + report.append(failedLogDirsList.size() + "/" + numLogDirs + + " log-dirs turned bad: " + + StringUtils.join(",", failedLogDirsList)); + } + return report.toString(); + } + + /** + * The minimum fraction of number of disks needed to be healthy for a node to + * be considered healthy in terms of disks is configured using + * {@link YarnConfiguration#NM_MIN_HEALTHY_DISKS_FRACTION}, with a default + * value of {@link YarnConfiguration#DEFAULT_NM_MIN_HEALTHY_DISKS_FRACTION}. + * @return <em>false</em> if either (a) more than the allowed percentage of + * nm-local-dirs failed or (b) more than the allowed percentage of + * nm-log-dirs failed. 
+ */ + public boolean areDisksHealthy() { + if (!isDiskHealthCheckerEnabled) { + return true; + } + + int goodDirs = getLocalDirs().size(); + int failedDirs = localDirs.getFailedDirs().size(); + int totalConfiguredDirs = goodDirs + failedDirs; + if (goodDirs/(float)totalConfiguredDirs < minNeededHealthyDisksFactor) { + return false; // Not enough healthy local-dirs + } + + goodDirs = getLogDirs().size(); + failedDirs = logDirs.getFailedDirs().size(); + totalConfiguredDirs = goodDirs + failedDirs; + if (goodDirs/(float)totalConfiguredDirs < minNeededHealthyDisksFactor) { + return false; // Not enough healthy log-dirs + } + + return true; + } + + public long getLastDisksCheckTime() { + return lastDisksCheckTime; + } + + /** + * Set the good local dirs and good log dirs in the configuration so that + * the LocalDirAllocator objects will use only this updated configuration. + */ + private void updateDirsInConfiguration() { + Configuration conf = getConfig(); + List<String> localDirs = getLocalDirs(); + List<String> logDirs = getLogDirs(); + // All readers of these keys synchronize on the config object, so both + // dir lists are updated under the same lock. + synchronized (conf) { + conf.setStrings(YarnConfiguration.NM_LOCAL_DIRS, + localDirs.toArray(new String[localDirs.size()])); + conf.setStrings(YarnConfiguration.NM_LOG_DIRS, + logDirs.toArray(new String[logDirs.size()])); + } + } + + public Path getLocalPathForWrite(String pathStr) throws IOException { + Configuration conf = getConfig(); + Path path = null; + synchronized (conf) { + path = localDirsAllocator.getLocalPathForWrite(pathStr, conf); + } + return path; + } + + public Path getLocalPathForWrite(String pathStr, long size, + boolean checkWrite) throws IOException { + Configuration conf = getConfig(); + Path path = null; + synchronized (conf) { + path = localDirsAllocator.getLocalPathForWrite(pathStr, size, conf, + checkWrite); + } + return path; + } + + public Path getLogPathForWrite(String pathStr, boolean checkWrite) + throws IOException { + Configuration conf = getConfig(); + Path path = null; + synchronized (conf) { + path = logDirsAllocator.getLocalPathForWrite(pathStr, + LocalDirAllocator.SIZE_UNKNOWN, conf, checkWrite); + } + return path; + } + + public Path getLogPathToRead(String pathStr) throws IOException { + Configuration conf = getConfig(); + Path path = null; + synchronized (conf) { + path = logDirsAllocator.getLocalPathToRead(pathStr, conf); + } + return path; + } +}
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeHealthCheckerService.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeHealthCheckerService.java new file mode 100644 index 0000000000000..78e5a53685158 --- /dev/null +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeHealthCheckerService.java @@ -0,0 +1,97 @@ +/** +* Licensed to the Apache Software Foundation (ASF) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The ASF licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ + +package org.apache.hadoop.yarn.server.nodemanager; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.yarn.service.CompositeService; + +/** + * The class which provides functionality of checking the health of the node and + * reporting back to the service for which the health checker has been asked to + * report. + */ +public class NodeHealthCheckerService extends CompositeService { + + private NodeHealthScriptRunner nodeHealthScriptRunner; + private LocalDirsHandlerService dirsHandler; + + static final String SEPARATOR = ";"; + + public NodeHealthCheckerService() { + super(NodeHealthCheckerService.class.getName()); + dirsHandler = new LocalDirsHandlerService(); + } + + @Override + public void init(Configuration conf) { + if (NodeHealthScriptRunner.shouldRun(conf)) { + nodeHealthScriptRunner = new NodeHealthScriptRunner(); + addService(nodeHealthScriptRunner); + } + addService(dirsHandler); + super.init(conf); + } + + /** + * @return the health report string for the node + */ + String getHealthReport() { + String scriptReport = (nodeHealthScriptRunner == null) ? "" + : nodeHealthScriptRunner.getHealthReport(); + if (scriptReport.equals("")) { + return dirsHandler.getDisksHealthReport(); + } else { + return scriptReport.concat(SEPARATOR + dirsHandler.getDisksHealthReport()); + } + } + + /** + * @return <em>true</em> if the node is healthy + */ + boolean isHealthy() { + boolean scriptHealthStatus = (nodeHealthScriptRunner == null) ? true + : nodeHealthScriptRunner.isHealthy(); + return scriptHealthStatus && dirsHandler.areDisksHealthy(); + } + + /** + * @return the last time the node health status was reported + */ + long getLastHealthReportTime() { + long diskCheckTime = dirsHandler.getLastDisksCheckTime(); + long lastReportTime = (nodeHealthScriptRunner == null) + ? 
diskCheckTime + : Math.max(nodeHealthScriptRunner.getLastReportedTime(), diskCheckTime); + return lastReportTime; + } + + /** + * @return the disk handler + */ + public LocalDirsHandlerService getDiskHandler() { + return dirsHandler; + } + + /** + * @return the node health script runner + */ + NodeHealthScriptRunner getNodeHealthScriptRunner() { + return nodeHealthScriptRunner; + } +} diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/NodeHealthCheckerService.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeHealthScriptRunner.java similarity index 88% rename from hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/NodeHealthCheckerService.java rename to hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeHealthScriptRunner.java index b02e8b13ad5dc..0898bb284c214 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/NodeHealthCheckerService.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeHealthScriptRunner.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop; +package org.apache.hadoop.yarn.server.nodemanager; import java.io.File; import java.io.IOException; @@ -31,19 +31,18 @@ import org.apache.hadoop.util.Shell.ExitCodeException; import org.apache.hadoop.util.Shell.ShellCommandExecutor; import org.apache.hadoop.util.StringUtils; -import org.apache.hadoop.yarn.api.records.NodeHealthStatus; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.service.AbstractService; /** * - * The class which provides functionality of checking the health of the node and - * reporting back to the service for which the health checker has been asked to - * report. + * The class which provides functionality of checking the health of the node + * using the configured node health script and reporting back to the service + * for which the health checker has been asked to report. */ -public class NodeHealthCheckerService extends AbstractService { +public class NodeHealthScriptRunner extends AbstractService { - private static Log LOG = LogFactory.getLog(NodeHealthCheckerService.class); + private static Log LOG = LogFactory.getLog(NodeHealthScriptRunner.class); /** Absolute path to the health script. */ private String nodeHealthScript; @@ -74,7 +73,6 @@ public class NodeHealthCheckerService extends AbstractService { private TimerTask timer; - private enum HealthCheckerExitStatus { SUCCESS, TIMED_OUT, @@ -187,18 +185,13 @@ private boolean hasErrors(String output) { } } - public NodeHealthCheckerService() { - super(NodeHealthCheckerService.class.getName()); + public NodeHealthScriptRunner() { + super(NodeHealthScriptRunner.class.getName()); this.lastReportedTime = System.currentTimeMillis(); this.isHealthy = true; this.healthReport = ""; } - public NodeHealthCheckerService(Configuration conf) { - this(); - init(conf); - } - /* * Method which initializes the values for the script path and interval time. 
*/ @@ -257,12 +250,12 @@ public void stop() { * * @return true if node is healthy */ - private boolean isHealthy() { + public boolean isHealthy() { return isHealthy; } /** - * Sets if the node is healhty or not. + * Sets whether the node is healthy or not, considering disks' health too. * * @param isHealthy * if or not node is healthy @@ -277,13 +270,14 @@ private synchronized void setHealthy(boolean isHealthy) { * * @return output from health script */ - private String getHealthReport() { + public String getHealthReport() { return healthReport; } /** - * Sets the health report from the node health script. - * + * Sets the health report from the node health script. Also sets the disks' + * health info obtained from the LocalDirsHandlerService. + * * @param healthReport */ private synchronized void setHealthReport(String healthReport) { @@ -295,7 +289,7 @@ private synchronized void setHealthReport(String healthReport) { * * @return timestamp when node health script was last run */ - private long getLastReportedTime() { + public long getLastReportedTime() { return lastReportedTime; } @@ -340,27 +334,12 @@ private synchronized void setHealthStatus(boolean isHealthy, String output, this.setHealthStatus(isHealthy, output); this.setLastReportedTime(time); } - - /** - * Method to populate the fields for the {@link NodeHealthStatus} - * - * @param healthStatus - */ - public synchronized void setHealthStatus(NodeHealthStatus healthStatus) { - healthStatus.setIsNodeHealthy(this.isHealthy()); - healthStatus.setHealthReport(this.getHealthReport()); - healthStatus.setLastHealthReportTime(this.getLastReportedTime()); - } - + /** - * Test method to directly access the timer which node - * health checker would use. - * - * - * @return Timer task + * Used only by tests to access the timer task directly + * @return the timer task */ - //XXX:Not to be used directly. 
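With isHealthy(), getHealthReport() and getLastReportedTime() made public above, NodeHealthCheckerService can fold the script verdict and the disk verdict into a single node-health answer. The following self-contained sketch models that aggregation; the HealthSource interface and the wiring are invented for illustration, while the "&&" combination and the ";" separator mirror the NodeHealthCheckerService code added earlier in this patch.

interface HealthSource {
  boolean isHealthy();
  String report();
}

class CombinedHealth {
  static final String SEPARATOR = ";";

  // A null script source models the case where no health script is
  // configured; disk health is always consulted.
  static boolean isHealthy(HealthSource script, HealthSource disks) {
    boolean scriptOk = (script == null) || script.isHealthy();
    return scriptOk && disks.isHealthy();
  }

  static String report(HealthSource script, HealthSource disks) {
    String scriptReport = (script == null) ? "" : script.report();
    return scriptReport.equals("")
        ? disks.report()
        : scriptReport + SEPARATOR + disks.report();
  }
}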
- TimerTask getTimer() { + TimerTask getTimerTask() { return timer; } } diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java index 94971d365e73f..439b5e37a5740 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java @@ -25,7 +25,6 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.NodeHealthCheckerService; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.security.SecurityUtil; @@ -59,6 +58,8 @@ public class NodeManager extends CompositeService implements protected final NodeManagerMetrics metrics = NodeManagerMetrics.create(); protected ContainerTokenSecretManager containerTokenSecretManager; private ApplicationACLsManager aclsManager; + private NodeHealthCheckerService nodeHealthChecker; + private LocalDirsHandlerService dirsHandler; public NodeManager() { super(NodeManager.class.getName()); @@ -78,14 +79,16 @@ protected NodeResourceMonitor createNodeResourceMonitor() { protected ContainerManagerImpl createContainerManager(Context context, ContainerExecutor exec, DeletionService del, NodeStatusUpdater nodeStatusUpdater, ContainerTokenSecretManager - containerTokenSecretManager, ApplicationACLsManager aclsManager) { + containerTokenSecretManager, ApplicationACLsManager aclsManager, + LocalDirsHandlerService dirsHandler) { return new ContainerManagerImpl(context, exec, del, nodeStatusUpdater, - metrics, containerTokenSecretManager, aclsManager); + metrics, containerTokenSecretManager, aclsManager, dirsHandler); } protected WebServer createWebServer(Context nmContext, - ResourceView resourceView, ApplicationACLsManager aclsManager) { - return new WebServer(nmContext, resourceView, aclsManager); + ResourceView resourceView, ApplicationACLsManager aclsManager, + LocalDirsHandlerService dirsHandler) { + return new WebServer(nmContext, resourceView, aclsManager, dirsHandler); } protected void doSecureLogin() throws IOException { @@ -121,16 +124,12 @@ public void init(Configuration conf) { // NodeManager level dispatcher AsyncDispatcher dispatcher = new AsyncDispatcher(); - NodeHealthCheckerService healthChecker = null; - if (NodeHealthCheckerService.shouldRun(conf)) { - healthChecker = new NodeHealthCheckerService(); - addService(healthChecker); - } + nodeHealthChecker = new NodeHealthCheckerService(); + addService(nodeHealthChecker); + dirsHandler = nodeHealthChecker.getDiskHandler(); - NodeStatusUpdater nodeStatusUpdater = - createNodeStatusUpdater(context, dispatcher, healthChecker, - this.containerTokenSecretManager); - + NodeStatusUpdater nodeStatusUpdater = createNodeStatusUpdater(context, + dispatcher, nodeHealthChecker, this.containerTokenSecretManager); nodeStatusUpdater.register(this); NodeResourceMonitor nodeResourceMonitor = createNodeResourceMonitor(); @@ -138,11 +137,11 @@ public void init(Configuration conf) { ContainerManagerImpl containerManager = createContainerManager(context, 
exec, del, nodeStatusUpdater, - this.containerTokenSecretManager, this.aclsManager); + this.containerTokenSecretManager, this.aclsManager, dirsHandler); addService(containerManager); Service webServer = createWebServer(context, containerManager - .getContainersMonitor(), this.aclsManager); + .getContainersMonitor(), this.aclsManager, dirsHandler); addService(webServer); dispatcher.register(ContainerManagerEventType.class, containerManager); @@ -215,7 +214,14 @@ public NodeHealthStatus getNodeHealthStatus() { } } - + + /** + * @return the node health checker + */ + public NodeHealthCheckerService getNodeHealthChecker() { + return nodeHealthChecker; + } + @Override public void stateChanged(Service service) { // Shutdown the Nodemanager when the NodeStatusUpdater is stopped. diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java index 94396088cac2f..6da70f150233f 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java @@ -27,7 +27,6 @@ import org.apache.avro.AvroRuntimeException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.NodeHealthCheckerService; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.UserGroupInformation; @@ -222,11 +221,14 @@ private NodeStatus getNodeStatus() { + numActiveContainers + " containers"); NodeHealthStatus nodeHealthStatus = this.context.getNodeHealthStatus(); - if (this.healthChecker != null) { - this.healthChecker.setHealthStatus(nodeHealthStatus); + nodeHealthStatus.setHealthReport(healthChecker.getHealthReport()); + nodeHealthStatus.setIsNodeHealthy(healthChecker.isHealthy()); + nodeHealthStatus.setLastHealthReportTime( + healthChecker.getLastHealthReportTime()); + if (LOG.isDebugEnabled()) { + LOG.debug("Node's health-status : " + nodeHealthStatus.getIsNodeHealthy() + + ", " + nodeHealthStatus.getHealthReport()); } - LOG.debug("Node's health-status : " + nodeHealthStatus.getIsNodeHealthy() - + ", " + nodeHealthStatus.getHealthReport()); nodeStatus.setNodeHealthStatus(nodeHealthStatus); return nodeStatus; diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java index 5e3eb26cb5dbe..615b825c4f377 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java @@ -68,6 +68,7 @@ import 
org.apache.hadoop.yarn.server.nodemanager.ContainerManagerEvent; import org.apache.hadoop.yarn.server.nodemanager.Context; import org.apache.hadoop.yarn.server.nodemanager.DeletionService; +import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService; import org.apache.hadoop.yarn.server.nodemanager.NMAuditLogger; import org.apache.hadoop.yarn.server.nodemanager.NMAuditLogger.AuditConstants; import org.apache.hadoop.yarn.server.nodemanager.NodeStatusUpdater; @@ -120,7 +121,8 @@ public class ContainerManagerImpl extends CompositeService implements private ContainerTokenSecretManager containerTokenSecretManager; private final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null); - + + protected LocalDirsHandlerService dirsHandler; protected final AsyncDispatcher dispatcher; private final ApplicationACLsManager aclsManager; @@ -129,9 +131,12 @@ public class ContainerManagerImpl extends CompositeService implements public ContainerManagerImpl(Context context, ContainerExecutor exec, DeletionService deletionContext, NodeStatusUpdater nodeStatusUpdater, NodeManagerMetrics metrics, ContainerTokenSecretManager - containerTokenSecretManager, ApplicationACLsManager aclsManager) { + containerTokenSecretManager, ApplicationACLsManager aclsManager, + LocalDirsHandlerService dirsHandler) { super(ContainerManagerImpl.class.getName()); this.context = context; + this.dirsHandler = dirsHandler; + dispatcher = new AsyncDispatcher(); this.deletionService = deletionContext; this.metrics = metrics; @@ -190,9 +195,10 @@ protected LogHandler createLogHandler(Configuration conf, Context context, if (conf.getBoolean(YarnConfiguration.NM_LOG_AGGREGATION_ENABLED, YarnConfiguration.DEFAULT_NM_LOG_AGGREGATION_ENABLED)) { return new LogAggregationService(this.dispatcher, context, - deletionService); + deletionService, dirsHandler); } else { - return new NonAggregatingLogHandler(this.dispatcher, deletionService); + return new NonAggregatingLogHandler(this.dispatcher, deletionService, + dirsHandler); } } @@ -203,12 +209,12 @@ public ContainersMonitor getContainersMonitor() { protected ResourceLocalizationService createResourceLocalizationService( ContainerExecutor exec, DeletionService deletionContext) { return new ResourceLocalizationService(this.dispatcher, exec, - deletionContext); + deletionContext, dirsHandler); } protected ContainersLauncher createContainersLauncher(Context context, ContainerExecutor exec) { - return new ContainersLauncher(context, this.dispatcher, exec); + return new ContainersLauncher(context, this.dispatcher, exec, dirsHandler); } @Override diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerExitEvent.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerExitEvent.java index b9416886f691e..7a2fc2f41626a 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerExitEvent.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerExitEvent.java @@ -22,14 +22,20 @@ public class ContainerExitEvent extends ContainerEvent { private int 
exitCode; + private final String diagnosticInfo; public ContainerExitEvent(ContainerId cID, ContainerEventType eventType, - int exitCode) { + int exitCode, String diagnosticInfo) { super(cID, eventType); this.exitCode = exitCode; + this.diagnosticInfo = diagnosticInfo; } public int getExitCode() { return this.exitCode; } + + public String getDiagnosticInfo() { + return diagnosticInfo; + } } diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java index f7fd522f811ab..15de5d2749b0b 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java @@ -50,6 +50,7 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.Dispatcher; import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor; +import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService; import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor.DelayedProcessKiller; import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor.ExitCode; import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor.Signal; @@ -78,7 +79,6 @@ public class ContainerLaunch implements Callable<Integer> { private final Application app; private final Container container; private final Configuration conf; - private final LocalDirAllocator logDirsSelector; private volatile AtomicBoolean shouldLaunchContainer = new AtomicBoolean(false); private volatile AtomicBoolean completed = new AtomicBoolean(false); @@ -88,14 +88,17 @@ public class ContainerLaunch implements Callable<Integer> { private Path pidFilePath = null; + private final LocalDirsHandlerService dirsHandler; + public ContainerLaunch(Configuration configuration, Dispatcher dispatcher, - ContainerExecutor exec, Application app, Container container) { + ContainerExecutor exec, Application app, Container container, + LocalDirsHandlerService dirsHandler) { this.conf = configuration; this.app = app; this.exec = exec; this.container = container; this.dispatcher = dispatcher; - this.logDirsSelector = new LocalDirAllocator(YarnConfiguration.NM_LOG_DIRS); + this.dirsHandler = dirsHandler; this.sleepDelayBeforeSigKill = conf.getLong(YarnConfiguration.NM_SLEEP_DELAY_BEFORE_SIGKILL_MS, YarnConfiguration.DEFAULT_NM_SLEEP_DELAY_BEFORE_SIGKILL_MS); @@ -121,9 +124,8 @@ public Integer call() { List<String> newCmds = new ArrayList<String>(command.size()); String appIdStr = app.getAppId().toString(); Path containerLogDir = - this.logDirsSelector.getLocalPathForWrite(ContainerLaunch - .getRelativeContainerLogDir(appIdStr, containerIdStr), - LocalDirAllocator.SIZE_UNKNOWN, this.conf, false); + dirsHandler.getLogPathForWrite(ContainerLaunch + .getRelativeContainerLogDir(appIdStr, containerIdStr), false); for (String str : command) { // TODO: Should we instead work via symlinks without this grammar? 
newCmds.add(str.replace(ApplicationConstants.LOG_DIR_EXPANSION_VAR, @@ -144,47 +146,49 @@ public Integer call() { // /////////////////////////// End of variable expansion FileContext lfs = FileContext.getLocalFSFileContext(); - LocalDirAllocator lDirAllocator = - new LocalDirAllocator(YarnConfiguration.NM_LOCAL_DIRS); // TODO Path nmPrivateContainerScriptPath = - lDirAllocator.getLocalPathForWrite( + dirsHandler.getLocalPathForWrite( getContainerPrivateDir(appIdStr, containerIdStr) + Path.SEPARATOR - + CONTAINER_SCRIPT, this.conf); + + CONTAINER_SCRIPT); Path nmPrivateTokensPath = - lDirAllocator.getLocalPathForWrite( + dirsHandler.getLocalPathForWrite( getContainerPrivateDir(appIdStr, containerIdStr) + Path.SEPARATOR + String.format(ContainerLocalizer.TOKEN_FILE_NAME_FMT, - containerIdStr), this.conf); + containerIdStr)); DataOutputStream containerScriptOutStream = null; DataOutputStream tokensOutStream = null; // Select the working directory for the container Path containerWorkDir = - lDirAllocator.getLocalPathForWrite(ContainerLocalizer.USERCACHE + dirsHandler.getLocalPathForWrite(ContainerLocalizer.USERCACHE + Path.SEPARATOR + user + Path.SEPARATOR + ContainerLocalizer.APPCACHE + Path.SEPARATOR + appIdStr + Path.SEPARATOR + containerIdStr, - LocalDirAllocator.SIZE_UNKNOWN, this.conf, false); + LocalDirAllocator.SIZE_UNKNOWN, false); String pidFileSuffix = String.format(ContainerLaunch.PID_FILE_NAME_FMT, containerIdStr); // pid file should be in nm private dir so that it is not // accessible by users - pidFilePath = lDirAllocator.getLocalPathForWrite( + pidFilePath = dirsHandler.getLocalPathForWrite( ResourceLocalizationService.NM_PRIVATE_DIR + Path.SEPARATOR - + pidFileSuffix, - this.conf); + + pidFileSuffix); + List<String> localDirs = dirsHandler.getLocalDirs(); + List<String> logDirs = dirsHandler.getLogDirs(); + + if (!dirsHandler.areDisksHealthy()) { + ret = ExitCode.DISKS_FAILED.getExitCode(); + throw new IOException("Most of the disks failed. " + + dirsHandler.getDisksHealthReport()); + } try { // /////////// Write out the container-script in the nmPrivate space. 
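The guard just above makes a launch fail fast with ExitCode.DISKS_FAILED when too many disks are bad. That decision reduces to the ratio test in LocalDirsHandlerService.areDisksHealthy(): a dir set stays healthy while goodDirs / (goodDirs + failedDirs) is at least the configured minimum fraction. Below is a standalone sketch of that test with worked numbers; the 0.25 threshold is an assumed default and is not taken from this diff.

public class DiskHealthDemo {
  // Mirrors the areDisksHealthy() arithmetic: healthy while the good
  // fraction has not dropped below minFraction. The total == 0 guard is
  // added here only to keep the sketch safe for empty dir lists.
  static boolean enoughHealthy(int good, int failed, float minFraction) {
    int total = good + failed;
    return total == 0 || good / (float) total >= minFraction;
  }

  public static void main(String[] args) {
    System.out.println(enoughHealthy(1, 3, 0.25f)); // 0.25 >= 0.25 -> true
    System.out.println(enoughHealthy(0, 4, 0.25f)); // 0.00 <  0.25 -> false
  }
}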
- String[] localDirs = - this.conf.getStrings(YarnConfiguration.NM_LOCAL_DIRS, - YarnConfiguration.DEFAULT_NM_LOCAL_DIRS); - List<Path> appDirs = new ArrayList<Path>(localDirs.length); + List<Path> appDirs = new ArrayList<Path>(localDirs.size()); for (String localDir : localDirs) { Path usersdir = new Path(localDir, ContainerLocalizer.USERCACHE); Path userdir = new Path(usersdir, user); @@ -234,30 +238,34 @@ public Integer call() { } else { exec.activateContainer(containerID, pidFilePath); - ret = - exec.launchContainer(container, nmPrivateContainerScriptPath, - nmPrivateTokensPath, user, appIdStr, containerWorkDir); + ret = exec.launchContainer(container, nmPrivateContainerScriptPath, + nmPrivateTokensPath, user, appIdStr, containerWorkDir, + localDirs, logDirs); } } catch (Throwable e) { - LOG.warn("Failed to launch container", e); + LOG.warn("Failed to launch container.", e); dispatcher.getEventHandler().handle(new ContainerExitEvent( launchContext.getContainerId(), - ContainerEventType.CONTAINER_EXITED_WITH_FAILURE, ret)); + ContainerEventType.CONTAINER_EXITED_WITH_FAILURE, ret, + e.getMessage())); return ret; } finally { completed.set(true); exec.deactivateContainer(containerID); } - LOG.debug("Container " + containerIdStr + " completed with exit code " - + ret); + if (LOG.isDebugEnabled()) { + LOG.debug("Container " + containerIdStr + " completed with exit code " + + ret); + } if (ret == ExitCode.FORCE_KILLED.getExitCode() || ret == ExitCode.TERMINATED.getExitCode()) { // If the process was killed, Send container_cleanedup_after_kill and // just break out of this method. dispatcher.getEventHandler().handle( new ContainerExitEvent(launchContext.getContainerId(), - ContainerEventType.CONTAINER_KILLED_ON_REQUEST, ret)); + ContainerEventType.CONTAINER_KILLED_ON_REQUEST, ret, + "Container exited with a non-zero exit code " + ret)); return ret; } @@ -265,7 +273,8 @@ public Integer call() { LOG.warn("Container exited with a non-zero exit code " + ret); this.dispatcher.getEventHandler().handle(new ContainerExitEvent( launchContext.getContainerId(), - ContainerEventType.CONTAINER_EXITED_WITH_FAILURE, ret)); + ContainerEventType.CONTAINER_EXITED_WITH_FAILURE, ret, + "Container exited with a non-zero exit code " + ret)); return ret; } diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainersLauncher.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainersLauncher.java index 8f8bfc76885ae..1e3c18b971e9b 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainersLauncher.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainersLauncher.java @@ -33,10 +33,10 @@ import org.apache.hadoop.fs.UnsupportedFileSystemException; import org.apache.hadoop.yarn.YarnException; import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.Dispatcher; import org.apache.hadoop.yarn.event.EventHandler; import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor; +import 
org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService; import org.apache.hadoop.yarn.server.nodemanager.Context; import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application; import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container; @@ -59,6 +59,8 @@ public class ContainersLauncher extends AbstractService private final Context context; private final ContainerExecutor exec; private final Dispatcher dispatcher; + + private LocalDirsHandlerService dirsHandler; private final ExecutorService containerLauncher = Executors.newCachedThreadPool( new ThreadFactoryBuilder() @@ -80,11 +82,12 @@ public RunningContainer(Future<Integer> submit, public ContainersLauncher(Context context, Dispatcher dispatcher, - ContainerExecutor exec) { + ContainerExecutor exec, LocalDirsHandlerService dirsHandler) { super("containers-launcher"); this.exec = exec; this.context = context; this.dispatcher = dispatcher; + this.dirsHandler = dirsHandler; } @Override @@ -114,15 +117,19 @@ public void handle(ContainersLauncherEvent event) { Application app = context.getApplications().get( containerId.getApplicationAttemptId().getApplicationId()); - ContainerLaunch launch = - new ContainerLaunch(getConfig(), dispatcher, exec, app, - event.getContainer()); + + ContainerLaunch launch = new ContainerLaunch(getConfig(), dispatcher, + exec, app, event.getContainer(), dirsHandler); running.put(containerId, new RunningContainer(containerLauncher.submit(launch), launch)); break; case CLEANUP_CONTAINER: RunningContainer rContainerDatum = running.remove(containerId); + if (rContainerDatum == null) { + // Container not launched. So nothing needs to be done. + return; + } Future<Integer> rContainer = rContainerDatum.runningcontainer; if (rContainer != null && !rContainer.isDone()) { diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java index 392128733fbef..4e03fa2a5a185 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java @@ -45,12 +45,10 @@ import org.apache.hadoop.fs.LocalDirAllocator; import org.apache.hadoop.fs.Path; import org.apache.hadoop.security.Credentials; -import org.apache.hadoop.security.SecurityInfo; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.yarn.api.records.LocalResource; -import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnRemoteException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; @@ -61,7 +59,6 @@ import org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.LocalizerHeartbeatResponse; import org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.LocalizerStatus; import 
org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.ResourceStatusType; -import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.security.LocalizerSecurityInfo; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.security.LocalizerTokenIdentifier; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.security.LocalizerTokenSecretManager; import org.apache.hadoop.yarn.util.ConverterUtils; @@ -186,16 +183,30 @@ ExecutorService createDownloadThreadPool() { } Callable<Path> download(LocalDirAllocator lda, LocalResource rsrc, - UserGroupInformation ugi) { - return new FSDownload(lfs, ugi, conf, lda, rsrc, new Random()); + UserGroupInformation ugi) throws IOException { + Path destPath = lda.getLocalPathForWrite(".", getEstimatedSize(rsrc), conf); + return new FSDownload(lfs, ugi, conf, destPath, rsrc, new Random()); + } + + static long getEstimatedSize(LocalResource rsrc) { + if (rsrc.getSize() < 0) { + return -1; + } + switch (rsrc.getType()) { + case ARCHIVE: + return 5 * rsrc.getSize(); + case FILE: + default: + return rsrc.getSize(); + } } void sleep(int duration) throws InterruptedException { TimeUnit.SECONDS.sleep(duration); } - private void localizeFiles(LocalizationProtocol nodemanager, ExecutorService exec, - UserGroupInformation ugi) { + private void localizeFiles(LocalizationProtocol nodemanager, + ExecutorService exec, UserGroupInformation ugi) throws IOException { while (true) { try { LocalizerStatus status = createStatus(); diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java index 9ec83cdbc5553..744c2b1990098 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java @@ -57,7 +57,6 @@ import java.io.IOException; import java.net.InetSocketAddress; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; @@ -68,7 +67,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.fs.FileContext; -import org.apache.hadoop.fs.LocalDirAllocator; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.net.NetUtils; @@ -81,6 +79,7 @@ import org.apache.hadoop.yarn.ipc.YarnRPC; import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor; import org.apache.hadoop.yarn.server.nodemanager.DeletionService; +import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService; import org.apache.hadoop.yarn.server.nodemanager.api.LocalizationProtocol; import org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.LocalResourceStatus; import org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.LocalizerAction; @@ -125,19 +124,18 @@ public 
class ResourceLocalizationService extends CompositeService private InetSocketAddress localizationServerAddress; private long cacheTargetSize; private long cacheCleanupPeriod; - private List<Path> logDirs; - private List<Path> localDirs; - private List<Path> sysDirs; + private final ContainerExecutor exec; protected final Dispatcher dispatcher; private final DeletionService delService; private LocalizerTracker localizerTracker; private RecordFactory recordFactory; - private final LocalDirAllocator localDirsSelector; private final ScheduledExecutorService cacheCleanup; private final LocalResourcesTracker publicRsrc; - + + private LocalDirsHandlerService dirsHandler; + /** * Map of LocalResourceTrackers keyed by username, for private * resources. @@ -153,12 +151,15 @@ public class ResourceLocalizationService extends CompositeService new ConcurrentHashMap<String,LocalResourcesTracker>(); public ResourceLocalizationService(Dispatcher dispatcher, - ContainerExecutor exec, DeletionService delService) { + ContainerExecutor exec, DeletionService delService, + LocalDirsHandlerService dirsHandler) { + super(ResourceLocalizationService.class.getName()); this.exec = exec; this.dispatcher = dispatcher; this.delService = delService; - this.localDirsSelector = new LocalDirAllocator(YarnConfiguration.NM_LOCAL_DIRS); + this.dirsHandler = dirsHandler; + this.publicRsrc = new LocalResourcesTrackerImpl(null, dispatcher); this.cacheCleanup = new ScheduledThreadPoolExecutor(1, new ThreadFactoryBuilder() @@ -177,41 +178,31 @@ FileContext getLocalFileContext(Configuration conf) { @Override public void init(Configuration conf) { this.recordFactory = RecordFactoryProvider.getRecordFactory(conf); + try { // TODO queue deletions here, rather than NM init? FileContext lfs = getLocalFileContext(conf); - String[] sLocalDirs = - conf.getStrings(YarnConfiguration.NM_LOCAL_DIRS, YarnConfiguration.DEFAULT_NM_LOCAL_DIRS); - - localDirs = new ArrayList<Path>(sLocalDirs.length); - logDirs = new ArrayList<Path>(sLocalDirs.length); - sysDirs = new ArrayList<Path>(sLocalDirs.length); - for (String sLocaldir : sLocalDirs) { - Path localdir = new Path(sLocaldir); - localDirs.add(localdir); + List<String> localDirs = dirsHandler.getLocalDirs(); + for (String localDir : localDirs) { // $local/usercache - Path userdir = new Path(localdir, ContainerLocalizer.USERCACHE); - lfs.mkdir(userdir, null, true); + Path userDir = new Path(localDir, ContainerLocalizer.USERCACHE); + lfs.mkdir(userDir, null, true); // $local/filecache - Path filedir = new Path(localdir, ContainerLocalizer.FILECACHE); - lfs.mkdir(filedir, null, true); + Path fileDir = new Path(localDir, ContainerLocalizer.FILECACHE); + lfs.mkdir(fileDir, null, true); // $local/nmPrivate - Path sysdir = new Path(localdir, NM_PRIVATE_DIR); - lfs.mkdir(sysdir, NM_PRIVATE_PERM, true); - sysDirs.add(sysdir); + Path sysDir = new Path(localDir, NM_PRIVATE_DIR); + lfs.mkdir(sysDir, NM_PRIVATE_PERM, true); } - String[] sLogdirs = conf.getStrings(YarnConfiguration.NM_LOG_DIRS, YarnConfiguration.DEFAULT_NM_LOG_DIRS); - for (String sLogdir : sLogdirs) { - Path logdir = new Path(sLogdir); - logDirs.add(logdir); - lfs.mkdir(logdir, null, true); + + List<String> logDirs = dirsHandler.getLogDirs(); + for (String logDir : logDirs) { + lfs.mkdir(new Path(logDir), null, true); } } catch (IOException e) { throw new YarnException("Failed to initialize LocalizationService", e); } - localDirs = Collections.unmodifiableList(localDirs); - logDirs = Collections.unmodifiableList(logDirs); - sysDirs = 
Collections.unmodifiableList(sysDirs); + cacheTargetSize = conf.getLong(YarnConfiguration.NM_LOCALIZER_CACHE_TARGET_SIZE_MB, YarnConfiguration.DEFAULT_NM_LOCALIZER_CACHE_TARGET_SIZE_MB) << 20; cacheCleanupPeriod = @@ -391,7 +382,7 @@ private void handleCleanupContainerResources( String containerIDStr = c.toString(); String appIDStr = ConverterUtils.toString( c.getContainerID().getApplicationAttemptId().getApplicationId()); - for (Path localDir : localDirs) { + for (String localDir : dirsHandler.getLocalDirs()) { // Delete the user-owned container-dir Path usersdir = new Path(localDir, ContainerLocalizer.USERCACHE); @@ -428,7 +419,7 @@ private void handleDestroyApplicationResources(Application application) { // Delete the application directories userName = application.getUser(); appIDStr = application.toString(); - for (Path localDir : localDirs) { + for (String localDir : dirsHandler.getLocalDirs()) { // Delete the user-owned app-dir Path usersdir = new Path(localDir, ContainerLocalizer.USERCACHE); @@ -574,12 +565,9 @@ private static ExecutorService createLocalizerExecutor(Configuration conf) { class PublicLocalizer extends Thread { - static final String PUBCACHE_CTXT = "public.cache.dirs"; - final FileContext lfs; final Configuration conf; final ExecutorService threadPool; - final LocalDirAllocator publicDirs; final CompletionService<Path> queue; final Map<Future<Path>,LocalizerResourceRequestEvent> pending; // TODO hack to work around broken signaling @@ -601,13 +589,7 @@ class PublicLocalizer extends Thread { this.conf = conf; this.pending = pending; this.attempts = attempts; - String[] publicFilecache = new String[localDirs.size()]; - for (int i = 0, n = localDirs.size(); i < n; ++i) { - publicFilecache[i] = - new Path(localDirs.get(i), ContainerLocalizer.FILECACHE).toString(); - } - conf.setStrings(PUBCACHE_CTXT, publicFilecache); - this.publicDirs = new LocalDirAllocator(PUBCACHE_CTXT); + this.threadPool = threadPool; this.queue = new ExecutorCompletionService<Path>(threadPool); }
@@ -619,11 +617,19 @@ public void addResource(LocalizerResourceRequestEvent request) { synchronized (attempts) { List<LocalizerResourceRequestEvent> sigh = attempts.get(key); if (null == sigh) { - pending.put(queue.submit(new FSDownload( - lfs, null, conf, publicDirs, - request.getResource().getRequest(), new Random())), - request); - attempts.put(key, new LinkedList<LocalizerResourceRequestEvent>()); + LocalResource resource = request.getResource().getRequest(); + try { + Path publicDirDestPath = dirsHandler.getLocalPathForWrite( + "." + Path.SEPARATOR + ContainerLocalizer.FILECACHE, + ContainerLocalizer.getEstimatedSize(resource), true); + pending.put(queue.submit(new FSDownload( + lfs, null, conf, publicDirDestPath, resource, new Random())), + request); + attempts.put(key, new LinkedList<LocalizerResourceRequestEvent>()); + } catch (IOException e) { + LOG.error("Local path for public localization is not found. " + + "Maybe the disks failed.", e); + } } else { sigh.add(request); } @@ -844,24 +850,30 @@ LocalizerHeartbeatResponse update( public void run() { Path nmPrivateCTokensPath = null; try { - // Use LocalDirAllocator to get nmPrivateDir + // Get nmPrivateDir nmPrivateCTokensPath = - localDirsSelector.getLocalPathForWrite( - NM_PRIVATE_DIR - + Path.SEPARATOR + dirsHandler.getLocalPathForWrite( + NM_PRIVATE_DIR + Path.SEPARATOR + String.format(ContainerLocalizer.TOKEN_FILE_NAME_FMT, - localizerId), getConfig()); + localizerId)); // 0) init queue, etc. // 1) write credentials to private dir writeCredentials(nmPrivateCTokensPath); // 2) exec initApplication and wait - exec.startLocalizer(nmPrivateCTokensPath, localizationServerAddress, - context.getUser(), - ConverterUtils.toString( - context.getContainerId(). - getApplicationAttemptId().getApplicationId()), - localizerId, localDirs); + List<String> localDirs = dirsHandler.getLocalDirs(); + List<String> logDirs = dirsHandler.getLogDirs(); + if (dirsHandler.areDisksHealthy()) { + exec.startLocalizer(nmPrivateCTokensPath, localizationServerAddress, + context.getUser(), + ConverterUtils.toString( + context.getContainerId(). + getApplicationAttemptId().getApplicationId()), + localizerId, localDirs, logDirs); + } else { + throw new IOException("All disks failed. " + + dirsHandler.getDisksHealthReport()); + } // TODO handle ExitCodeException separately? 
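The public-localizer change above hands FSDownload a pre-computed destination path whose free-space requirement comes from ContainerLocalizer.getEstimatedSize(...), shown in an earlier hunk: archives reserve five times their on-wire size because they are unpacked after download. The sketch below replays that heuristic with sample values; the enum and class are illustrative stand-ins, and treating -1 as "skip the space check" is an assumption about LocalDirAllocator.SIZE_UNKNOWN.

public class EstimatedSizeDemo {
  enum Type { FILE, ARCHIVE }

  // Mirrors getEstimatedSize(...): unknown sizes pass through as -1,
  // archives are padded 5x for unpacking, plain files use their real size.
  static long estimatedSize(long size, Type type) {
    if (size < 0) {
      return -1;
    }
    switch (type) {
      case ARCHIVE:
        return 5 * size;
      case FILE:
      default:
        return size;
    }
  }

  public static void main(String[] args) {
    System.out.println(estimatedSize(100, Type.ARCHIVE)); // 500
    System.out.println(estimatedSize(100, Type.FILE));    // 100
    System.out.println(estimatedSize(-1, Type.ARCHIVE));  // -1
  }
}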
} catch (Exception e) { LOG.info("Localizer failed", e); diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java index c41162bbec05e..5cfcc0d2ea110 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java @@ -20,6 +20,7 @@ import java.io.IOException; import java.security.PrivilegedExceptionAction; +import java.util.List; import java.util.Map; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; @@ -31,6 +32,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationAccessType; import org.apache.hadoop.yarn.api.records.ContainerId; @@ -40,10 +42,12 @@ import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogValue; import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogWriter; import org.apache.hadoop.yarn.server.nodemanager.DeletionService; +import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService; import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEventType; import org.apache.hadoop.yarn.util.ConverterUtils; + public class AppLogAggregatorImpl implements AppLogAggregator { private static final Log LOG = LogFactory @@ -51,6 +55,7 @@ public class AppLogAggregatorImpl implements AppLogAggregator { private static final int THREAD_SLEEP_TIME = 1000; private static final String TMP_FILE_SUFFIX = ".tmp"; + private final LocalDirsHandlerService dirsHandler; private final Dispatcher dispatcher; private final ApplicationId appId; private final String applicationId; @@ -58,7 +63,6 @@ public class AppLogAggregatorImpl implements AppLogAggregator { private final Configuration conf; private final DeletionService delService; private final UserGroupInformation userUgi; - private final String[] rootLogDirs; private final Path remoteNodeLogFileForApp; private final Path remoteNodeTmpLogFileForApp; private final ContainerLogsRetentionPolicy retentionPolicy; @@ -72,7 +76,7 @@ public class AppLogAggregatorImpl implements AppLogAggregator { public AppLogAggregatorImpl(Dispatcher dispatcher, DeletionService deletionService, Configuration conf, ApplicationId appId, - UserGroupInformation userUgi, String[] localRootLogDirs, + UserGroupInformation userUgi, LocalDirsHandlerService dirsHandler, Path remoteNodeLogFileForApp, ContainerLogsRetentionPolicy retentionPolicy, Map<ApplicationAccessType, String> appAcls) { @@ -82,7 +86,7 @@ public AppLogAggregatorImpl(Dispatcher dispatcher, this.appId = appId; this.applicationId = ConverterUtils.toString(appId); this.userUgi = 
userUgi; - this.rootLogDirs = localRootLogDirs; + this.dirsHandler = dirsHandler; this.remoteNodeLogFileForApp = remoteNodeLogFileForApp; this.remoteNodeTmpLogFileForApp = getRemoteNodeTmpLogFileForApp(); this.retentionPolicy = retentionPolicy; @@ -115,9 +119,11 @@ private void uploadLogsForContainer(ContainerId containerId) { } } - LOG.info("Uploading logs for container " + containerId); + LOG.info("Uploading logs for container " + containerId + + ". Current good log dirs are " + + StringUtils.join(",", dirsHandler.getLogDirs())); LogKey logKey = new LogKey(containerId); - LogValue logValue = new LogValue(this.rootLogDirs, containerId); + LogValue logValue = new LogValue(dirsHandler.getLogDirs(), containerId); try { this.writer.append(logKey, logValue); } catch (IOException e) { @@ -150,9 +156,10 @@ public void run() { } // Remove the local app-log-dirs - Path[] localAppLogDirs = new Path[this.rootLogDirs.length]; + List<String> rootLogDirs = dirsHandler.getLogDirs(); + Path[] localAppLogDirs = new Path[rootLogDirs.size()]; int index = 0; - for (String rootLogDir : this.rootLogDirs) { + for (String rootLogDir : rootLogDirs) { localAppLogDirs[index] = new Path(rootLogDir, this.applicationId); index++; } diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java index 95885d4e07e9d..173bc95943a2c 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java @@ -47,6 +47,7 @@ import org.apache.hadoop.yarn.logaggregation.LogAggregationUtils; import org.apache.hadoop.yarn.server.nodemanager.Context; import org.apache.hadoop.yarn.server.nodemanager.DeletionService; +import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService; import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.LogHandler; import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerAppFinishedEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerAppStartedEvent; @@ -85,7 +86,7 @@ public class LogAggregationService extends AbstractService implements private final DeletionService deletionService; private final Dispatcher dispatcher; - private String[] localRootLogDirs; + private LocalDirsHandlerService dirsHandler; Path remoteRootLogDir; String remoteRootLogDirSuffix; private NodeId nodeId; @@ -95,11 +96,12 @@ public class LogAggregationService extends AbstractService implements private final ExecutorService threadPool; public LogAggregationService(Dispatcher dispatcher, Context context, - DeletionService deletionService) { + DeletionService deletionService, LocalDirsHandlerService dirsHandler) { super(LogAggregationService.class.getName()); this.dispatcher = dispatcher; this.context = context; this.deletionService = deletionService; + this.dirsHandler = dirsHandler; this.appLogAggregators = new 
ConcurrentHashMap<ApplicationId, AppLogAggregator>(); this.threadPool = Executors.newCachedThreadPool( @@ -109,9 +111,6 @@ public LogAggregationService(Dispatcher dispatcher, Context context, } public synchronized void init(Configuration conf) { - this.localRootLogDirs = - conf.getStrings(YarnConfiguration.NM_LOG_DIRS, - YarnConfiguration.DEFAULT_NM_LOG_DIRS); this.remoteRootLogDir = new Path(conf.get(YarnConfiguration.NM_REMOTE_APP_LOG_DIR, YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR)); @@ -291,9 +290,10 @@ private void initApp(final ApplicationId appId, String user, // New application AppLogAggregator appLogAggregator = - new AppLogAggregatorImpl(this.dispatcher, this.deletionService, getConfig(), appId, - userUgi, this.localRootLogDirs, - getRemoteNodeLogFileForApp(appId, user), logRetentionPolicy, appAcls); + new AppLogAggregatorImpl(this.dispatcher, this.deletionService, + getConfig(), appId, userUgi, dirsHandler, + getRemoteNodeLogFileForApp(appId, user), logRetentionPolicy, + appAcls); if (this.appLogAggregators.putIfAbsent(appId, appLogAggregator) != null) { throw new YarnException("Duplicate initApp for " + appId); } diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/loghandler/NonAggregatingLogHandler.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/loghandler/NonAggregatingLogHandler.java index e0f843e245a00..a90912e6885ec 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/loghandler/NonAggregatingLogHandler.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/loghandler/NonAggregatingLogHandler.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler; +import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ScheduledThreadPoolExecutor; @@ -31,6 +32,7 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.Dispatcher; import org.apache.hadoop.yarn.server.nodemanager.DeletionService; +import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService; import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEventType; import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerAppFinishedEvent; @@ -53,15 +55,16 @@ public class NonAggregatingLogHandler extends AbstractService implements private final DeletionService delService; private final Map<ApplicationId, String> appOwners; - private String[] rootLogDirs; + private final LocalDirsHandlerService dirsHandler; private long deleteDelaySeconds; private ScheduledThreadPoolExecutor sched; public NonAggregatingLogHandler(Dispatcher dispatcher, - DeletionService delService) { + DeletionService delService, LocalDirsHandlerService dirsHandler) { super(NonAggregatingLogHandler.class.getName()); this.dispatcher = dispatcher; this.delService = delService; + this.dirsHandler = dirsHandler; this.appOwners = new ConcurrentHashMap<ApplicationId, String>(); } 
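The constructor above only stores the LocalDirsHandlerService reference; as the hunks that follow show, NonAggregatingLogHandler now asks it for the current good log dirs each time a deletion task runs, so directories that went bad after init() are skipped automatically. A minimal model of that late-binding pattern, with LogDirsSource standing in for the handler:

import java.util.List;
import org.apache.hadoop.fs.Path;

// LogDirsSource stands in for LocalDirsHandlerService.getLogDirs(): the
// deleter re-reads the good-dirs list at run time instead of caching it.
interface LogDirsSource {
  List<String> getLogDirs();
}

class LogDeleterSketch {
  private final LogDirsSource dirsHandler;

  LogDeleterSketch(LogDirsSource dirsHandler) {
    this.dirsHandler = dirsHandler;
  }

  // Build the per-application log dirs from whatever is healthy right now.
  Path[] appLogDirs(String applicationId) {
    List<String> rootLogDirs = dirsHandler.getLogDirs();
    Path[] dirs = new Path[rootLogDirs.size()];
    int index = 0;
    for (String root : rootLogDirs) {
      dirs[index++] = new Path(root, applicationId);
    }
    return dirs;
  }
}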
@@ -70,9 +73,6 @@ public void init(Configuration conf) { // Default 3 hours. this.deleteDelaySeconds = conf.getLong(YarnConfiguration.NM_LOG_RETAIN_SECONDS, 3 * 60 * 60); - this.rootLogDirs = - conf.getStrings(YarnConfiguration.NM_LOG_DIRS, - YarnConfiguration.DEFAULT_NM_LOG_DIRS); sched = createScheduledThreadPoolExecutor(conf); super.init(conf); } @@ -145,10 +145,11 @@ public LogDeleterRunnable(String user, ApplicationId applicationId) { @Override @SuppressWarnings("unchecked") public void run() { - Path[] localAppLogDirs = - new Path[NonAggregatingLogHandler.this.rootLogDirs.length]; + List<String> rootLogDirs = + NonAggregatingLogHandler.this.dirsHandler.getLogDirs(); + Path[] localAppLogDirs = new Path[rootLogDirs.size()]; int index = 0; - for (String rootLogDir : NonAggregatingLogHandler.this.rootLogDirs) { + for (String rootLogDir : rootLogDirs) { localAppLogDirs[index] = new Path(rootLogDir, applicationId.toString()); index++; } diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java index faf0cbc47feae..b39bb33b1e6e4 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java @@ -34,15 +34,14 @@ import java.util.List; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.LocalDirAllocator; import org.apache.hadoop.fs.Path; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.yarn.api.records.ApplicationAccessType; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.server.nodemanager.Context; +import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService; import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application; import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container; import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerState; @@ -87,17 +86,18 @@ protected Class<? 
extends SubView> content() { public static class ContainersLogsBlock extends HtmlBlock implements YarnWebParams { private final Configuration conf; - private final LocalDirAllocator logsSelector; private final Context nmContext; private final ApplicationACLsManager aclsManager; + private final LocalDirsHandlerService dirsHandler; @Inject public ContainersLogsBlock(Configuration conf, Context context, - ApplicationACLsManager aclsManager) { + ApplicationACLsManager aclsManager, + LocalDirsHandlerService dirsHandler) { this.conf = conf; - this.logsSelector = new LocalDirAllocator(YarnConfiguration.NM_LOG_DIRS); this.nmContext = context; this.aclsManager = aclsManager; + this.dirsHandler = dirsHandler; } @Override @@ -198,11 +198,10 @@ private void printLogs(Block html, ContainerId containerId, File logFile = null; try { logFile = - new File(this.logsSelector - .getLocalPathToRead( - ContainerLaunch.getRelativeContainerLogDir( - applicationId.toString(), containerId.toString()) - + Path.SEPARATOR + $(CONTAINER_LOG_TYPE), this.conf) + new File(this.dirsHandler.getLogPathToRead( + ContainerLaunch.getRelativeContainerLogDir( + applicationId.toString(), containerId.toString()) + + Path.SEPARATOR + $(CONTAINER_LOG_TYPE)) .toUri().getPath()); } catch (Exception e) { html.h1("Cannot find this log on the local disk."); @@ -272,8 +271,8 @@ private void printLogs(Block html, ContainerId containerId, } } else { // Just print out the log-types - List<File> containerLogsDirs = - getContainerLogDirs(this.conf, containerId); + List<File> containerLogsDirs = getContainerLogDirs(containerId, + dirsHandler); boolean foundLogFile = false; for (File containerLogsDir : containerLogsDirs) { for (File logFile : containerLogsDir.listFiles()) { @@ -293,11 +292,10 @@ private void printLogs(Block html, ContainerId containerId, return; } - static List<File> - getContainerLogDirs(Configuration conf, ContainerId containerId) { - String[] logDirs = conf.getStrings(YarnConfiguration.NM_LOG_DIRS, - YarnConfiguration.DEFAULT_NM_LOG_DIRS); - List<File> containerLogDirs = new ArrayList<File>(logDirs.length); + static List<File> getContainerLogDirs(ContainerId containerId, + LocalDirsHandlerService dirsHandler) { + List<String> logDirs = dirsHandler.getLogDirs(); + List<File> containerLogDirs = new ArrayList<File>(logDirs.size()); for (String logDir : logDirs) { String appIdStr = ConverterUtils.toString( diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java index 2573015877731..f0d87414fee90 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java @@ -26,6 +26,7 @@ import org.apache.hadoop.yarn.YarnException; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.server.nodemanager.Context; +import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService; import org.apache.hadoop.yarn.server.nodemanager.ResourceView; import org.apache.hadoop.yarn.server.security.ApplicationACLsManager; import 
org.apache.hadoop.yarn.service.AbstractService; @@ -42,10 +43,11 @@ public class WebServer extends AbstractService { private WebApp webApp; public WebServer(Context nmContext, ResourceView resourceView, - ApplicationACLsManager aclsManager) { + ApplicationACLsManager aclsManager, + LocalDirsHandlerService dirsHandler) { super(WebServer.class.getName()); this.nmContext = nmContext; - this.nmWebApp = new NMWebApp(resourceView, aclsManager); + this.nmWebApp = new NMWebApp(resourceView, aclsManager, dirsHandler); } @Override @@ -81,17 +83,21 @@ public static class NMWebApp extends WebApp implements YarnWebParams { private final ResourceView resourceView; private final ApplicationACLsManager aclsManager; + private final LocalDirsHandlerService dirsHandler; public NMWebApp(ResourceView resourceView, - ApplicationACLsManager aclsManager) { + ApplicationACLsManager aclsManager, + LocalDirsHandlerService dirsHandler) { this.resourceView = resourceView; this.aclsManager = aclsManager; + this.dirsHandler = dirsHandler; } @Override public void setup() { bind(ResourceView.class).toInstance(this.resourceView); bind(ApplicationACLsManager.class).toInstance(this.aclsManager); + bind(LocalDirsHandlerService.class).toInstance(dirsHandler); route("/", NMController.class, "info"); route("/node", NMController.class, "node"); route("/allApplications", NMController.class, "allApplications"); diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/configuration.c b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/configuration.c index d85715be7a0e2..aa72303351294 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/configuration.c +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/configuration.c @@ -261,8 +261,15 @@ char * get_value(const char* key) { * Value delimiter is assumed to be a comma. */ char ** get_values(const char * key) { - char ** toPass = NULL; char *value = get_value(key); + return extract_values(value); +} + +/** + * Extracts array of values from the comma separated list of values. + */ +char ** extract_values(char *value) { + char ** toPass = NULL; char *tempTok = NULL; char *tempstr = NULL; int size = 0; @@ -276,8 +283,7 @@ char ** get_values(const char * key) { toPass[size++] = tempTok; if(size == toPassSize) { toPassSize += MAX_SIZE; - toPass = (char **) realloc(toPass,(sizeof(char *) * - (MAX_SIZE * toPassSize))); + toPass = (char **) realloc(toPass,(sizeof(char *) * toPassSize)); } tempTok = strtok_r(NULL, ",", &tempstr); } diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/configuration.h b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/configuration.h index 16ca23d6da835..b0d4814b310b6 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/configuration.h +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/configuration.h @@ -34,6 +34,9 @@ char *get_value(const char* key); //comma separated strings.
char ** get_values(const char* key); +// Extracts array of values from the comma separated list of values. +char ** extract_values(char *value); + // free the memory returned by get_values void free_values(char** values); diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/container-executor.c b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/container-executor.c index 73d160ae66b15..c4bde44a26589 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/container-executor.c +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/container-executor.c @@ -357,7 +357,7 @@ int mkdirs(const char* path, mode_t perm) { * It creates the container work and log directories. */ static int create_container_directories(const char* user, const char *app_id, - const char *container_id) { + const char *container_id, char* const* local_dir, char* const* log_dir) { // create dirs as 0750 const mode_t perms = S_IRWXU | S_IRGRP | S_IXGRP; if (app_id == NULL || container_id == NULL || user == NULL) { @@ -367,20 +367,11 @@ static int create_container_directories(const char* user, const char *app_id, } int result = -1; - - char **local_dir = get_values(NM_SYS_DIR_KEY); - - if (local_dir == NULL) { - fprintf(LOGFILE, "%s is not configured.\n", NM_SYS_DIR_KEY); - return -1; - } - - char **local_dir_ptr; + char* const* local_dir_ptr; for(local_dir_ptr = local_dir; *local_dir_ptr != NULL; ++local_dir_ptr) { char *container_dir = get_container_work_directory(*local_dir_ptr, user, app_id, container_id); if (container_dir == NULL) { - free_values(local_dir); return -1; } if (mkdirs(container_dir, perms) == 0) { @@ -390,7 +381,6 @@ static int create_container_directories(const char* user, const char *app_id, free(container_dir); } - free_values(local_dir); if (result != 0) { return result; } @@ -404,19 +394,11 @@ static int create_container_directories(const char* user, const char *app_id, } else { sprintf(combined_name, "%s/%s", app_id, container_id); - char **log_dir = get_values(NM_LOG_DIR_KEY); - if (log_dir == NULL) { - free(combined_name); - fprintf(LOGFILE, "%s is not configured.\n", NM_LOG_DIR_KEY); - return -1; - } - - char **log_dir_ptr; + char* const* log_dir_ptr; for(log_dir_ptr = log_dir; *log_dir_ptr != NULL; ++log_dir_ptr) { char *container_log_dir = get_app_log_directory(*log_dir_ptr, combined_name); if (container_log_dir == NULL) { free(combined_name); - free_values(log_dir); return -1; } else if (mkdirs(container_log_dir, perms) != 0) { free(container_log_dir); @@ -426,7 +408,6 @@ static int create_container_directories(const char* user, const char *app_id, } } free(combined_name); - free_values(log_dir); } return result; } @@ -660,17 +641,12 @@ static int copy_file(int input, const char* in_filename, /** * Function to initialize the user directories of a user. 
*/ -int initialize_user(const char *user) { - char **local_dir = get_values(NM_SYS_DIR_KEY); - if (local_dir == NULL) { - fprintf(LOGFILE, "%s is not configured.\n", NM_SYS_DIR_KEY); - return INVALID_NM_ROOT_DIRS; - } +int initialize_user(const char *user, char* const* local_dirs) { char *user_dir; - char **local_dir_ptr = local_dir; + char* const* local_dir_ptr; int failed = 0; - for(local_dir_ptr = local_dir; *local_dir_ptr != 0; ++local_dir_ptr) { + for(local_dir_ptr = local_dirs; *local_dir_ptr != 0; ++local_dir_ptr) { user_dir = get_user_directory(*local_dir_ptr, user); if (user_dir == NULL) { fprintf(LOGFILE, "Couldn't get userdir directory for %s.\n", user); @@ -682,32 +658,29 @@ int initialize_user(const char *user) { } free(user_dir); } - free_values(local_dir); return failed ? INITIALIZE_USER_FAILED : 0; } /** * Function to prepare the application directories for the container. */ -int initialize_app(const char *user, const char *app_id, - const char* nmPrivate_credentials_file, char* const* args) { +int initialize_app(const char *user, const char *app_id, + const char* nmPrivate_credentials_file, + char* const* local_dirs, char* const* log_roots, + char* const* args) { if (app_id == NULL || user == NULL) { fprintf(LOGFILE, "Either app_id is null or the user passed is null.\n"); return INVALID_ARGUMENT_NUMBER; } // create the user directory on all disks - int result = initialize_user(user); + int result = initialize_user(user, local_dirs); if (result != 0) { return result; } ////////////// create the log directories for the app on all disks - char **log_roots = get_values(NM_LOG_DIR_KEY); - if (log_roots == NULL) { - return INVALID_CONFIG_FILE; - } - char **log_root; + char* const* log_root; char *any_one_app_log_dir = NULL; for(log_root=log_roots; *log_root != NULL; ++log_root) { char *app_log_dir = get_app_log_directory(*log_root, app_id); @@ -722,7 +695,7 @@ int initialize_app(const char *user, const char *app_id, free(app_log_dir); } } - free_values(log_roots); + if (any_one_app_log_dir == NULL) { fprintf(LOGFILE, "Did not create any app-log directories\n"); return -1; @@ -743,15 +716,9 @@ int initialize_app(const char *user, const char *app_id, // 750 mode_t permissions = S_IRWXU | S_IRGRP | S_IXGRP; - char **nm_roots = get_values(NM_SYS_DIR_KEY); - - if (nm_roots == NULL) { - return INVALID_CONFIG_FILE; - } - - char **nm_root; + char* const* nm_root; char *primary_app_dir = NULL; - for(nm_root=nm_roots; *nm_root != NULL; ++nm_root) { + for(nm_root=local_dirs; *nm_root != NULL; ++nm_root) { char *app_dir = get_app_directory(*nm_root, user, app_id); if (app_dir == NULL) { // try the next one @@ -763,7 +730,7 @@ int initialize_app(const char *user, const char *app_id, free(app_dir); } } - free_values(nm_roots); + if (primary_app_dir == NULL) { fprintf(LOGFILE, "Did not create any app directories\n"); return -1; @@ -805,9 +772,10 @@ int initialize_app(const char *user, const char *app_id, } int launch_container_as_user(const char *user, const char *app_id, - const char *container_id, const char *work_dir, - const char *script_name, const char *cred_file, - const char* pid_file) { + const char *container_id, const char *work_dir, + const char *script_name, const char *cred_file, + const char* pid_file, char* const* local_dirs, + char* const* log_dirs) { int exit_code = -1; char *script_file_dest = NULL; char *cred_file_dest = NULL; @@ -854,7 +822,8 @@ int launch_container_as_user(const char *user, const char *app_id, goto cleanup; } - if (create_container_directories(user, 
app_id, container_id) != 0) { + if (create_container_directories(user, app_id, container_id, local_dirs, + log_dirs) != 0) { fprintf(LOGFILE, "Could not create container dirs"); goto cleanup; } diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/container-executor.h b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/container-executor.h index 3f0e8a5aa2c9d..baf677a319ff7 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/container-executor.h +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/container-executor.h @@ -61,8 +61,6 @@ enum errorcodes { #define NM_APP_DIR_PATTERN USER_DIR_PATTERN "/appcache/%s" #define CONTAINER_DIR_PATTERN NM_APP_DIR_PATTERN "/%s" #define CONTAINER_SCRIPT "launch_container.sh" -#define NM_SYS_DIR_KEY "yarn.nodemanager.local-dirs" -#define NM_LOG_DIR_KEY "yarn.nodemanager.log-dirs" #define CREDENTIALS_FILENAME "container_tokens" #define MIN_USERID_KEY "min.user.id" #define BANNED_USERS_KEY "banned.users" @@ -92,12 +90,13 @@ int check_executor_permissions(char *executable_file); // initialize the application directory int initialize_app(const char *user, const char *app_id, - const char *credentials, char* const* args); + const char *credentials, char* const* local_dirs, + char* const* log_dirs, char* const* args); /* * Function used to launch a container as the provided user. It does the following : * 1) Creates container work dir and log dir to be accessible by the child - * 2) Copies the script file from the TT to the work directory + * 2) Copies the script file from the NM to the work directory * 3) Sets up the environment * 4) Does an execlp on the same in order to replace the current image with * container image. @@ -109,12 +108,15 @@ int initialize_app(const char *user, const char *app_id, * @param cred_file the credentials file that needs to be copied to the * working directory. * @param pid_file file where pid of process should be written to + * @param local_dirs nodemanager-local-directories to be used + * @param log_dirs nodemanager-log-directories to be used * @return -1 or errorcode enum value on error (should never return on success). */ int launch_container_as_user(const char * user, const char *app_id, const char *container_id, const char *work_dir, const char *script_name, const char *cred_file, - const char *pid_file); + const char *pid_file, char* const* local_dirs, + char* const* log_dirs); /** * Function used to signal a container launched by the user. @@ -181,7 +183,7 @@ int mkdirs(const char* path, mode_t perm); /** * Function to initialize the user directories of a user. */ -int initialize_user(const char *user); +int initialize_user(const char *user, char* const* local_dirs); /** * Create a top level directory for the user.
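Context for the main.c hunks below (illustration, not from the patch): with NM_SYS_DIR_KEY and NM_LOG_DIR_KEY removed from the header, the setuid binary can no longer read the dir lists out of container-executor.cfg, so the NodeManager must pass the currently good dirs on the command line, each list as one comma-joined argument. A hedged Java sketch of building that argument vector; the numeric command code here is illustrative only, the real value comes from the executor's command enum:

    import java.util.Arrays;
    import java.util.List;

    public class LaunchCommandSketch {
        // Mirrors the argument order main.c consumes for LAUNCH_CONTAINER:
        // user, command, appid, containerid, workdir, script, tokens, pidfile,
        // then the good local dirs and good log dirs, each joined with "," so
        // the native side can re-split them with extract_values().
        static List<String> launchArgs(String user, String appId,
                String containerId, String workDir, String script, String tokens,
                String pidFile, List<String> localDirs, List<String> logDirs) {
            int launchCommand = 1;  // illustrative value, not the real constant
            return Arrays.asList(user, String.valueOf(launchCommand), appId,
                containerId, workDir, script, tokens, pidFile,
                String.join(",", localDirs), String.join(",", logDirs));
        }

        public static void main(String[] args) {
            System.out.println(launchArgs("nobody", "app_4", "container_1",
                "/work", "/work/launch_container.sh", "/work/container_tokens",
                "/work/pid", Arrays.asList("/l1", "/l2"), Arrays.asList("/g1")));
        }
    }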
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/main.c b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/main.c index 40fbad83653fc..d039f05ea438f 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/main.c +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/main.c @@ -43,10 +43,11 @@ void display_usage(FILE *stream) { fprintf(stream, "Usage: container-executor user command command-args\n"); fprintf(stream, "Commands:\n"); - fprintf(stream, " initialize container: %2d appid tokens cmd app...\n", - INITIALIZE_CONTAINER); + fprintf(stream, " initialize container: %2d appid tokens " \ + "nm-local-dirs nm-log-dirs cmd app...\n", INITIALIZE_CONTAINER); fprintf(stream, - " launch container: %2d appid containerid workdir container-script tokens pidfile\n", + " launch container: %2d appid containerid workdir "\ + "container-script tokens pidfile nm-local-dirs nm-log-dirs\n", LAUNCH_CONTAINER); fprintf(stream, " signal container: %2d container-pid signal\n", SIGNAL_CONTAINER); @@ -96,6 +97,7 @@ int main(int argc, char **argv) { char *orig_conf_file = STRINGIFY(HADOOP_CONF_DIR) "/" CONF_FILENAME; char *conf_file = realpath(orig_conf_file, NULL); + char *local_dirs, *log_dirs; if (conf_file == NULL) { fprintf(ERRORFILE, "Configuration file %s not found.\n", orig_conf_file); @@ -158,20 +160,23 @@ int main(int argc, char **argv) { switch (command) { case INITIALIZE_CONTAINER: - if (argc < 6) { - fprintf(ERRORFILE, "Too few arguments (%d vs 6) for initialize container\n", + if (argc < 8) { + fprintf(ERRORFILE, "Too few arguments (%d vs 8) for initialize container\n", argc); fflush(ERRORFILE); return INVALID_ARGUMENT_NUMBER; } app_id = argv[optind++]; cred_file = argv[optind++]; + local_dirs = argv[optind++];// good local dirs as a comma separated list + log_dirs = argv[optind++];// good log dirs as a comma separated list exit_code = initialize_app(user_detail->pw_name, app_id, cred_file, - argv + optind); + extract_values(local_dirs), + extract_values(log_dirs), argv + optind); break; case LAUNCH_CONTAINER: - if (argc < 9) { - fprintf(ERRORFILE, "Too few arguments (%d vs 9) for launch container\n", + if (argc != 11) { + fprintf(ERRORFILE, "Wrong number of arguments (%d vs 11) for launch container\n", argc); fflush(ERRORFILE); return INVALID_ARGUMENT_NUMBER; @@ -182,13 +187,17 @@ int main(int argc, char **argv) { script_file = argv[optind++]; cred_file = argv[optind++]; pid_file = argv[optind++]; - exit_code = launch_container_as_user(user_detail->pw_name, app_id, container_id, - current_dir, script_file, cred_file, pid_file); + local_dirs = argv[optind++];// good local dirs as a comma separated list + log_dirs = argv[optind++];// good log dirs as a comma separated list + exit_code = launch_container_as_user(user_detail->pw_name, app_id, + container_id, current_dir, script_file, cred_file, + pid_file, extract_values(local_dirs), + extract_values(log_dirs)); break; case SIGNAL_CONTAINER: - if (argc < 5) { - fprintf(ERRORFILE, "Too few arguments (%d vs 5) for signal container\n", - argc); + if (argc != 5) { + fprintf(ERRORFILE, "Wrong number of arguments (%d vs 5) for " \ + "signal container\n", argc); fflush(ERRORFILE); return INVALID_ARGUMENT_NUMBER; } else { diff --git 
a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/test/test-container-executor.c b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/test/test-container-executor.c index 7c62f1ba183a3..b7796586a4d34 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/test/test-container-executor.c +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/test/test-container-executor.c @@ -28,10 +28,17 @@ #include <sys/stat.h> #include <sys/wait.h> -#define TEST_ROOT "/tmp/test-container-controller" +#define TEST_ROOT "/tmp/test-container-executor" #define DONT_TOUCH_FILE "dont-touch-me" +#define NM_LOCAL_DIRS TEST_ROOT "/local-1," TEST_ROOT "/local-2," \ + TEST_ROOT "/local-3," TEST_ROOT "/local-4," TEST_ROOT "/local-5" +#define NM_LOG_DIRS TEST_ROOT "/logdir_1," TEST_ROOT "/logdir_2," \ + TEST_ROOT "/logdir_3," TEST_ROOT "/logdir_4" +#define ARRAY_SIZE 1000 static char* username = NULL; +static char* local_dirs = NULL; +static char* log_dirs = NULL; /** * Run the command using the effective user id. @@ -84,40 +91,33 @@ void run(const char *cmd) { int write_config_file(char *file_name) { FILE *file; - int i = 0; file = fopen(file_name, "w"); if (file == NULL) { printf("Failed to open %s.\n", file_name); return EXIT_FAILURE; } - fprintf(file, "yarn.nodemanager.local-dirs=" TEST_ROOT "/local-1"); - for(i=2; i < 5; ++i) { - fprintf(file, "," TEST_ROOT "/local-%d", i); - } - fprintf(file, "\n"); - fprintf(file, "yarn.nodemanager.log-dirs=" TEST_ROOT "/logs\n"); + fprintf(file, "banned.users=bannedUser\n"); + fprintf(file, "min.user.id=1000\n"); fclose(file); return 0; } -void create_nm_roots() { - char** nm_roots = get_values(NM_SYS_DIR_KEY); +void create_nm_roots(char ** nm_roots) { char** nm_root; for(nm_root=nm_roots; *nm_root != NULL; ++nm_root) { if (mkdir(*nm_root, 0755) != 0) { printf("FAIL: Can't create directory %s - %s\n", *nm_root, - strerror(errno)); + strerror(errno)); exit(1); } char buffer[100000]; sprintf(buffer, "%s/usercache", *nm_root); if (mkdir(buffer, 0755) != 0) { printf("FAIL: Can't create directory %s - %s\n", buffer, - strerror(errno)); + strerror(errno)); exit(1); } } - free_values(nm_roots); } void test_get_user_directory() { @@ -209,7 +209,7 @@ void test_check_configuration_permissions() { } void test_delete_container() { - if (initialize_user(username)) { + if (initialize_user(username, extract_values(local_dirs))) { printf("FAIL: failed to initialize user %s\n", username); exit(1); } @@ -504,7 +504,8 @@ void test_init_app() { exit(1); } else if (child == 0) { char *final_pgm[] = {"touch", "my-touch-file", 0}; - if (initialize_app(username, "app_4", TEST_ROOT "/creds.txt", final_pgm) != 0) { + if (initialize_app(username, "app_4", TEST_ROOT "/creds.txt", + extract_values(local_dirs), extract_values(log_dirs), final_pgm) != 0) { printf("FAIL: failed in child\n"); exit(42); } @@ -598,7 +599,8 @@ void test_run_container() { exit(1); } else if (child == 0) { if (launch_container_as_user(username, "app_4", "container_1", - container_dir, script_name, TEST_ROOT "/creds.txt", pid_file) != 0) { + container_dir, script_name, TEST_ROOT "/creds.txt", pid_file, + extract_values(local_dirs), extract_values(log_dirs)) != 0) { printf("FAIL: failed in child\n"); exit(42); } @@ -677,7 +679,12 @@ int 
main(int argc, char **argv) { } read_config(TEST_ROOT "/test.cfg"); - create_nm_roots(); + local_dirs = (char *) malloc (sizeof(char) * ARRAY_SIZE); + strcpy(local_dirs, NM_LOCAL_DIRS); + log_dirs = (char *) malloc (sizeof(char) * ARRAY_SIZE); + strcpy(log_dirs, NM_LOG_DIRS); + + create_nm_roots(extract_values(local_dirs)); if (getuid() == 0 && argc == 2) { username = argv[1]; diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/DummyContainerManager.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/DummyContainerManager.java index 74d99796914a0..bf429da73471f 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/DummyContainerManager.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/DummyContainerManager.java @@ -60,16 +60,18 @@ public DummyContainerManager(Context context, ContainerExecutor exec, DeletionService deletionContext, NodeStatusUpdater nodeStatusUpdater, NodeManagerMetrics metrics, ContainerTokenSecretManager containerTokenSecretManager, - ApplicationACLsManager applicationACLsManager) { + ApplicationACLsManager applicationACLsManager, + LocalDirsHandlerService dirsHandler) { super(context, exec, deletionContext, nodeStatusUpdater, metrics, - containerTokenSecretManager, applicationACLsManager); + containerTokenSecretManager, applicationACLsManager, dirsHandler); } @Override @SuppressWarnings("unchecked") - protected ResourceLocalizationService createResourceLocalizationService(ContainerExecutor exec, - DeletionService deletionContext) { - return new ResourceLocalizationService(super.dispatcher, exec, deletionContext) { + protected ResourceLocalizationService createResourceLocalizationService( + ContainerExecutor exec, DeletionService deletionContext) { + return new ResourceLocalizationService(super.dispatcher, exec, + deletionContext, super.dirsHandler) { @Override public void handle(LocalizationEvent event) { switch (event.getType()) { @@ -125,7 +127,8 @@ public void handle(LocalizationEvent event) { @SuppressWarnings("unchecked") protected ContainersLauncher createContainersLauncher(Context context, ContainerExecutor exec) { - return new ContainersLauncher(context, super.dispatcher, exec) { + return new ContainersLauncher(context, super.dispatcher, exec, + super.dirsHandler) { @Override public void handle(ContainersLauncherEvent event) { Container container = event.getContainer(); @@ -139,7 +142,8 @@ public void handle(ContainersLauncherEvent event) { case CLEANUP_CONTAINER: dispatcher.getEventHandler().handle( new ContainerExitEvent(containerId, - ContainerEventType.CONTAINER_KILLED_ON_REQUEST, 0)); + ContainerEventType.CONTAINER_KILLED_ON_REQUEST, 0, + "Container exited with exit code 0.")); break; } } diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java index 8b4b01a5da24c..9a358f6b84d35 100644 --- 
a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java @@ -21,7 +21,6 @@ import java.io.File; import java.io.IOException; -import org.apache.hadoop.NodeHealthCheckerService; import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.Path; import org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest; @@ -80,9 +79,12 @@ public void testSuccessfulContainerLaunch() throws InterruptedException, ContainerExecutor exec = new DefaultContainerExecutor(); exec.setConf(conf); + DeletionService del = new DeletionService(exec); Dispatcher dispatcher = new AsyncDispatcher(); - NodeHealthCheckerService healthChecker = null; + NodeHealthCheckerService healthChecker = new NodeHealthCheckerService(); + healthChecker.init(conf); + LocalDirsHandlerService dirsHandler = healthChecker.getDiskHandler(); NodeManagerMetrics metrics = NodeManagerMetrics.create(); ContainerTokenSecretManager containerTokenSecretManager = new ContainerTokenSecretManager(); NodeStatusUpdater nodeStatusUpdater = @@ -100,7 +102,8 @@ protected void startStatusUpdater() { DummyContainerManager containerManager = new DummyContainerManager( context, exec, del, nodeStatusUpdater, metrics, - containerTokenSecretManager, new ApplicationACLsManager(conf)); + containerTokenSecretManager, new ApplicationACLsManager(conf), + dirsHandler); containerManager.init(conf); containerManager.start(); diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutor.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutor.java index 5eb146db2c04e..ba18a3d2f4053 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutor.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutor.java @@ -37,6 +37,7 @@ import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.conf.YarnConfiguration; @@ -63,8 +64,6 @@ * config values. * <br><pre><code> * > cat /etc/hadoop/container-executor.cfg - * yarn.nodemanager.local-dirs=/tmp/hadoop/nm-local/ - * yarn.nodemanager.log-dirs=/tmp/hadoop/nm-log * yarn.nodemanager.linux-container-executor.group=mapred * #depending on the user id of the application.submitter option * min.user.id=1 @@ -72,7 +71,7 @@ * > sudo chmod 444 /etc/hadoop/container-executor.cfg * </code></pre> * - * <li>iMove the binary and set proper permissions on it. It needs to be owned + * <li>Move the binary and set proper permissions on it. It needs to be owned * by root, the group needs to be the group configured in container-executor.cfg, * and it needs the setuid bit set. 
(The build will also overwrite it so you * need to move it to a place that you can support it. @@ -98,14 +97,22 @@ public class TestLinuxContainerExecutor { private LinuxContainerExecutor exec = null; private String appSubmitter = null; + private LocalDirsHandlerService dirsHandler; @Before public void setup() throws Exception { - FileContext.getLocalFSFileContext().mkdir( - new Path(workSpace.getAbsolutePath()), null, true); + FileContext files = FileContext.getLocalFSFileContext(); + Path workSpacePath = new Path(workSpace.getAbsolutePath()); + files.mkdir(workSpacePath, null, true); workSpace.setReadable(true, false); workSpace.setExecutable(true, false); workSpace.setWritable(true, false); + File localDir = new File(workSpace.getAbsoluteFile(), "localDir"); + files.mkdir(new Path(localDir.getAbsolutePath()), + new FsPermission("777"), false); + File logDir = new File(workSpace.getAbsoluteFile(), "logDir"); + files.mkdir(new Path(logDir.getAbsolutePath()), + new FsPermission("777"), false); String exec_path = System.getProperty("container-executor.path"); if(exec_path != null && !exec_path.isEmpty()) { Configuration conf = new Configuration(false); @@ -114,6 +121,10 @@ public void setup() throws Exception { conf.set(YarnConfiguration.NM_LINUX_CONTAINER_EXECUTOR_PATH, exec_path); exec = new LinuxContainerExecutor(); exec.setConf(conf); + conf.set(YarnConfiguration.NM_LOCAL_DIRS, localDir.getAbsolutePath()); + conf.set(YarnConfiguration.NM_LOG_DIRS, logDir.getAbsolutePath()); + dirsHandler = new LocalDirsHandlerService(); + dirsHandler.init(conf); } appSubmitter = System.getProperty("application.submitter"); if(appSubmitter == null || appSubmitter.isEmpty()) { @@ -189,7 +200,8 @@ private int runAndBlock(ContainerId cId, String ... cmd) throws IOException { exec.activateContainer(cId, pidFile); return exec.launchContainer(container, scriptPath, tokensPath, - appSubmitter, appId, workDir); + appSubmitter, appId, workDir, dirsHandler.getLocalDirs(), + dirsHandler.getLogDirs()); } diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutorWithMocks.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutorWithMocks.java index 4827d83192577..9b98290d9092c 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutorWithMocks.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutorWithMocks.java @@ -35,6 +35,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.conf.YarnConfiguration; @@ -51,6 +52,7 @@ public class TestLinuxContainerExecutorWithMocks { private LinuxContainerExecutor mockExec = null; private final File mockParamFile = new File("./params.txt"); + private LocalDirsHandlerService dirsHandler; private void deleteMockParamFile() { if(mockParamFile.exists()) { @@ -80,6 +82,8 @@ public void setup() { Configuration conf = new Configuration(); 
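Before the mock-executor assertions below (illustration, not from the patch): the test asserts that the executor passes each dir list as one comma-joined parameter, which main.c then re-splits with extract_values(). A tiny pure-Java sketch of that round-trip contract; extractValues here is a Java analogue of the C helper, which skips empty tokens the way strtok_r does:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class DirListRoundTripSketch {
        // Java side: join the good dirs into a single argv element.
        static String join(List<String> dirs) {
            return String.join(",", dirs);
        }

        // Analogue of the native extract_values(): split on commas and drop
        // empty tokens, matching strtok_r's behavior on ",," and trailing ",".
        static List<String> extractValues(String value) {
            List<String> out = new ArrayList<>();
            for (String tok : value.split(",")) {
                if (!tok.isEmpty()) {
                    out.add(tok);
                }
            }
            return out;
        }

        public static void main(String[] args) {
            List<String> dirs = Arrays.asList("/tmp/local-1", "/tmp/local-2");
            // What the NodeManager joins, the native executor recovers intact.
            System.out.println(extractValues(join(dirs)).equals(dirs));  // true
        }
    }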
conf.set(YarnConfiguration.NM_LINUX_CONTAINER_EXECUTOR_PATH, executorPath); mockExec = new LinuxContainerExecutor(); + dirsHandler = new LocalDirsHandlerService(); + dirsHandler.init(conf); mockExec.setConf(conf); } @@ -114,10 +118,13 @@ public void testContainerLaunch() throws IOException { mockExec.activateContainer(cId, pidFile); int ret = mockExec.launchContainer(container, scriptPath, tokensPath, - appSubmitter, appId, workDir); + appSubmitter, appId, workDir, dirsHandler.getLocalDirs(), + dirsHandler.getLogDirs()); assertEquals(0, ret); assertEquals(Arrays.asList(appSubmitter, cmd, appId, containerId, - workDir.toString(), "/bin/echo", "/dev/null", pidFile.toString()), + workDir.toString(), "/bin/echo", "/dev/null", pidFile.toString(), + StringUtils.join(",", dirsHandler.getLocalDirs()), + StringUtils.join(",", dirsHandler.getLogDirs())), readMockParams()); } diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/TestNodeHealthService.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeHealthService.java similarity index 69% rename from hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/TestNodeHealthService.java rename to hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeHealthService.java index 54c3033ba2673..6b64f80e31f97 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/TestNodeHealthService.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeHealthService.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop; +package org.apache.hadoop.yarn.server.nodemanager; import java.io.File; import java.io.FileOutputStream; @@ -88,24 +88,31 @@ private void writeNodeHealthScriptFile(String scriptStr, boolean setExecutable) public void testNodeHealthScriptShouldRun() throws IOException { // Node health script should not start if there is no property called // node health script path. - Assert.assertFalse("By default Health checker should not have started", - NodeHealthCheckerService.shouldRun(new Configuration())); + Assert.assertFalse("By default Health script should not have started", + NodeHealthScriptRunner.shouldRun(new Configuration())); Configuration conf = getConfForNodeHealthScript(); // Node health script should not start if the node health script does not // exist - Assert.assertFalse("Node health script should start", NodeHealthCheckerService - .shouldRun(conf)); + Assert.assertFalse("Node health script should not start", + NodeHealthScriptRunner.shouldRun(conf)); // Create script path. conf.writeXml(new FileOutputStream(nodeHealthConfigFile)); conf.addResource(nodeHealthConfigFile.getName()); writeNodeHealthScriptFile("", false); // Node health script should not start if the node health script is not // executable. 
- Assert.assertFalse("Node health script should start", NodeHealthCheckerService - .shouldRun(conf)); + Assert.assertFalse("Node health script should start", + NodeHealthScriptRunner.shouldRun(conf)); writeNodeHealthScriptFile("", true); - Assert.assertTrue("Node health script should start", NodeHealthCheckerService - .shouldRun(conf)); + Assert.assertTrue("Node health script should start", + NodeHealthScriptRunner.shouldRun(conf)); + } + + private void setHealthStatus(NodeHealthStatus healthStatus, boolean isHealthy, + String healthReport, long lastHealthReportTime) { + healthStatus.setHealthReport(healthReport); + healthStatus.setIsNodeHealthy(isHealthy); + healthStatus.setLastHealthReportTime(lastHealthReportTime); } @Test @@ -120,54 +127,67 @@ public void testNodeHealthScript() throws Exception { conf.writeXml(new FileOutputStream(nodeHealthConfigFile)); conf.addResource(nodeHealthConfigFile.getName()); - NodeHealthCheckerService nodeHealthChecker = new NodeHealthCheckerService( - conf); - TimerTask timer = nodeHealthChecker.getTimer(); writeNodeHealthScriptFile(normalScript, true); - timer.run(); + NodeHealthCheckerService nodeHealthChecker = new NodeHealthCheckerService(); + nodeHealthChecker.init(conf); + NodeHealthScriptRunner nodeHealthScriptRunner = + nodeHealthChecker.getNodeHealthScriptRunner(); + TimerTask timerTask = nodeHealthScriptRunner.getTimerTask(); - nodeHealthChecker.setHealthStatus(healthStatus); + timerTask.run(); + + setHealthStatus(healthStatus, nodeHealthChecker.isHealthy(), + nodeHealthChecker.getHealthReport(), + nodeHealthChecker.getLastHealthReportTime()); LOG.info("Checking initial healthy condition"); // Check proper report conditions. Assert.assertTrue("Node health status reported unhealthy", healthStatus .getIsNodeHealthy()); Assert.assertTrue("Node health status reported unhealthy", healthStatus - .getHealthReport().isEmpty()); + .getHealthReport().equals(nodeHealthChecker.getHealthReport())); // write out error file. // Healthy to unhealthy transition writeNodeHealthScriptFile(errorScript, true); // Run timer - timer.run(); + timerTask.run(); // update health status - nodeHealthChecker.setHealthStatus(healthStatus); + setHealthStatus(healthStatus, nodeHealthChecker.isHealthy(), + nodeHealthChecker.getHealthReport(), + nodeHealthChecker.getLastHealthReportTime()); LOG.info("Checking Healthy--->Unhealthy"); Assert.assertFalse("Node health status reported healthy", healthStatus .getIsNodeHealthy()); - Assert.assertFalse("Node health status reported healthy", healthStatus - .getHealthReport().isEmpty()); + Assert.assertTrue("Node health status reported healthy", healthStatus + .getHealthReport().equals(nodeHealthChecker.getHealthReport())); // Check unhealthy to healthy transitions. writeNodeHealthScriptFile(normalScript, true); - timer.run(); - nodeHealthChecker.setHealthStatus(healthStatus); + timerTask.run(); + setHealthStatus(healthStatus, nodeHealthChecker.isHealthy(), + nodeHealthChecker.getHealthReport(), + nodeHealthChecker.getLastHealthReportTime()); LOG.info("Checking UnHealthy--->healthy"); // Check proper report conditions. Assert.assertTrue("Node health status reported unhealthy", healthStatus .getIsNodeHealthy()); Assert.assertTrue("Node health status reported unhealthy", healthStatus - .getHealthReport().isEmpty()); + .getHealthReport().equals(nodeHealthChecker.getHealthReport())); // Healthy to timeout transition. 
writeNodeHealthScriptFile(timeOutScript, true); - timer.run(); - nodeHealthChecker.setHealthStatus(healthStatus); + timerTask.run(); + setHealthStatus(healthStatus, nodeHealthChecker.isHealthy(), + nodeHealthChecker.getHealthReport(), + nodeHealthChecker.getLastHealthReportTime()); LOG.info("Checking Healthy--->timeout"); Assert.assertFalse("Node health status reported healthy even after timeout", healthStatus.getIsNodeHealthy()); - Assert.assertEquals("Node time out message not propogated", healthStatus - .getHealthReport(), - NodeHealthCheckerService.NODE_HEALTH_SCRIPT_TIMED_OUT_MSG); + Assert.assertTrue("Node script time out message not propagated", + healthStatus.getHealthReport().equals( + NodeHealthScriptRunner.NODE_HEALTH_SCRIPT_TIMED_OUT_MSG + + NodeHealthCheckerService.SEPARATOR + + nodeHealthChecker.getDiskHandler().getDisksHealthReport())); } } diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java index a0a5c557954f5..c1462746ff1c4 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java @@ -29,7 +29,6 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.NodeHealthCheckerService; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.Path; @@ -440,10 +439,11 @@ protected ContainerManagerImpl createContainerManager(Context context, ContainerExecutor exec, DeletionService del, NodeStatusUpdater nodeStatusUpdater, ContainerTokenSecretManager containerTokenSecretManager, - ApplicationACLsManager aclsManager) { + ApplicationACLsManager aclsManager, + LocalDirsHandlerService diskhandler) { return new ContainerManagerImpl(context, exec, del, nodeStatusUpdater, metrics, containerTokenSecretManager, - aclsManager) { + aclsManager, diskhandler) { @Override public void start() { // Simulating failure of starting RPC server diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/BaseContainerManagerTest.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/BaseContainerManagerTest.java index 6cd6f8c691ebe..6d1ad8ed57b56 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/BaseContainerManagerTest.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/BaseContainerManagerTest.java @@ -45,7 +45,9 @@ import org.apache.hadoop.yarn.server.nodemanager.Context; import org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor; import org.apache.hadoop.yarn.server.nodemanager.DeletionService; +import 
org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService; import org.apache.hadoop.yarn.server.nodemanager.LocalRMInterface; +import org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService; import org.apache.hadoop.yarn.server.nodemanager.NodeManager.NMContext; import org.apache.hadoop.yarn.server.nodemanager.NodeStatusUpdater; import org.apache.hadoop.yarn.server.nodemanager.NodeStatusUpdaterImpl; @@ -94,6 +96,8 @@ public BaseContainerManagerTest() throws UnsupportedFileSystemException { protected ContainerExecutor exec; protected DeletionService delSrvc; protected String user = "nobody"; + protected NodeHealthCheckerService nodeHealthChecker; + protected LocalDirsHandlerService dirsHandler; protected NodeStatusUpdater nodeStatusUpdater = new NodeStatusUpdaterImpl( context, new AsyncDispatcher(), null, metrics, this.containerTokenSecretManager) { @@ -147,9 +151,12 @@ public void delete(String user, Path subDir, Path[] baseDirs) { delSrvc.init(conf); exec = createContainerExecutor(); + nodeHealthChecker = new NodeHealthCheckerService(); + nodeHealthChecker.init(conf); + dirsHandler = nodeHealthChecker.getDiskHandler(); containerManager = new ContainerManagerImpl(context, exec, delSrvc, nodeStatusUpdater, metrics, this.containerTokenSecretManager, - new ApplicationACLsManager(conf)); + new ApplicationACLsManager(conf), dirsHandler); containerManager.init(conf); } diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java index c096598cc9467..c341548b1dd39 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java @@ -383,11 +383,12 @@ public void testLocalFilesCleanup() throws InterruptedException, // Real del service delSrvc = new DeletionService(exec); delSrvc.init(conf); + ContainerTokenSecretManager containerTokenSecretManager = new ContainerTokenSecretManager(); containerManager = new ContainerManagerImpl(context, exec, delSrvc, nodeStatusUpdater, metrics, containerTokenSecretManager, - new ApplicationACLsManager(conf)); + new ApplicationACLsManager(conf), dirsHandler); containerManager.init(conf); containerManager.start(); diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/TestContainer.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/TestContainer.java index c3b42166285f9..e4b7aa47a7af7 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/TestContainer.java +++ 
b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/TestContainer.java @@ -25,6 +25,7 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import java.io.IOException; import java.net.URISyntaxException; import java.nio.ByteBuffer; import java.util.AbstractMap.SimpleEntry; @@ -649,7 +650,8 @@ public void containerSuccessful() { public void containerFailed(int exitCode) { c.handle(new ContainerExitEvent(cId, - ContainerEventType.CONTAINER_EXITED_WITH_FAILURE, exitCode)); + ContainerEventType.CONTAINER_EXITED_WITH_FAILURE, exitCode, + "Container completed with exit code " + exitCode)); drainDispatcherEvents(); } @@ -659,9 +661,10 @@ public void killContainer() { } public void containerKilledOnRequest() { + int exitCode = ExitCode.FORCE_KILLED.getExitCode(); c.handle(new ContainerExitEvent(cId, - ContainerEventType.CONTAINER_KILLED_ON_REQUEST, ExitCode.FORCE_KILLED - .getExitCode())); + ContainerEventType.CONTAINER_KILLED_ON_REQUEST, exitCode, + "Container completed with exit code " + exitCode)); drainDispatcherEvents(); } diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestResourceLocalizationService.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestResourceLocalizationService.java index fe7710bacbb92..9886d37c73b43 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestResourceLocalizationService.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestResourceLocalizationService.java @@ -59,6 +59,8 @@ import org.apache.hadoop.yarn.event.EventHandler; import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor; import org.apache.hadoop.yarn.server.nodemanager.DeletionService; +import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService; +import org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService; import org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.LocalResourceStatus; import org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.LocalizerAction; import org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.LocalizerHeartbeatResponse; @@ -109,19 +111,23 @@ public void testLocalizationInit() throws Exception { doNothing().when(spylfs).mkdir( isA(Path.class), isA(FsPermission.class), anyBoolean()); + List<Path> localDirs = new ArrayList<Path>(); + String[] sDirs = new String[4]; + for (int i = 0; i < 4; ++i) { + localDirs.add(lfs.makeQualified(new Path(basedir, i + ""))); + sDirs[i] = localDirs.get(i).toString(); + } + conf.setStrings(YarnConfiguration.NM_LOCAL_DIRS, sDirs); + LocalDirsHandlerService diskhandler = new LocalDirsHandlerService(); + diskhandler.init(conf); + ResourceLocalizationService locService = - spy(new ResourceLocalizationService(dispatcher, exec, delService)); + spy(new ResourceLocalizationService(dispatcher, exec, delService, + diskhandler)); doReturn(lfs) .when(locService).getLocalFileContext(isA(Configuration.class)); try { 
dispatcher.start(); - List<Path> localDirs = new ArrayList<Path>(); - String[] sDirs = new String[4]; - for (int i = 0; i < 4; ++i) { - localDirs.add(lfs.makeQualified(new Path(basedir, i + ""))); - sDirs[i] = localDirs.get(i).toString(); - } - conf.setStrings(YarnConfiguration.NM_LOCAL_DIRS, sDirs); // initialize ResourceLocalizationService locService.init(conf); @@ -176,12 +182,16 @@ public void testResourceRelease() throws Exception { dispatcher.register(LocalizerEventType.class, localizerBus); ContainerExecutor exec = mock(ContainerExecutor.class); + LocalDirsHandlerService dirsHandler = new LocalDirsHandlerService(); + dirsHandler.init(conf); + DeletionService delService = new DeletionService(exec); delService.init(null); delService.start(); ResourceLocalizationService rawService = - new ResourceLocalizationService(dispatcher, exec, delService); + new ResourceLocalizationService(dispatcher, exec, delService, + dirsHandler); ResourceLocalizationService spyService = spy(rawService); doReturn(ignore).when(spyService).createServer(); doReturn(mockLocallilzerTracker).when(spyService).createLocalizerTracker( @@ -356,13 +366,17 @@ public void testLocalizationHeartbeat() throws Exception { dispatcher.register(ContainerEventType.class, containerBus); ContainerExecutor exec = mock(ContainerExecutor.class); + LocalDirsHandlerService dirsHandler = new LocalDirsHandlerService(); + dirsHandler.init(conf); + DeletionService delServiceReal = new DeletionService(exec); DeletionService delService = spy(delServiceReal); delService.init(null); delService.start(); ResourceLocalizationService rawService = - new ResourceLocalizationService(dispatcher, exec, delService); + new ResourceLocalizationService(dispatcher, exec, delService, + dirsHandler); ResourceLocalizationService spyService = spy(rawService); doReturn(ignore).when(spyService).createServer(); doReturn(lfs).when(spyService).getLocalFileContext(isA(Configuration.class)); @@ -414,8 +428,9 @@ public boolean matches(Object o) { String appStr = ConverterUtils.toString(appId); String ctnrStr = c.getContainerID().toString(); ArgumentCaptor<Path> tokenPathCaptor = ArgumentCaptor.forClass(Path.class); - verify(exec).startLocalizer(tokenPathCaptor.capture(), isA(InetSocketAddress.class), - eq("user0"), eq(appStr), eq(ctnrStr), isA(List.class)); + verify(exec).startLocalizer(tokenPathCaptor.capture(), + isA(InetSocketAddress.class), eq("user0"), eq(appStr), eq(ctnrStr), + isA(List.class), isA(List.class)); Path localizationTokenPath = tokenPathCaptor.getValue(); // heartbeat from localizer diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java index a4202a9462dcd..a1853b307b0f8 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java @@ -122,7 +122,8 @@ public void testLocalFileDeletionAfterUpload() throws IOException { 
dispatcher.register(ApplicationEventType.class, appEventHandler); LogAggregationService logAggregationService = - new LogAggregationService(dispatcher, this.context, this.delSrvc); + new LogAggregationService(dispatcher, this.context, this.delSrvc, + super.dirsHandler); logAggregationService.init(this.conf); logAggregationService.start(); @@ -189,7 +190,8 @@ public void testNoContainerOnNode() { dispatcher.register(ApplicationEventType.class, appEventHandler); LogAggregationService logAggregationService = - new LogAggregationService(dispatcher, this.context, this.delSrvc); + new LogAggregationService(dispatcher, this.context, this.delSrvc, + super.dirsHandler); logAggregationService.init(this.conf); logAggregationService.start(); @@ -237,7 +239,8 @@ public void testMultipleAppsLogAggregation() throws IOException { dispatcher.register(ApplicationEventType.class, appEventHandler); LogAggregationService logAggregationService = - new LogAggregationService(dispatcher, this.context, this.delSrvc); + new LogAggregationService(dispatcher, this.context, this.delSrvc, + super.dirsHandler); logAggregationService.init(this.conf); logAggregationService.start(); diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/loghandler/TestNonAggregatingLogHandler.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/loghandler/TestNonAggregatingLogHandler.java index 5fa7bcb3b1c3f..a5e5eb06bc819 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/loghandler/TestNonAggregatingLogHandler.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/loghandler/TestNonAggregatingLogHandler.java @@ -37,6 +37,7 @@ import org.apache.hadoop.yarn.event.EventHandler; import org.apache.hadoop.yarn.logaggregation.ContainerLogsRetentionPolicy; import org.apache.hadoop.yarn.server.nodemanager.DeletionService; +import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService; import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEventType; import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerAppFinishedEvent; @@ -74,13 +75,16 @@ public void testLogDeletion() { EventHandler<ApplicationEvent> appEventHandler = mock(EventHandler.class); dispatcher.register(ApplicationEventType.class, appEventHandler); + LocalDirsHandlerService dirsHandler = new LocalDirsHandlerService(); + dirsHandler.init(conf); + ApplicationId appId1 = BuilderUtils.newApplicationId(1234, 1); ApplicationAttemptId appAttemptId1 = BuilderUtils.newApplicationAttemptId(appId1, 1); ContainerId container11 = BuilderUtils.newContainerId(appAttemptId1, 1); NonAggregatingLogHandler logHandler = - new NonAggregatingLogHandler(dispatcher, delService); + new NonAggregatingLogHandler(dispatcher, delService, dirsHandler); logHandler.init(conf); logHandler.start(); @@ -146,13 +150,17 @@ public void testDelayedDelete() { EventHandler<ApplicationEvent> appEventHandler = mock(EventHandler.class); dispatcher.register(ApplicationEventType.class, 
appEventHandler); + LocalDirsHandlerService dirsHandler = new LocalDirsHandlerService(); + dirsHandler.init(conf); + ApplicationId appId1 = BuilderUtils.newApplicationId(1234, 1); ApplicationAttemptId appAttemptId1 = BuilderUtils.newApplicationAttemptId(appId1, 1); ContainerId container11 = BuilderUtils.newContainerId(appAttemptId1, 1); NonAggregatingLogHandler logHandler = - new NonAggregatingLogHandlerWithMockExecutor(dispatcher, delService); + new NonAggregatingLogHandlerWithMockExecutor(dispatcher, delService, + dirsHandler); logHandler.init(conf); logHandler.start(); @@ -182,8 +190,8 @@ private class NonAggregatingLogHandlerWithMockExecutor extends private ScheduledThreadPoolExecutor mockSched; public NonAggregatingLogHandlerWithMockExecutor(Dispatcher dispatcher, - DeletionService delService) { - super(dispatcher, delService); + DeletionService delService, LocalDirsHandlerService dirsHandler) { + super(dispatcher, delService, dirsHandler); } @Override diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServer.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServer.java index 5eea6d8380df3..ebba63fcc0de3 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServer.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServer.java @@ -27,6 +27,7 @@ import java.io.Writer; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; @@ -37,6 +38,8 @@ import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.nodemanager.Context; +import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService; +import org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService; import org.apache.hadoop.yarn.server.nodemanager.NodeManager; import org.apache.hadoop.yarn.server.nodemanager.ResourceView; import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application; @@ -47,6 +50,7 @@ import org.apache.hadoop.yarn.server.security.ApplicationACLsManager; import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.util.ConverterUtils; +import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -54,10 +58,19 @@ public class TestNMWebServer { private static final File testRootDir = new File("target", TestNMWebServer.class.getSimpleName()); + private static File testLogDir = new File("target", + TestNMWebServer.class.getSimpleName() + "LogDir"); @Before public void setup() { testRootDir.mkdirs(); + testLogDir.mkdir(); + } + + @After + public void tearDown() { + FileUtil.fullyDelete(testRootDir); + FileUtil.fullyDelete(testLogDir); } @Test @@ -74,9 +87,14 @@ public long getPmemAllocatedForContainers() { } }; Configuration conf = new Configuration(); - WebServer server = new WebServer(nmContext, resourceView, - new ApplicationACLsManager(conf)); 
conf.set(YarnConfiguration.NM_LOCAL_DIRS, testRootDir.getAbsolutePath()); + conf.set(YarnConfiguration.NM_LOG_DIRS, testLogDir.getAbsolutePath()); + NodeHealthCheckerService healthChecker = new NodeHealthCheckerService(); + healthChecker.init(conf); + LocalDirsHandlerService dirsHandler = healthChecker.getDiskHandler(); + + WebServer server = new WebServer(nmContext, resourceView, + new ApplicationACLsManager(conf), dirsHandler); server.init(conf); server.start(); @@ -119,20 +137,20 @@ public ContainerState getContainerState() { containerId.getApplicationAttemptId().getApplicationId(); nmContext.getApplications().get(applicationId).getContainers() .put(containerId, container); - writeContainerLogs(conf, nmContext, containerId); + writeContainerLogs(nmContext, containerId, dirsHandler); } // TODO: Pull logs and test contents. // Thread.sleep(1000000); } - private void writeContainerLogs(Configuration conf, Context nmContext, - ContainerId containerId) + private void writeContainerLogs(Context nmContext, + ContainerId containerId, LocalDirsHandlerService dirsHandler) throws IOException { // ContainerLogDir should be created File containerLogDir = - ContainerLogsPage.ContainersLogsBlock.getContainerLogDirs(conf, - containerId).get(0); + ContainerLogsPage.ContainersLogsBlock.getContainerLogDirs(containerId, + dirsHandler).get(0); containerLogDir.mkdirs(); for (String fileType : new String[] { "stdout", "stderr", "syslog" }) { Writer writer = new FileWriter(new File(containerLogDir, fileType)); diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java index 53a891366fcda..ae35de0ac1331 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java @@ -23,7 +23,6 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.NodeHealthCheckerService; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.Path; @@ -41,6 +40,7 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerRequest; import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerResponse; import org.apache.hadoop.yarn.server.nodemanager.Context; +import org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService; import org.apache.hadoop.yarn.server.nodemanager.NodeManager; import org.apache.hadoop.yarn.server.nodemanager.NodeStatusUpdater; import org.apache.hadoop.yarn.server.nodemanager.NodeStatusUpdaterImpl; @@ -51,7 +51,6 @@ import org.apache.hadoop.yarn.server.security.ContainerTokenSecretManager; import org.apache.hadoop.yarn.service.AbstractService; import org.apache.hadoop.yarn.service.CompositeService; -import org.apache.hadoop.yarn.service.Service.STATE; public class MiniYARNCluster extends CompositeService { @@ -69,13 +68,23 @@ public class MiniYARNCluster extends CompositeService { private File testWorkDir; - public MiniYARNCluster(String testName) { - //default number of nodeManagers = 1 - this(testName, 1); - } + // Number of nm-local-dirs per nodemanager + private int 
numLocalDirs; + // Number of nm-log-dirs per nodemanager + private int numLogDirs; + + /** + * @param testName name of the test + * @param noOfNodeManagers the number of node managers in the cluster + * @param numLocalDirs the number of nm-local-dirs per nodemanager + * @param numLogDirs the number of nm-log-dirs per nodemanager + */ + public MiniYARNCluster(String testName, int noOfNodeManagers, + int numLocalDirs, int numLogDirs) { - public MiniYARNCluster(String testName, int noOfNodeManagers) { super(testName); + this.numLocalDirs = numLocalDirs; + this.numLogDirs = numLogDirs; this.testWorkDir = new File("target", testName); try { FileContext.getLocalFSFileContext().delete( @@ -166,25 +175,39 @@ public synchronized void init(Configuration conf) { super.init(config); } + /** + * Create local/log directories + * @param dirType type of directories i.e. local dirs or log dirs + * @param numDirs number of directories + * @return the created directories as a comma delimited String + */ + private String prepareDirs(String dirType, int numDirs) { + File []dirs = new File[numDirs]; + String dirsString = ""; + for (int i = 0; i < numDirs; i++) { + dirs[i]= new File(testWorkDir, MiniYARNCluster.this.getName() + + "-" + dirType + "Dir-nm-" + index + "_" + i); + dirs[i].mkdir(); + LOG.info("Created " + dirType + "Dir in " + dirs[i].getAbsolutePath()); + String delimiter = (i > 0) ? "," : ""; + dirsString = dirsString.concat(delimiter + dirs[i].getAbsolutePath()); + } + return dirsString; + } + public synchronized void start() { try { - File localDir = new File(testWorkDir, MiniYARNCluster.this.getName() - + "-localDir-nm-" + index); - localDir.mkdir(); - LOG.info("Created localDir in " + localDir.getAbsolutePath()); - getConfig().set(YarnConfiguration.NM_LOCAL_DIRS, - localDir.getAbsolutePath()); - File logDir = - new File(testWorkDir, MiniYARNCluster.this.getName() - + "-logDir-nm-" + index); + // create nm-local-dirs and configure them for the nodemanager + String localDirsString = prepareDirs("local", numLocalDirs); + getConfig().set(YarnConfiguration.NM_LOCAL_DIRS, localDirsString); + // create nm-log-dirs and configure them for the nodemanager + String logDirsString = prepareDirs("log", numLogDirs); + getConfig().set(YarnConfiguration.NM_LOG_DIRS, logDirsString); + File remoteLogDir = new File(testWorkDir, MiniYARNCluster.this.getName() + "-remoteLogDir-nm-" + index); - logDir.mkdir(); remoteLogDir.mkdir(); - LOG.info("Created logDir in " + logDir.getAbsolutePath()); - getConfig().set(YarnConfiguration.NM_LOG_DIRS, - logDir.getAbsolutePath()); getConfig().set(YarnConfiguration.NM_REMOTE_APP_LOG_DIR, remoteLogDir.getAbsolutePath()); // By default AM + 2 containers diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java index 9fe914d87603b..765234665f9e1 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java @@ -117,7 +117,7 @@ public static void setup() throws AccessControlException, conf.setLong(YarnConfiguration.RM_AM_EXPIRY_INTERVAL_MS, 
100000L); UserGroupInformation.setConfiguration(conf); yarnCluster = new MiniYARNCluster(TestContainerManagerSecurity.class - .getName()); + .getName(), 1, 1, 1); yarnCluster.init(conf); yarnCluster.start(); } diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestDiskFailures.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestDiskFailures.java new file mode 100644 index 0000000000000..67755f189aeb2 --- /dev/null +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestDiskFailures.java @@ -0,0 +1,247 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileContext; +import org.apache.hadoop.fs.FileUtil; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.UnsupportedFileSystemException; +import org.apache.hadoop.security.AccessControlException; +import org.apache.hadoop.util.StringUtils; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.server.MiniYARNCluster; +import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService; +import org.apache.hadoop.yarn.server.nodemanager.NodeManager; +import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.util.Iterator; +import java.util.List; + +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import junit.framework.Assert; + +/** + * Verify if NodeManager's in-memory good local dirs list and good log dirs list + * get updated properly when disks(nm-local-dirs and nm-log-dirs) fail. Also + * verify if the overall health status of the node gets updated properly when + * specified percentage of disks fail. 
+ */ +public class TestDiskFailures { + + private static final Log LOG = LogFactory.getLog(TestDiskFailures.class); + + private static final long DISK_HEALTH_CHECK_INTERVAL = 1000;//1 sec + + private static FileContext localFS = null; + private static final File testDir = new File("target", + TestDiskFailures.class.getName()).getAbsoluteFile(); + private static final File localFSDirBase = new File(testDir, + TestDiskFailures.class.getName() + "-localDir"); + private static final int numLocalDirs = 4; + private static final int numLogDirs = 4; + + private static MiniYARNCluster yarnCluster; + LocalDirsHandlerService dirsHandler; + + @BeforeClass + public static void setup() throws AccessControlException, + FileNotFoundException, UnsupportedFileSystemException, IOException { + localFS = FileContext.getLocalFSFileContext(); + localFS.delete(new Path(localFSDirBase.getAbsolutePath()), true); + localFSDirBase.mkdirs(); + // Do not start cluster here + } + + @AfterClass + public static void teardown() { + if (yarnCluster != null) { + yarnCluster.stop(); + yarnCluster = null; + } + FileUtil.fullyDelete(localFSDirBase); + } + + /** + * Make local-dirs fail/inaccessible and verify if NodeManager can + * recognize the disk failures properly and can update the list of + * local-dirs accordingly with good disks. Also verify the overall + * health status of the node. + * @throws IOException + */ + @Test + public void testLocalDirsFailures() throws IOException { + testDirsFailures(true); + } + + /** + * Make log-dirs fail/inaccessible and verify if NodeManager can + * recognize the disk failures properly and can update the list of + * log-dirs accordingly with good disks. Also verify the overall health + * status of the node. + * @throws IOException + */ + @Test + public void testLogDirsFailures() throws IOException { + testDirsFailures(false); + } + + private void testDirsFailures(boolean localORLogDirs) throws IOException { + String dirType = localORLogDirs ? "local" : "log"; + String dirsProperty = localORLogDirs ? YarnConfiguration.NM_LOCAL_DIRS + : YarnConfiguration.NM_LOG_DIRS; + + Configuration conf = new Configuration(); + // set disk health check interval to a small value (say 1 sec). + conf.setLong(YarnConfiguration.NM_DISK_HEALTH_CHECK_INTERVAL_MS, + DISK_HEALTH_CHECK_INTERVAL); + + // If 2 out of the total 4 local-dirs fail OR if 2 Out of the total 4 + // log-dirs fail, then the node's health status should become unhealthy. + conf.setFloat(YarnConfiguration.NM_MIN_HEALTHY_DISKS_FRACTION, 0.60F); + + if (yarnCluster != null) { + yarnCluster.stop(); + FileUtil.fullyDelete(localFSDirBase); + localFSDirBase.mkdirs(); + } + LOG.info("Starting up YARN cluster"); + yarnCluster = new MiniYARNCluster(TestDiskFailures.class.getName(), + 1, numLocalDirs, numLogDirs); + yarnCluster.init(conf); + yarnCluster.start(); + + NodeManager nm = yarnCluster.getNodeManager(0); + LOG.info("Configured nm-" + dirType + "-dirs=" + + nm.getConfig().get(dirsProperty)); + dirsHandler = nm.getNodeHealthChecker().getDiskHandler(); + List<String> list = localORLogDirs ? 
dirsHandler.getLocalDirs() + : dirsHandler.getLogDirs(); + String[] dirs = list.toArray(new String[list.size()]); + Assert.assertEquals("Number of nm-" + dirType + "-dirs is wrong.", + numLocalDirs, dirs.length); + String expectedDirs = StringUtils.join(",", list); + // validate the health of disks initially + verifyDisksHealth(localORLogDirs, expectedDirs, true); + + // Make 1 nm-local-dir fail and verify if "the nodemanager can identify + // the disk failure(s) and can update the list of good nm-local-dirs. + prepareDirToFail(dirs[2]); + expectedDirs = dirs[0] + "," + dirs[1] + "," + + dirs[3]; + verifyDisksHealth(localORLogDirs, expectedDirs, true); + + // Now, make 1 more nm-local-dir/nm-log-dir fail and verify if "the + // nodemanager can identify the disk failures and can update the list of + // good nm-local-dirs/nm-log-dirs and can update the overall health status + // of the node to unhealthy". + prepareDirToFail(dirs[0]); + expectedDirs = dirs[1] + "," + dirs[3]; + verifyDisksHealth(localORLogDirs, expectedDirs, false); + + // Fail the remaining 2 local-dirs/log-dirs and verify if NM remains with + // empty list of local-dirs/log-dirs and the overall health status is + // unhealthy. + prepareDirToFail(dirs[1]); + prepareDirToFail(dirs[3]); + expectedDirs = ""; + verifyDisksHealth(localORLogDirs, expectedDirs, false); + } + + /** + * Wait for the NodeManger to go for the disk-health-check at least once. + */ + private void waitForDiskHealthCheck() { + long lastDisksCheckTime = dirsHandler.getLastDisksCheckTime(); + long time = lastDisksCheckTime; + for (int i = 0; i < 10 && (time <= lastDisksCheckTime); i++) { + try { + Thread.sleep(1000); + } catch(InterruptedException e) { + LOG.error( + "Interrupted while waiting for NodeManager's disk health check."); + } + time = dirsHandler.getLastDisksCheckTime(); + } + } + + /** + * Verify if the NodeManager could identify disk failures. + * @param localORLogDirs <em>true</em> represent nm-local-dirs and <em>false + * </em> means nm-log-dirs + * @param expectedDirs expected nm-local-dirs/nm-log-dirs as a string + * @param isHealthy <em>true</em> if the overall node should be healthy + */ + private void verifyDisksHealth(boolean localORLogDirs, String expectedDirs, + boolean isHealthy) { + // Wait for the NodeManager to identify disk failures. + waitForDiskHealthCheck(); + + List<String> list = localORLogDirs ? 
dirsHandler.getLocalDirs() + : dirsHandler.getLogDirs(); + String seenDirs = StringUtils.join(",", list); + LOG.info("ExpectedDirs=" + expectedDirs); + LOG.info("SeenDirs=" + seenDirs); + Assert.assertTrue("NodeManager could not identify disk failure.", + expectedDirs.equals(seenDirs)); + + Assert.assertEquals("Node's health in terms of disks is wrong", + isHealthy, dirsHandler.areDisksHealthy()); + for (int i = 0; i < 10; i++) { + Iterator<RMNode> iter = yarnCluster.getResourceManager().getRMContext() + .getRMNodes().values().iterator(); + if (iter.next().getNodeHealthStatus().getIsNodeHealthy() == isHealthy) { + break; + } + // wait for the node health info to go to RM + try { + Thread.sleep(1000); + } catch(InterruptedException e) { + LOG.error("Interrupted while waiting for NM->RM heartbeat."); + } + } + Iterator<RMNode> iter = yarnCluster.getResourceManager().getRMContext() + .getRMNodes().values().iterator(); + Assert.assertEquals("RM is not updated with the health status of a node", + isHealthy, iter.next().getNodeHealthStatus().getIsNodeHealthy()); + } + + /** + * Prepare directory for a failure: Replace the given directory on the + * local FileSystem with a regular file with the same name. + * This would cause failure of creation of directory in DiskChecker.checkDir() + * with the same name. + * @param dir the directory to be failed + * @throws IOException + */ + private void prepareDirToFail(String dir) throws IOException { + File file = new File(dir); + FileUtil.fullyDelete(file); + file.createNewFile(); + LOG.info("Prepared " + dir + " to fail."); + } +} diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/ClusterSetup.apt.vm b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/ClusterSetup.apt.vm index 4643faecbd98b..079c54b48b0e7 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/ClusterSetup.apt.vm +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/ClusterSetup.apt.vm @@ -398,6 +398,15 @@ Hadoop MapReduce Next Generation - Cluster Setup | | | Timeout for health script execution. | *-------------------------+-------------------------+------------------------+ + The health checker script is not supposed to give ERROR if only some of the + local disks become bad. NodeManager has the ability to periodically check + the health of the local disks (specifically checks nodemanager-local-dirs + and nodemanager-log-dirs) and after reaching the threshold of number of + bad directories based on the value set for the config property + yarn.nodemanager.disk-health-checker.min-healthy-disks. The boot disk is + either raided or a failure in the boot disk is identified by the health + checker script. + * {Slaves file} Typically you choose one machine in the cluster to act as the NameNode and @@ -874,13 +883,6 @@ KVNO Timestamp Principal *-------------------------+-------------------------+------------------------+ || Parameter || Value || Notes | *-------------------------+-------------------------+------------------------+ -| <<<yarn.nodemanager.local-dirs>>> | | -| | Comma-separated list of NodeManager local directories. | | -| | | Paths to NodeManager local directories. Should be same as the value | -| | | which was provided to key in <<<conf/yarn-site.xml>>>. This is | -| | | required to validate paths passed to the setuid executable in order | -| | to prevent arbitrary paths being passed to it. 
| -*-------------------------+-------------------------+------------------------+ | <<<yarn.nodemanager.linux-container-executor.group>>> | <hadoop> | | | | | Unix group of the NodeManager. The group owner of the | | | |<container-executor> binary should be this group. Should be same as the | @@ -888,14 +890,6 @@ KVNO Timestamp Principal | | | required for validating the secure access of the <container-executor> | | | | binary. | *-------------------------+-------------------------+------------------------+ -| <<<yarn.nodemanager.log-dirs>>> | | -| | Comma-separated list of NodeManager log directories. | | -| | | Paths to NodeManager log directories. Should be same as the value | -| | | which was provided to key in <<<conf/yarn-site.xml>>>. This is | -| | | required to set proper permissions on the log files so that they can | -| | | be written to by the user's containers and read by the NodeManager for | -| | | <log aggregation>. | -*-------------------------+-------------------------+------------------------+ | <<<banned.users>>> | hfds,yarn,mapred,bin | Banned users. | *-------------------------+-------------------------+------------------------+ | <<<min.user.id>>> | 1000 | Prevent other super-users. |
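The TestDiskFailures test above simulates a failed disk by replacing a local directory with a regular file of the same name, so that directory checks on that path fail. A minimal standalone sketch of the same idiom, assuming Hadoop's DiskChecker utility is on the classpath (the class name and the target path are illustrative, not part of the commit):

    import java.io.File;
    import java.io.IOException;

    import org.apache.hadoop.fs.FileUtil;
    import org.apache.hadoop.util.DiskChecker;
    import org.apache.hadoop.util.DiskChecker.DiskErrorException;

    public class DiskFailureSketch {
        public static void main(String[] args) throws IOException {
            File dir = new File("target", "fail-me"); // illustrative location
            dir.mkdirs();

            // Replace the directory with a plain file of the same name, the
            // same trick prepareDirToFail() uses in TestDiskFailures above.
            FileUtil.fullyDelete(dir);
            dir.createNewFile();

            try {
                DiskChecker.checkDir(dir); // path exists but is not a directory
            } catch (DiskErrorException e) {
                System.out.println("Disk check failed as expected: " + e.getMessage());
            }
        }
    }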
bee0854d90822fe0d879ae12aed4de362a74ceec
camel
Fixed tests using released version.

git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@991460 13f79535-47bb-0310-9956-ffa450edef68
c
https://github.com/apache/camel
diff --git a/tests/camel-itest-karaf/src/test/java/org/apache/camel/itest/karaf/AbstractFeatureTest.java b/tests/camel-itest-karaf/src/test/java/org/apache/camel/itest/karaf/AbstractFeatureTest.java index a96da63b12f1d..9579d5b012529 100644 --- a/tests/camel-itest-karaf/src/test/java/org/apache/camel/itest/karaf/AbstractFeatureTest.java +++ b/tests/camel-itest-karaf/src/test/java/org/apache/camel/itest/karaf/AbstractFeatureTest.java @@ -159,7 +159,7 @@ public static Option[] configure(String feature, FrameworkOption framework) { //need to install some karaf features mavenBundle("org.apache.felix", "org.apache.felix.configadmin").versionAsInProject(), - mavenBundle("org.apache.servicemix.bundles", "org.apache.servicemix.bundles.jaxp-ri").version("1.4.2_4-SNAPSHOT"), + mavenBundle("org.apache.servicemix.bundles", "org.apache.servicemix.bundles.jaxp-ri").version("1.4.2_4"), scanFeatures(getCamelKarafFeatureUrl(), "camel-spring", "camel-" + feature),
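The one-line fix above pins the released 1.4.2_4 servicemix jaxp-ri bundle instead of a SNAPSHOT, so the Karaf integration test no longer depends on a locally built artifact. A hedged sketch of the same pax-exam provisioning pattern (the helper class is illustrative; mavenBundle and options are the standard org.ops4j.pax.exam CoreOptions):

    import static org.ops4j.pax.exam.CoreOptions.mavenBundle;
    import static org.ops4j.pax.exam.CoreOptions.options;

    import org.ops4j.pax.exam.Option;

    public class ProvisioningSketch {
        // Provision the released bundle; a fixed version keeps the test
        // reproducible, unlike a SNAPSHOT that changes underneath it.
        public static Option[] bundles() {
            return options(
                mavenBundle("org.apache.servicemix.bundles",
                    "org.apache.servicemix.bundles.jaxp-ri").version("1.4.2_4"));
        }
    }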
6383157b2961749ab46b8d05653ffb5f757f0be8
ReactiveX-RxJava
Remove Unnecessary Subscription

- be explicit for error case in JoinObserver
p
https://github.com/ReactiveX/RxJava
diff --git a/rxjava-core/src/main/java/rx/joins/JoinObserver1.java b/rxjava-core/src/main/java/rx/joins/JoinObserver1.java index 873d3d1a7f..ede55f0584 100644 --- a/rxjava-core/src/main/java/rx/joins/JoinObserver1.java +++ b/rxjava-core/src/main/java/rx/joins/JoinObserver1.java @@ -19,9 +19,11 @@ import java.util.LinkedList; import java.util.List; import java.util.Queue; +import java.util.concurrent.atomic.AtomicBoolean; + import rx.Notification; import rx.Observable; -import rx.subscriptions.SingleAssignmentSubscription; +import rx.operators.SafeObservableSubscription; import rx.util.functions.Action1; /** @@ -33,14 +35,15 @@ public final class JoinObserver1<T> extends ObserverBase<Notification<T>> implem private final Action1<Throwable> onError; private final List<ActivePlan0> activePlans; private final Queue<Notification<T>> queue; - private final SingleAssignmentSubscription subscription; + private final SafeObservableSubscription subscription; private volatile boolean done; + private final AtomicBoolean subscribed = new AtomicBoolean(false); public JoinObserver1(Observable<T> source, Action1<Throwable> onError) { this.source = source; this.onError = onError; queue = new LinkedList<Notification<T>>(); - subscription = new SingleAssignmentSubscription(); + subscription = new SafeObservableSubscription(); activePlans = new ArrayList<ActivePlan0>(); } public Queue<Notification<T>> queue() { @@ -51,8 +54,12 @@ public void addActivePlan(ActivePlan0 activePlan) { } @Override public void subscribe(Object gate) { - this.gate = gate; - subscription.set(source.materialize().subscribe(this)); + if (subscribed.compareAndSet(false, true)) { + this.gate = gate; + subscription.wrap(source.materialize().subscribe(this)); + } else { + throw new IllegalStateException("Can only be subscribed to once."); + } } @Override diff --git a/rxjava-core/src/main/java/rx/subscriptions/SingleAssignmentSubscription.java b/rxjava-core/src/main/java/rx/subscriptions/SingleAssignmentSubscription.java deleted file mode 100644 index c960db2ea4..0000000000 --- a/rxjava-core/src/main/java/rx/subscriptions/SingleAssignmentSubscription.java +++ /dev/null @@ -1,81 +0,0 @@ -/** - * Copyright 2013 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package rx.subscriptions; - -import java.util.concurrent.atomic.AtomicReference; -import rx.Subscription; - -/** - * A subscription that allows only a single resource to be assigned. - * <p> - * If this subscription is live, no other subscription may be set() and - * yields an {@link IllegalStateException}. - * <p> - * If the unsubscribe has been called, setting a new subscription will - * unsubscribe it immediately. - */ -public final class SingleAssignmentSubscription implements Subscription { - /** Holds the current resource. */ - private final AtomicReference<Subscription> current = new AtomicReference<Subscription>(); - /** Sentinel for the unsubscribed state. 
*/ - private static final Subscription UNSUBSCRIBED_SENTINEL = new Subscription() { - @Override - public void unsubscribe() { - } - }; - /** - * Returns the current subscription or null if not yet set. - */ - public Subscription get() { - Subscription s = current.get(); - if (s == UNSUBSCRIBED_SENTINEL) { - return Subscriptions.empty(); - } - return s; - } - /** - * Sets a new subscription if not already set. - * @param s the new subscription - * @throws IllegalStateException if this subscription is live and contains - * another subscription. - */ - public void set(Subscription s) { - if (current.compareAndSet(null, s)) { - return; - } - if (current.get() != UNSUBSCRIBED_SENTINEL) { - throw new IllegalStateException("Subscription already set"); - } - if (s != null) { - s.unsubscribe(); - } - } - @Override - public void unsubscribe() { - Subscription old = current.getAndSet(UNSUBSCRIBED_SENTINEL); - if (old != null) { - old.unsubscribe(); - } - } - /** - * Test if this subscription is already unsubscribed. - */ - public boolean isUnsubscribed() { - return current.get() == UNSUBSCRIBED_SENTINEL; - } - -}
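The replacement code guards subscribe() with an AtomicBoolean so that the one-time-subscription contract is explicit: the first caller wins and any later caller gets an IllegalStateException instead of a silently discarded subscription. A minimal sketch of that compare-and-set idiom, assuming nothing beyond java.util.concurrent (the class and method names are illustrative):

    import java.util.concurrent.atomic.AtomicBoolean;

    public final class SingleUseGuard {
        private final AtomicBoolean subscribed = new AtomicBoolean(false);

        // compareAndSet is atomic: exactly one caller can flip false -> true,
        // so the one-time work runs once and later callers fail loudly.
        public void subscribeOnce(Runnable doSubscribe) {
            if (subscribed.compareAndSet(false, true)) {
                doSubscribe.run();
            } else {
                throw new IllegalStateException("Can only be subscribed to once.");
            }
        }
    }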
016918b2e276f95a7e8868dc6cd00fc3ca6fb71c
camel
CAMEL-870: Added transferExchange option to camel-jms.

git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@756685 13f79535-47bb-0310-9956-ffa450edef68
a
https://github.com/apache/camel
diff --git a/camel-core/src/main/java/org/apache/camel/component/mock/MockEndpoint.java b/camel-core/src/main/java/org/apache/camel/component/mock/MockEndpoint.java index 8c00e9ef54d63..0c2c20116682a 100644 --- a/camel-core/src/main/java/org/apache/camel/component/mock/MockEndpoint.java +++ b/camel-core/src/main/java/org/apache/camel/component/mock/MockEndpoint.java @@ -76,6 +76,9 @@ public class MockEndpoint extends DefaultEndpoint implements BrowsableEndpoint { private String headerName; private Object headerValue; private Object actualHeader; + private String propertyName; + private Object propertyValue; + private Object actualProperty; private Processor reporter; public MockEndpoint(String endpointUri, Component component) { @@ -322,6 +325,24 @@ public void run() { }); } + /** + * Adds an expectation that the given property name & value are received by this + * endpoint + */ + public void expectedPropertyReceived(final String name, final Object value) { + this.propertyName = name; + this.propertyValue = value; + + expects(new Runnable() { + public void run() { + assertTrue("No property with name " + propertyName + " found.", actualProperty != null); + + Object actualValue = getCamelContext().getTypeConverter().convertTo(actualProperty.getClass(), propertyValue); + assertEquals("Property of message", actualValue, actualProperty); + } + }); + } + /** * Adds an expectation that the given body values are received by this * endpoint in the specified order @@ -725,6 +746,10 @@ protected void performAssertions(Exchange exchange) throws Exception { actualHeader = in.getHeader(headerName); } + if (propertyName != null) { + actualProperty = exchange.getProperty(propertyName); + } + if (expectedBodyValues != null) { int index = actualBodyValues.size(); if (expectedBodyValues.size() > index) { diff --git a/components/camel-mina/src/main/java/org/apache/camel/component/mina/MinaPayloadHolder.java b/camel-core/src/main/java/org/apache/camel/impl/DefaultExchangeHolder.java similarity index 72% rename from components/camel-mina/src/main/java/org/apache/camel/component/mina/MinaPayloadHolder.java rename to camel-core/src/main/java/org/apache/camel/impl/DefaultExchangeHolder.java index 1dd038c506fed..6eef003093f7b 100644 --- a/components/camel-mina/src/main/java/org/apache/camel/component/mina/MinaPayloadHolder.java +++ b/camel-core/src/main/java/org/apache/camel/impl/DefaultExchangeHolder.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.camel.component.mina; +package org.apache.camel.impl; import java.io.Serializable; import java.util.LinkedHashMap; @@ -25,28 +25,29 @@ import org.apache.commons.logging.LogFactory; /** - * Holder object for sending an exchange over the wire using the MINA ObjectSerializationCodecFactory codec. - * This is configured using the <tt>transferExchange=true</tt> option for the TCP protocol. + * Holder object for sending an exchange over a remote wire as a serialized object. + * This is usually configured using the <tt>transferExchange=true</tt> option on the endpoint. 
* <p/> - * As opposed to normal usage of camel-mina where only the body part of the exchange is transfered, this holder - * object serializes the following fields over the wire: + * As opposed to normal usage where only the body part of the exchange is transfered over the wire, + * this holder object serializes the following fields over the wire: * <ul> - * <li>in body</li> - * <li>out body</li> - * <li>in headers</li> - * <li>out headers</li> - * <li>fault body </li> - * <li>fault headers</li> - * <li>exchange properties</li> - * <li>exception</li> + * <li>in body</li> + * <li>out body</li> + * <li>in headers</li> + * <li>out headers</li> + * <li>fault body </li> + * <li>fault headers</li> + * <li>exchange properties</li> + * <li>exception</li> * </ul> * Any object that is not serializable will be skipped and Camel will log this at WARN level. * * @version $Revision$ */ -public class MinaPayloadHolder implements Serializable { +public class DefaultExchangeHolder implements Serializable { + private static final long serialVersionUID = 1L; - private static final transient Log LOG = LogFactory.getLog(MinaPayloadHolder.class); + private static final transient Log LOG = LogFactory.getLog(DefaultExchangeHolder.class); private Object inBody; private Object outBody; @@ -61,11 +62,11 @@ public class MinaPayloadHolder implements Serializable { * Creates a payload object with the information from the given exchange. * Only marshal the Serializable object * - * @param exchange the exchange + * @param exchange the exchange * @return the holder object with information copied form the exchange */ - public static MinaPayloadHolder marshal(Exchange exchange) { - MinaPayloadHolder payload = new MinaPayloadHolder(); + public static DefaultExchangeHolder marshal(Exchange exchange) { + DefaultExchangeHolder payload = new DefaultExchangeHolder(); payload.inBody = checkSerializableObject("in body", exchange.getIn().getBody()); payload.inHeaders.putAll(checkMapSerializableObjects("in headers", exchange.getIn().getHeaders())); @@ -86,10 +87,10 @@ public static MinaPayloadHolder marshal(Exchange exchange) { /** * Transfers the information from the payload to the exchange. 
* - * @param exchange the exchange to set values from the payload - * @param payload the payload with the values + * @param exchange the exchange to set values from the payload + * @param payload the payload with the values */ - public static void unmarshal(Exchange exchange, MinaPayloadHolder payload) { + public static void unmarshal(Exchange exchange, DefaultExchangeHolder payload) { exchange.getIn().setBody(payload.inBody); exchange.getIn().setHeaders(payload.inHeaders); if (payload.outBody != null) { @@ -107,16 +108,19 @@ public static void unmarshal(Exchange exchange, MinaPayloadHolder payload) { } public String toString() { - return "MinaPayloadHolder{" + "inBody=" + inBody + ", outBody=" + outBody + ", inHeaders=" - + inHeaders + ", outHeaders=" + outHeaders + ", faultBody=" + faultBody + ", faultHeaders=" - + faultHeaders + ", properties=" + properties + ", exception=" + exception + '}'; + StringBuilder sb = new StringBuilder("DefaultExchangeHolder["); + sb.append("inBody=").append(inBody).append(", outBody=").append(outBody); + sb.append(", inHeaders=").append(inHeaders).append(", outHeaders=").append(outHeaders); + sb.append(", faultBody=").append(faultBody).append(", faultHeaders=").append(faultHeaders); + sb.append(", properties=").append(properties).append(", exception=").append(exception); + return sb.append(']').toString(); } private static Object checkSerializableObject(String type, Object object) { if (object instanceof Serializable) { return object; } else { - LOG.warn(type + " containig object " + object + " cannot be serialized, it will be excluded by the MinaPayloadHolder"); + LOG.warn(type + " containig object " + object + " cannot be serialized, it will be excluded by the holder"); return null; } } @@ -132,7 +136,7 @@ private static Map<String, Object> checkMapSerializableObjects(String type, Map< result.put(entry.getKey(), entry.getValue()); } else { LOG.warn(type + " containing object " + entry.getValue() + " of key " + entry.getKey() - + " cannot be serialized, it will be excluded by the MinaPayloadHolder"); + + " cannot be serialized, it will be excluded by the holder"); } } diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsBinding.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsBinding.java index 21f9eb455b9e2..a176b864e077d 100644 --- a/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsBinding.java +++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsBinding.java @@ -43,6 +43,7 @@ import org.apache.camel.Exchange; import org.apache.camel.RuntimeCamelException; import org.apache.camel.component.file.GenericFile; +import org.apache.camel.impl.DefaultExchangeHolder; import org.apache.camel.spi.HeaderFilterStrategy; import org.apache.camel.util.CamelContextHelper; import org.apache.camel.util.ExchangeHelper; @@ -88,7 +89,14 @@ public Object extractBodyFromJms(Exchange exchange, Message message) { try { if (message instanceof ObjectMessage) { ObjectMessage objectMessage = (ObjectMessage)message; - return objectMessage.getObject(); + Object payload = objectMessage.getObject(); + if (payload instanceof DefaultExchangeHolder) { + DefaultExchangeHolder holder = (DefaultExchangeHolder) payload; + DefaultExchangeHolder.unmarshal(exchange, holder); + return exchange.getIn().getBody(); + } else { + return objectMessage.getObject(); + } } else if (message instanceof TextMessage) { TextMessage textMessage = (TextMessage)message; return textMessage.getText(); @@ -194,7 +202,7 
@@ public Message makeJmsMessage(Exchange exchange, org.apache.camel.Message camelM } } if (answer == null) { - answer = createJmsMessage(camelMessage.getBody(), camelMessage.getHeaders(), session, exchange.getContext()); + answer = createJmsMessage(exchange, camelMessage.getBody(), camelMessage.getHeaders(), session, exchange.getContext()); appendJmsProperties(answer, exchange, camelMessage); } return answer; @@ -288,9 +296,18 @@ protected Object getValidJMSHeaderValue(String headerName, Object headerValue) { return null; } - protected Message createJmsMessage(Object body, Map<String, Object> headers, Session session, CamelContext context) throws JMSException { + protected Message createJmsMessage(Exchange exchange, Object body, Map<String, Object> headers, Session session, CamelContext context) throws JMSException { JmsMessageType type = null; + // special for transferExchange + if (endpoint != null && endpoint.isTransferExchange()) { + if (LOG.isDebugEnabled()) { + LOG.debug("Option transferExchange=true so we use JmsMessageType: Object"); + } + Serializable holder = DefaultExchangeHolder.marshal(exchange); + return session.createObjectMessage(holder); + } + // check if header have a type set, if so we force to use it if (headers.containsKey(JmsConstants.JMS_MESSAGE_TYPE)) { type = context.getTypeConverter().convertTo(JmsMessageType.class, headers.get(JmsConstants.JMS_MESSAGE_TYPE)); diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsConfiguration.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsConfiguration.java index 247a5dbba42f3..38cdc5586b451 100644 --- a/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsConfiguration.java +++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsConfiguration.java @@ -133,6 +133,7 @@ public class JmsConfiguration implements Cloneable { private String replyToDestinationSelectorName; private JmsMessageType jmsMessageType; private JmsKeyFormatStrategy jmsKeyFormatStrategy; + private boolean transferExchange; public JmsConfiguration() { } @@ -1165,4 +1166,12 @@ public JmsKeyFormatStrategy getJmsKeyFormatStrategy() { public void setJmsKeyFormatStrategy(JmsKeyFormatStrategy jmsKeyFormatStrategy) { this.jmsKeyFormatStrategy = jmsKeyFormatStrategy; } + + public boolean isTransferExchange() { + return transferExchange; + } + + public void setTransferExchange(boolean transferExchange) { + this.transferExchange = transferExchange; + } } diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsEndpoint.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsEndpoint.java index f3e7500a0c97d..28df7f988e07d 100644 --- a/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsEndpoint.java +++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsEndpoint.java @@ -779,6 +779,14 @@ public void setJmsKeyFormatStrategy(JmsKeyFormatStrategy jmsHeaderStrategy) { getConfiguration().setJmsKeyFormatStrategy(jmsHeaderStrategy); } + public boolean isTransferExchange() { + return getConfiguration().isTransferExchange(); + } + + public void setTransferExchange(boolean transferExchange) { + getConfiguration().setTransferExchange(transferExchange); + } + // Implementation methods //------------------------------------------------------------------------- diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsDeadLetterQueueTest.java 
b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsDeadLetterQueueTest.java new file mode 100644 index 0000000000000..f6eb502f91331 --- /dev/null +++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsDeadLetterQueueTest.java @@ -0,0 +1,101 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.camel.component.jms; + +import javax.jms.ConnectionFactory; + +import org.apache.activemq.ActiveMQConnectionFactory; +import org.apache.camel.CamelContext; +import org.apache.camel.ContextTestSupport; +import org.apache.camel.Exchange; +import org.apache.camel.Processor; +import org.apache.camel.RuntimeCamelException; +import org.apache.camel.builder.RouteBuilder; +import org.apache.camel.component.mock.MockEndpoint; +import static org.apache.camel.component.jms.JmsComponent.jmsComponentClientAcknowledge; + +/** + * Unit test for using JMS as DLQ + * + * @version $Revision$ + */ +public class JmsDeadLetterQueueTest extends ContextTestSupport { + + protected String getUri() { + return "activemq:queue:dead"; + } + + public void testOk() throws Exception { + MockEndpoint mock = getMockEndpoint("mock:result"); + mock.expectedBodiesReceived("Hello World"); + + template.sendBody("direct:start", "Hello World"); + + assertMockEndpointsSatisfied(); + } + + public void testKabom() throws Exception { + MockEndpoint mock = getMockEndpoint("mock:dead"); + mock.expectedBodiesReceived("Kabom"); + + try { + template.sendBody("direct:start", "Kabom"); + fail("Should have thrown a RuntimeCamelException"); + } catch (RuntimeCamelException e) { + assertEquals("Kabom", e.getCause().getMessage()); + } + + assertMockEndpointsSatisfied(); + + // the cause exception is gone in the transformation below + assertNull(mock.getReceivedExchanges().get(0).getProperty(Exchange.EXCEPTION_CAUGHT)); + + } + + protected CamelContext createCamelContext() throws Exception { + CamelContext camelContext = super.createCamelContext(); + + ConnectionFactory connectionFactory = new ActiveMQConnectionFactory("vm://localhost?broker.persistent=false"); + camelContext.addComponent("activemq", jmsComponentClientAcknowledge(connectionFactory)); + + return camelContext; + } + + @Override + protected RouteBuilder createRouteBuilder() throws Exception { + return new RouteBuilder() { + @Override + public void configure() throws Exception { + errorHandler(deadLetterChannel("seda:dead").disableRedelivery()); + + from("direct:start").process(new Processor() { + public void process(Exchange exchange) throws Exception { + String body = exchange.getIn().getBody(String.class); + if ("Kabom".equals(body)) { + throw new IllegalArgumentException("Kabom"); + } + } + }).to("mock:result"); + + from("seda:dead").transform(exceptionMessage()).to(getUri()); + + 
from(getUri()).to("mock:dead"); + } + }; + } + +} \ No newline at end of file diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsDeadLetterQueueUsingTransferExchangeTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsDeadLetterQueueUsingTransferExchangeTest.java new file mode 100644 index 0000000000000..da85673d5ed59 --- /dev/null +++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsDeadLetterQueueUsingTransferExchangeTest.java @@ -0,0 +1,99 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.camel.component.jms; + +import javax.jms.ConnectionFactory; + +import org.apache.activemq.ActiveMQConnectionFactory; +import org.apache.camel.CamelContext; +import org.apache.camel.ContextTestSupport; +import org.apache.camel.Exchange; +import org.apache.camel.Processor; +import org.apache.camel.RuntimeCamelException; +import org.apache.camel.builder.RouteBuilder; +import org.apache.camel.component.mock.MockEndpoint; +import static org.apache.camel.component.jms.JmsComponent.jmsComponentClientAcknowledge; + +/** + * Unit test for using JMS as DLQ and to preserve the Exchange using transferExchange=true option + * + * @version $Revision$ + */ +public class JmsDeadLetterQueueUsingTransferExchangeTest extends ContextTestSupport { + + protected String getUri() { + return "activemq:queue:dead?transferExchange=true"; + } + + public void testOk() throws Exception { + MockEndpoint mock = getMockEndpoint("mock:result"); + mock.expectedBodiesReceived("Hello World"); + + template.sendBody("direct:start", "Hello World"); + + assertMockEndpointsSatisfied(); + } + + public void testKabom() throws Exception { + MockEndpoint mock = getMockEndpoint("mock:dead"); + mock.expectedBodiesReceived("Kabom"); + + try { + template.sendBody("direct:start", "Kabom"); + fail("Should have thrown a RuntimeCamelException"); + } catch (RuntimeCamelException e) { + assertEquals("Kabom", e.getCause().getMessage()); + } + + assertMockEndpointsSatisfied(); + + Exchange dead = mock.getReceivedExchanges().get(0); + // caused exception is stored as a property + assertEquals("Kabom", dead.getProperty(Exchange.EXCEPTION_CAUGHT, Exception.class).getMessage()); + } + + protected CamelContext createCamelContext() throws Exception { + CamelContext camelContext = super.createCamelContext(); + + ConnectionFactory connectionFactory = new ActiveMQConnectionFactory("vm://localhost?broker.persistent=false"); + camelContext.addComponent("activemq", jmsComponentClientAcknowledge(connectionFactory)); + + return camelContext; + } + + @Override + protected RouteBuilder createRouteBuilder() throws Exception { + return new RouteBuilder() { + @Override + public void configure() throws Exception { + 
errorHandler(deadLetterChannel(getUri()).disableRedelivery()); + + from("direct:start").process(new Processor() { + public void process(Exchange exchange) throws Exception { + String body = exchange.getIn().getBody(String.class); + if ("Kabom".equals(body)) { + throw new IllegalArgumentException("Kabom"); + } + } + }).to("mock:result"); + + from(getUri()).to("mock:dead"); + } + }; + } + +} \ No newline at end of file diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsTransferExchangeTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsTransferExchangeTest.java new file mode 100644 index 0000000000000..7565714b00e44 --- /dev/null +++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsTransferExchangeTest.java @@ -0,0 +1,95 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.camel.component.jms; + +import javax.jms.ConnectionFactory; + +import org.apache.activemq.ActiveMQConnectionFactory; +import org.apache.camel.CamelContext; +import org.apache.camel.ContextTestSupport; +import org.apache.camel.Exchange; +import org.apache.camel.Processor; +import org.apache.camel.builder.RouteBuilder; +import org.apache.camel.component.mock.MockEndpoint; +import static org.apache.camel.component.jms.JmsComponent.jmsComponentClientAcknowledge; + +/** + * @version $Revision$ + */ +public class JmsTransferExchangeTest extends ContextTestSupport { + + protected String getUri() { + return "activemq:queue:foo?transferExchange=true"; + } + + public void testBodyOnly() throws Exception { + MockEndpoint mock = getMockEndpoint("mock:result"); + mock.expectedBodiesReceived("Hello World"); + + template.sendBody("direct:start", "Hello World"); + + assertMockEndpointsSatisfied(); + } + + public void testBodyAndHeaderOnly() throws Exception { + MockEndpoint mock = getMockEndpoint("mock:result"); + mock.expectedBodiesReceived("Hello World"); + mock.expectedHeaderReceived("foo", "cheese"); + + template.sendBodyAndHeader("direct:start", "Hello World", "foo", "cheese"); + + assertMockEndpointsSatisfied(); + } + + public void testSendExchange() throws Exception { + MockEndpoint mock = getMockEndpoint("mock:result"); + mock.expectedBodiesReceived("Hello World"); + mock.expectedHeaderReceived("foo", "cheese"); + mock.expectedPropertyReceived("bar", 123); + + template.send("direct:start", new Processor() { + public void process(Exchange exchange) throws Exception { + exchange.getIn().setBody("Hello World"); + exchange.getIn().setHeader("foo", "cheese"); + exchange.setProperty("bar", 123); + } + }); + + assertMockEndpointsSatisfied(); + } + + protected CamelContext createCamelContext() throws Exception { + CamelContext camelContext = super.createCamelContext(); + + ConnectionFactory 
connectionFactory = new ActiveMQConnectionFactory("vm://localhost?broker.persistent=false"); + camelContext.addComponent("activemq", jmsComponentClientAcknowledge(connectionFactory)); + + return camelContext; + } + + @Override + protected RouteBuilder createRouteBuilder() throws Exception { + return new RouteBuilder() { + @Override + public void configure() throws Exception { + from("direct:start").to(getUri()); + from(getUri()).to("mock:result"); + } + }; + } + +} diff --git a/components/camel-mina/src/main/java/org/apache/camel/component/mina/MinaPayloadHelper.java b/components/camel-mina/src/main/java/org/apache/camel/component/mina/MinaPayloadHelper.java index 7c5a4befbaf21..9cae9e999e3d1 100644 --- a/components/camel-mina/src/main/java/org/apache/camel/component/mina/MinaPayloadHelper.java +++ b/components/camel-mina/src/main/java/org/apache/camel/component/mina/MinaPayloadHelper.java @@ -17,15 +17,15 @@ package org.apache.camel.component.mina; import org.apache.camel.Exchange; +import org.apache.camel.impl.DefaultExchangeHolder; /** * Helper to get and set the correct payload when transfering data using camel-mina. * Always use this helper instead of direct access on the exchange object. * <p/> * This helper ensures that we can also transfer exchange objects over the wire using the - * <tt>exchangePayload=true</tt> option. + * <tt>transferExchange=true</tt> option. * - * @see org.apache.camel.component.mina.MinaPayloadHolder * @version $Revision$ */ public final class MinaPayloadHelper { @@ -37,7 +37,7 @@ private MinaPayloadHelper() { public static Object getIn(MinaEndpoint endpoint, Exchange exchange) { if (endpoint.getConfiguration().isTransferExchange()) { // we should transfer the entire exchange over the wire (includes in/out) - return MinaPayloadHolder.marshal(exchange); + return DefaultExchangeHolder.marshal(exchange); } else { // normal transfer using the body only return exchange.getIn().getBody(); @@ -47,7 +47,7 @@ public static Object getIn(MinaEndpoint endpoint, Exchange exchange) { public static Object getOut(MinaEndpoint endpoint, Exchange exchange) { if (endpoint.getConfiguration().isTransferExchange()) { // we should transfer the entire exchange over the wire (includes in/out) - return MinaPayloadHolder.marshal(exchange); + return DefaultExchangeHolder.marshal(exchange); } else { // normal transfer using the body only return exchange.getOut().getBody(); @@ -55,8 +55,8 @@ public static Object getOut(MinaEndpoint endpoint, Exchange exchange) { } public static void setIn(Exchange exchange, Object payload) { - if (payload instanceof MinaPayloadHolder) { - MinaPayloadHolder.unmarshal(exchange, (MinaPayloadHolder) payload); + if (payload instanceof DefaultExchangeHolder) { + DefaultExchangeHolder.unmarshal(exchange, (DefaultExchangeHolder) payload); } else { // normal transfer using the body only exchange.getIn().setBody(payload); @@ -64,8 +64,8 @@ public static void setIn(Exchange exchange, Object payload) { } public static void setOut(Exchange exchange, Object payload) { - if (payload instanceof MinaPayloadHolder) { - MinaPayloadHolder.unmarshal(exchange, (MinaPayloadHolder) payload); + if (payload instanceof DefaultExchangeHolder) { + DefaultExchangeHolder.unmarshal(exchange, (DefaultExchangeHolder) payload); } else { // normal transfer using the body only and preserve the headers exchange.getOut().setHeaders(exchange.getIn().getHeaders());
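As the new tests above show, setting transferExchange=true on a JMS endpoint makes Camel serialize the whole Exchange (bodies, headers, fault data, properties and any caught exception) into a DefaultExchangeHolder carried as a JMS ObjectMessage, rather than sending the body alone. A usage sketch modeled on JmsTransferExchangeTest (the broker component name and queue are illustrative):

    import org.apache.camel.builder.RouteBuilder;

    public class TransferExchangeRoute extends RouteBuilder {
        @Override
        public void configure() throws Exception {
            // transferExchange=true serializes the full Exchange as an
            // ObjectMessage, so the consumer can rebuild headers, properties
            // and the exception, not just the message body.
            from("direct:start")
                .to("activemq:queue:foo?transferExchange=true");

            from("activemq:queue:foo?transferExchange=true")
                .to("mock:result");
        }
    }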
c5c3e019a461fedf064eef019f4536f00df61dec
restlet-framework-java
Continued support for non-blocking HTTPS to the internal NIO connectors, client-side and server-side.
a
https://github.com/restlet/restlet-framework-java
diff --git a/modules/org.restlet/src/org/restlet/engine/io/ReadableSslChannel.java b/modules/org.restlet/src/org/restlet/engine/io/ReadableSslChannel.java index 860d7de34e..eb27e8119e 100644 --- a/modules/org.restlet/src/org/restlet/engine/io/ReadableSslChannel.java +++ b/modules/org.restlet/src/org/restlet/engine/io/ReadableSslChannel.java @@ -32,11 +32,13 @@ import java.io.IOException; import java.nio.ByteBuffer; +import java.util.logging.Level; import javax.net.ssl.SSLEngineResult; import org.restlet.engine.connector.SslConnection; import org.restlet.engine.security.SslManager; +import org.restlet.engine.security.SslState; /** * SSL byte channel that unwraps all read data using the SSL/TLS protocols. It @@ -78,13 +80,18 @@ public int read(ByteBuffer dst) throws IOException { // If the packet buffer is empty, first try to refill it refill(); - if (getPacketBufferState() == BufferState.DRAINING) { + if ((getPacketBufferState() == BufferState.DRAINING) + || (getManager().getState() == SslState.HANDSHAKING)) { int dstSize = dst.remaining(); if (dstSize > 0) { - SSLEngineResult sslResult = runEngine(dst); - handleResult(sslResult, dst); - refill(); + while (getPacketBuffer().hasRemaining() + && (getConnection().getInboundWay().getIoState() != IoState.IDLE)) { + SSLEngineResult sslResult = runEngine(dst); + handleResult(sslResult, dst); + refill(); + } + result = dstSize - dst.remaining(); } } @@ -105,6 +112,11 @@ protected int refill() throws IOException { if (getPacketBufferState() == BufferState.FILLING) { result = getWrappedChannel().read(getPacketBuffer()); + if (getConnection().getLogger().isLoggable(Level.INFO)) { + getConnection().getLogger().log(Level.INFO, + "Packet bytes read: " + result); + } + if (result > 0) { setPacketBufferState(BufferState.DRAINING); getPacketBuffer().flip(); @@ -117,16 +129,20 @@ protected int refill() throws IOException { @Override protected SSLEngineResult runEngine(ByteBuffer applicationBuffer) throws IOException { + if (getConnection().getLogger().isLoggable(Level.INFO)) { + getConnection().getLogger().log(Level.INFO, + "Unwrapping bytes with: " + getPacketBuffer()); + } + SSLEngineResult result = getManager().getEngine().unwrap( getPacketBuffer(), applicationBuffer); int remaining = getPacketBuffer().remaining(); if (remaining == 0) { setPacketBufferState(BufferState.FILLING); - refill(); + getPacketBuffer().clear(); } return result; } - } diff --git a/modules/org.restlet/src/org/restlet/engine/io/SslChannel.java b/modules/org.restlet/src/org/restlet/engine/io/SslChannel.java index 3041b97b99..3194f34462 100644 --- a/modules/org.restlet/src/org/restlet/engine/io/SslChannel.java +++ b/modules/org.restlet/src/org/restlet/engine/io/SslChannel.java @@ -188,7 +188,9 @@ protected void handleResult(SSLEngineResult sslResult, protected void log(SSLEngineResult sslResult) { if (Context.getCurrentLogger().isLoggable(Level.INFO)) { Context.getCurrentLogger().log(Level.INFO, - "SSL I/O result" + sslResult); + "SSL I/O result: " + sslResult); + Context.getCurrentLogger().log(Level.INFO, + "SSL Manager: " + getManager()); } } diff --git a/modules/org.restlet/src/org/restlet/engine/io/WritableSslChannel.java b/modules/org.restlet/src/org/restlet/engine/io/WritableSslChannel.java index 1030c4ad76..fca970ba0a 100644 --- a/modules/org.restlet/src/org/restlet/engine/io/WritableSslChannel.java +++ b/modules/org.restlet/src/org/restlet/engine/io/WritableSslChannel.java @@ -36,9 +36,9 @@ import javax.net.ssl.SSLEngineResult; -import org.restlet.Context; import 
org.restlet.engine.connector.SslConnection; import org.restlet.engine.security.SslManager; +import org.restlet.engine.security.SslState; /** * SSL byte channel that wraps all application data using the SSL/TLS protocols. @@ -77,6 +77,11 @@ protected int flush() throws IOException { if (getPacketBufferState() == BufferState.DRAINING) { result = getWrappedChannel().write(getPacketBuffer()); + if (getConnection().getLogger().isLoggable(Level.INFO)) { + getConnection().getLogger().log(Level.INFO, + "Packet bytes written: " + result); + } + if (getPacketBuffer().remaining() == 0) { setPacketBufferState(BufferState.FILLING); getPacketBuffer().clear(); @@ -89,6 +94,11 @@ protected int flush() throws IOException { @Override protected SSLEngineResult runEngine(ByteBuffer applicationBuffer) throws IOException { + if (getConnection().getLogger().isLoggable(Level.INFO)) { + getConnection().getLogger().log(Level.INFO, + "Wrapping bytes with: " + getPacketBuffer()); + } + SSLEngineResult result = getManager().getEngine().wrap( applicationBuffer, getPacketBuffer()); getPacketBuffer().flip(); @@ -96,7 +106,6 @@ protected SSLEngineResult runEngine(ByteBuffer applicationBuffer) if (remaining > 0) { setPacketBufferState(BufferState.DRAINING); - flush(); } else { getPacketBuffer().clear(); } @@ -115,21 +124,22 @@ protected SSLEngineResult runEngine(ByteBuffer applicationBuffer) public int write(ByteBuffer src) throws IOException { int result = 0; - if (Context.getCurrentLogger().isLoggable(Level.INFO)) { - Context.getCurrentLogger().log(Level.INFO, getManager().toString()); - } - // If the packet buffer isn't empty, first try to flush it flush(); // Refill the packet buffer - if (getPacketBufferState() == BufferState.FILLING) { + if ((getPacketBufferState() == BufferState.FILLING) + || (getManager().getState() == SslState.HANDSHAKING)) { int srcSize = src.remaining(); if (srcSize > 0) { - SSLEngineResult sslResult = runEngine(src); - handleResult(sslResult, src); - flush(); + while (getPacketBuffer().hasRemaining() + && (getConnection().getOutboundWay().getIoState() != IoState.IDLE)) { + SSLEngineResult sslResult = runEngine(src); + handleResult(sslResult, src); + flush(); + } + result = srcSize - src.remaining(); } } diff --git a/modules/org.restlet/src/org/restlet/engine/security/SslManager.java b/modules/org.restlet/src/org/restlet/engine/security/SslManager.java index a6728e4c9c..063205e425 100644 --- a/modules/org.restlet/src/org/restlet/engine/security/SslManager.java +++ b/modules/org.restlet/src/org/restlet/engine/security/SslManager.java @@ -146,7 +146,7 @@ public SslState getState() { @Override public String toString() { - return "SSL Manager: " + getState() + " | " + getEngine(); + return getState() + " | " + getEngine(); } /**
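The core change in this diff is looping the SSLEngine while the packet buffer still holds bytes, instead of running a single unwrap/wrap per read or write, so that several TLS records delivered in one socket read (typical during a handshake) are all processed. A generic JSSE sketch of that unwrap loop follows; it uses plain java.nio and javax.net.ssl types rather than the Restlet channel classes, and omits handshake-task (NEED_TASK) handling for brevity:

```java
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLEngineResult;

public final class UnwrapLoopSketch {
    /**
     * Reads encrypted bytes from the network channel into the packet buffer,
     * then drains the packet buffer through the engine until it is empty,
     * the destination is full, or the engine asks for more input.
     */
    public static int readDecrypted(ReadableByteChannel net, SSLEngine engine,
                                    ByteBuffer packet, ByteBuffer dst) throws IOException {
        int before = dst.remaining();
        net.read(packet);   // packet buffer is in fill mode on entry
        packet.flip();      // switch to drain mode

        while (packet.hasRemaining() && dst.hasRemaining()) {
            SSLEngineResult result = engine.unwrap(packet, dst);
            if (result.getStatus() != SSLEngineResult.Status.OK) {
                // BUFFER_UNDERFLOW: need more network bytes;
                // BUFFER_OVERFLOW: dst is full; CLOSED: peer closed.
                break;
            }
        }

        packet.compact();   // back to fill mode, keeping any partial record
        return before - dst.remaining();
    }
}
```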
14f8d46f2fdfc5994b025ba2b4d19ba685f90b2e
hbase
HBASE-9366 TestHTraceHooks.testTraceCreateTable errors out sometimes.

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1523816 13f79535-47bb-0310-9956-ffa450edef68
c
https://github.com/apache/hbase
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/trace/TestHTraceHooks.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/trace/TestHTraceHooks.java index faf015cb347b..3595c392e34f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/trace/TestHTraceHooks.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/trace/TestHTraceHooks.java @@ -25,6 +25,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.MediumTests; +import org.apache.hadoop.hbase.Waiter; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Put; import org.cloudera.htrace.Sampler; @@ -71,6 +72,15 @@ public void testTraceCreateTable() throws Exception { tableCreationSpan.close(); } + // Some table creation is async. Need to make sure that everything is full in before + // checking to see if the spans are there. + TEST_UTIL.waitFor(1000, new Waiter.Predicate<Exception>() { + @Override + public boolean evaluate() throws Exception { + return rcvr.getSpans().size() >= 5; + } + }); + Collection<Span> spans = rcvr.getSpans(); TraceTree traceTree = new TraceTree(spans); Collection<Span> roots = traceTree.getRoots();
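The fix is an instance of a common flaky-test remedy: instead of asserting immediately on the result of asynchronous work, poll a predicate until it holds or a deadline passes. A self-contained version of that pattern is below; waitFor and the lambda condition are illustrative stand-ins, not the HBase Waiter API itself:

```java
import java.util.concurrent.Callable;

public final class WaitForSketch {
    /** Polls the condition until it returns true or timeoutMs elapses. */
    public static void waitFor(long timeoutMs, Callable<Boolean> condition) throws Exception {
        long deadline = System.currentTimeMillis() + timeoutMs;
        while (!condition.call()) {
            if (System.currentTimeMillis() > deadline) {
                throw new AssertionError("condition not met within " + timeoutMs + " ms");
            }
            Thread.sleep(50); // brief back-off between polls
        }
    }

    public static void main(String[] args) throws Exception {
        long start = System.currentTimeMillis();
        // e.g. wait until the asynchronous work has produced enough results,
        // analogous to "rcvr.getSpans().size() >= 5" in the test above
        waitFor(1000, () -> System.currentTimeMillis() - start > 200);
        System.out.println("condition satisfied");
    }
}
```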
c65e0bd91c6d4092efd7526a8021ab9b0f1e7b7c
drools
JBRULES-393 Xml dump with illegal characters - fixed by Javier Prieto

git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@5949 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70
c
https://github.com/kiegroup/drools
diff --git a/drools-compiler/src/main/java/org/drools/xml/XmlDumper.java b/drools-compiler/src/main/java/org/drools/xml/XmlDumper.java index 428186c1ba4..87945759802 100644 --- a/drools-compiler/src/main/java/org/drools/xml/XmlDumper.java +++ b/drools-compiler/src/main/java/org/drools/xml/XmlDumper.java @@ -75,7 +75,7 @@ public void visitAttributeDescr(final AttributeDescr attributeDescr) { public void visitVariableRestrictionDescr(final VariableRestrictionDescr descr) { this.template = new String(); - this.template = "<variable-restriction evaluator=\"" + getEvaluator( descr.getEvaluator() ) + "\" identifier=\"" + descr.getIdentifier() + "\" />" + XmlDumper.eol; + this.template = "<variable-restriction evaluator=\"" + replaceIllegalChars( descr.getEvaluator() ) + "\" identifier=\"" + descr.getIdentifier() + "\" />" + XmlDumper.eol; } public void visitColumnDescr(final ColumnDescr descr) { @@ -99,12 +99,12 @@ public void visitColumnDescr(final ColumnDescr descr) { public void visitFieldConstraintDescr(final FieldConstraintDescr descr) { if ( !descr.getRestrictions().isEmpty() ) { processFieldConstraint( descr.getRestrictions() ); - } - } - + } + } + public void visitEvalDescr(final EvalDescr descr) { this.template = new String(); - this.template = "<eval>" + descr.getText() + "</eval>" + XmlDumper.eol; + this.template = "<eval>" + replaceIllegalChars( descr.getText() ) + "</eval>" + XmlDumper.eol; } public void visitExistsDescr(final ExistsDescr descr) { @@ -124,14 +124,15 @@ public void visitFieldBindingDescr(final FieldBindingDescr descr) { public void visitFunctionDescr(final FunctionDescr functionDescr) { this.template = new String(); final String parameterTemplate = processParameters( functionDescr.getParameterNames(), - functionDescr.getParameterTypes() ); + functionDescr.getParameterTypes() ); - this.template = "<function return-type=\"" + functionDescr.getReturnType() + "\" name=\"" + functionDescr.getName() + "\">" + XmlDumper.eol + parameterTemplate + "<body>" + XmlDumper.eol + functionDescr.getText() + XmlDumper.eol + "</body>" + XmlDumper.eol + "</function>" + XmlDumper.eol; + this.template = "<function return-type=\"" + functionDescr.getReturnType() + "\" name=\"" + functionDescr.getName() + "\">" + XmlDumper.eol + parameterTemplate + "<body>" + XmlDumper.eol + replaceIllegalChars( functionDescr.getText() ) + XmlDumper.eol + "</body>" + + XmlDumper.eol + "</function>" + XmlDumper.eol; } public void visitLiteralRestrictionDescr(final LiteralRestrictionDescr descr) { this.template = new String(); - this.template = "<literal-restriction evaluator=\"" + getEvaluator( descr.getEvaluator() ) + "\" value=\"" + descr.getText() + "\" />" + XmlDumper.eol; + this.template = "<literal-restriction evaluator=\"" + replaceIllegalChars( descr.getEvaluator() ) + "\" value=\"" + replaceIllegalChars( descr.getText() ) + "\" />" + XmlDumper.eol; } public void visitNotDescr(final NotDescr descr) { @@ -155,8 +156,8 @@ public void visitOrDescr(final OrDescr descr) { public void visitPackageDescr(final PackageDescr packageDescr) { final String packageName = packageDescr.getName(); - final String xmlString = "<?xml version=\"1.0\" encoding=\"UTF-8\"?> " + XmlDumper.eol + " <package name=\"" + packageName + "\" " + XmlDumper.eol + "\txmlns=\"http://drools.org/drools-3.0\" " + XmlDumper.eol + "\txmlns:xs=\"http://www.w3.org/2001/XMLSchema-instance\" " + XmlDumper.eol - + "\txs:schemaLocation=\"http://drools.org/drools-3.0 drools-3.0.xsd\"> " + XmlDumper.eol; + final String xmlString = "<?xml 
version=\"1.0\" encoding=\"UTF-8\"?> " + XmlDumper.eol + " <package name=\"" + packageName + "\" " + XmlDumper.eol + "\txmlns=\"http://drools.org/drools-3.0\" " + XmlDumper.eol + + "\txmlns:xs=\"http://www.w3.org/2001/XMLSchema-instance\" " + XmlDumper.eol + "\txs:schemaLocation=\"http://drools.org/drools-3.0 drools-3.0.xsd\"> " + XmlDumper.eol; appendXmlDump( xmlString ); appendXmlDump( processImportsList( packageDescr.getImports() ) ); appendXmlDump( processGlobalsMap( packageDescr.getGlobals() ) ); @@ -167,12 +168,12 @@ public void visitPackageDescr(final PackageDescr packageDescr) { public void visitPredicateDescr(final PredicateDescr descr) { this.template = new String(); - this.template = "<predicate field-name=\"" + descr.getFieldName() + "\" identifier=\"" + descr.getDeclaration() + "\" >" + descr.getText() + "</predicate>" + XmlDumper.eol; + this.template = "<predicate field-name=\"" + descr.getFieldName() + "\" identifier=\"" + descr.getDeclaration() + "\" >" + replaceIllegalChars( descr.getText() ) + "</predicate>" + XmlDumper.eol; } public void visitReturnValueRestrictionDescr(final ReturnValueRestrictionDescr descr) { this.template = new String(); - this.template = "<return-value-restriction evaluator=\"" + getEvaluator( descr.getEvaluator() ) + "\" >" + descr.getText() + "</return-value>" + XmlDumper.eol; + this.template = "<return-value-restriction evaluator=\"" + replaceIllegalChars( descr.getEvaluator() ) + "\" >" + replaceIllegalChars( descr.getText() ) + "</return-value>" + XmlDumper.eol; } public void visitQueryDescr(final QueryDescr descr) { @@ -196,7 +197,7 @@ private String processRules(final List rules) { lhs = "<lhs> </lhs>"; } - final String rhs = "<rhs>" + ruleDescr.getConsequence() + "</rhs>" + XmlDumper.eol; + final String rhs = "<rhs>" + replaceIllegalChars( ruleDescr.getConsequence() ) + "</rhs>" + XmlDumper.eol; rule += attribute; rule += lhs; rule += rhs; @@ -206,19 +207,19 @@ private String processRules(final List rules) { return ruleList + XmlDumper.eol; } - + private String processFieldConstraint(List list) { String descrString = ""; for ( final Iterator it = list.iterator(); it.hasNext(); ) { final Object temp = it.next(); - descrString += "<field-restrictions name=\"" +((FieldConstraintDescr) temp).getFieldName() + "\"> "; + descrString += "<field-restrictions name=\"" + ((FieldConstraintDescr) temp).getFieldName() + "\"> "; visit( temp ); - descrString += "</field-restrictions>"; + descrString += "</field-restrictions>"; descrString += this.template; } return descrString.substring( 0, descrString.length() - 2 ); - } + } private String processDescrList(final List descr) { String descrString = ""; @@ -292,13 +293,30 @@ private String processImportsList(final List imports) { private void appendXmlDump(final String temp) { this.xmlDump.append( temp ); } - - private String getEvaluator(String eval) { - - eval = eval.replaceAll( "<", - "&lt;" ); - eval = eval.replaceAll( ">", - "&gt;" ); - return eval; - } + + /** + * Replace illegal xml characters with their escaped equivalent + * <P>The escaped characters are : + * <ul> + * <li> < + * <li> > + * <li> & + * </ul> + * </p> + * @author <a href="mailto:[email protected]">Author Javier Prieto</a> + */ + private String replaceIllegalChars(String code) { + StringBuffer sb = new StringBuffer(); + int n = code.length(); + for (int i = 0; i < n; i++) { + char c = code.charAt(i); + switch (c) { + case '<': sb.append("&lt;"); break; + case '>': sb.append("&gt;"); break; + case '&': sb.append("&amp;"); break; + 
default: sb.append(c); break; + } + } + return sb.toString(); + } } \ No newline at end of file diff --git a/drools-compiler/src/test/resources/org/drools/integrationtests/test_Dumpers.drl b/drools-compiler/src/test/resources/org/drools/integrationtests/test_Dumpers.drl index d79f225ab65..5dd5634db9d 100644 --- a/drools-compiler/src/test/resources/org/drools/integrationtests/test_Dumpers.drl +++ b/drools-compiler/src/test/resources/org/drools/integrationtests/test_Dumpers.drl @@ -9,8 +9,12 @@ rule "test MAIN 1" when Cheese( ) then - list.add( "MAIN" ); - drools.setFocus( "agenda group 1" ); + // lets also make sure that special chars are converted + if ( 3 < 4 && 4 > 3 ) { + list.add( "MAIN" ); + drools.setFocus( "agenda group 1" ); + } + end rule "test group1 1" @@ -29,7 +33,7 @@ rule "test group3 1" when Cheese( ) then - list.add( "3 1" ); + list.add( "3 1" ); end
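To make the effect of the new replaceIllegalChars method concrete, here is its escaping logic restated in standalone form with a small driver. The class name and main method are added for illustration; the switch body mirrors the method in the diff:

```java
public final class XmlEscapeSketch {
    /** Replaces the three XML-reserved characters with their entities. */
    static String escape(String code) {
        StringBuilder sb = new StringBuilder(code.length());
        for (int i = 0; i < code.length(); i++) {
            char c = code.charAt(i);
            switch (c) {
                case '<': sb.append("&lt;"); break;
                case '>': sb.append("&gt;"); break;
                case '&': sb.append("&amp;"); break;
                default:  sb.append(c);
            }
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        // The rule consequence added to test_Dumpers.drl now survives the dump:
        System.out.println(escape("if ( 3 < 4 && 4 > 3 ) { list.add(\"MAIN\"); }"));
        // prints: if ( 3 &lt; 4 &amp;&amp; 4 &gt; 3 ) { list.add("MAIN"); }
    }
}
```

This is exactly why the updated test rule includes "3 < 4 && 4 > 3": it forces all three reserved characters through the dumper.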
abd66858b71d2cad75bbfbcb4cb0b616caacc5ed
ReactiveX-RxJava
javadoc interlinkings and other javadoc improvements
p
https://github.com/ReactiveX/RxJava
diff --git a/rxjava-core/src/main/java/rx/observables/BlockingObservable.java b/rxjava-core/src/main/java/rx/observables/BlockingObservable.java index 3753c2bad7..3ef4dbcebc 100644 --- a/rxjava-core/src/main/java/rx/observables/BlockingObservable.java +++ b/rxjava-core/src/main/java/rx/observables/BlockingObservable.java @@ -73,119 +73,122 @@ public Subscription call(Observer<T> observer) { } /** - * Returns an Iterator that iterates over all items emitted by a specified Observable. + * Returns an {@link Iterator} that iterates over all items emitted by a specified + * {@link Observable}. * <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.toIterator.png"> * * @param source - * the source Observable + * the source {@link Observable} * @param <T> - * the type of items emitted by the source Observable - * @return an iterator that can iterate over the items emitted by the Observable + * the type of items emitted by the source {@link Observable} + * @return an {@link Iterator} that can iterate over the items emitted by the {@link Observable} */ public static <T> Iterator<T> toIterator(Observable<T> source) { return OperationToIterator.toIterator(source); } /** - * Returns the last item emitted by a specified Observable. + * Returns the last item emitted by a specified {@link Observable}. * <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.last.png"> * * @param source - * the source Observable - * @return the last item emitted by the source Observable + * the source {@link Observable} + * @return the last item emitted by the source {@link Observable} */ public static <T> T last(final Observable<T> source) { return from(source).last(); } /** - * Returns the last item emitted by an Observable that matches a given predicate. + * Returns the last item emitted by an {@link Observable} that matches a given predicate. * <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.last.p.png"> * * @param source - * the source Observable + * the source {@link Observable} * @param predicate - * a predicate function to evaluate items emitted by the Observable - * @return the last item emitted by the Observable for which the predicate function returns - * <code>true</code> + * a predicate function to evaluate items emitted by the {@link Observable} + * @return the last item emitted by the {@link Observable} for which the predicate function + * returns <code>true</code> */ public static <T> T last(final Observable<T> source, final Func1<T, Boolean> predicate) { return last(source.filter(predicate)); } /** - * Returns the last item emitted by an Observable that matches a given predicate. + * Returns the last item emitted by an {@link Observable} that matches a given predicate. 
* <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.last.p.png"> * * @param source - * the source Observable + * the source {@link Observable} * @param predicate - * a predicate function to evaluate items emitted by the Observable - * @return the last item emitted by the Observable for which the predicate function returns - * <code>true</code> + * a predicate function to evaluate items emitted by the {@link Observable} + * @return the last item emitted by the {@link Observable} for which the predicate function + * returns <code>true</code> */ public static <T> T last(final Observable<T> source, final Object predicate) { return last(source.filter(predicate)); } /** - * Returns the last item emitted by an Observable, or a default value if no item is emitted. + * Returns the last item emitted by an {@link Observable}, or a default value if no item is + * emitted. * <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.lastOrDefault.png"> * * @param source - * the source Observable + * the source {@link Observable} * @param defaultValue - * a default value to return if the Observable emits no items + * a default value to return if the {@link Observable} emits no items * @param <T> - * the type of items emitted by the Observable - * @return the last item emitted by an Observable, or the default value if no item is emitted + * the type of items emitted by the {@link Observable} + * @return the last item emitted by an {@link Observable}, or the default value if no item is + * emitted */ public static <T> T lastOrDefault(Observable<T> source, T defaultValue) { return from(source).lastOrDefault(defaultValue); } /** - * Returns the last item emitted by an Observable that matches a given predicate, or a default - * value if no such item is emitted. + * Returns the last item emitted by an {@link Observable} that matches a given predicate, or a + * default value if no such item is emitted. * <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.lastOrDefault.p.png"> * * @param source - * the source Observable + * the source {@link Observable} * @param defaultValue - * a default value to return if the Observable emits no matching items + * a default value to return if the {@link Observable} emits no matching items * @param predicate - * a predicate function to evaluate items emitted by the Observable + * a predicate function to evaluate items emitted by the {@link Observable} * @param <T> - * the type of items emitted by the Observable - * @return the last item emitted by an Observable that matches the predicate, or the default - * value if no matching item is emitted + * the type of items emitted by the {@link Observable} + * @return the last item emitted by an {@link Observable} that matches the predicate, or the + * default value if no matching item is emitted */ public static <T> T lastOrDefault(Observable<T> source, T defaultValue, Func1<T, Boolean> predicate) { return lastOrDefault(source.filter(predicate), defaultValue); } /** - * Returns the last item emitted by an Observable that matches a given predicate, or a default - * value if no such item is emitted. + * Returns the last item emitted by an {@link Observable} that matches a given predicate, or a + * default value if no such item is emitted. 
* <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.lastOrDefault.p.png"> * * @param source - * the source Observable + * the source {@link Observable} * @param defaultValue - * a default value to return if the Observable emits no matching items + * a default value to return if the {@link Observable} emits no matching items * @param predicate - * a predicate function to evaluate items emitted by the Observable + * a predicate function to evaluate items emitted by the {@link Observable} * @param <T> - * the type of items emitted by the Observable - * @return the last item emitted by an Observable that matches the predicate, or the default - * value if no matching item is emitted + * the type of items emitted by the {@link Observable} + * @return the last item emitted by an {@link Observable} that matches the predicate, or the + * default value if no matching item is emitted */ public static <T> T lastOrDefault(Observable<T> source, T defaultValue, Object predicate) { @SuppressWarnings("rawtypes") @@ -200,36 +203,37 @@ public Boolean call(T args) { } /** - * Returns an Iterable that always returns the item most recently emitted by an Observable. + * Returns an {@link Iterable} that always returns the item most recently emitted by an + * {@link Observable}. * <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.mostRecent.png"> * * @param source - * the source Observable + * the source {@link Observable} * @param <T> - * the type of items emitted by the Observable + * the type of items emitted by the {@link Observable} * @param initialValue - * the initial value that will be yielded by the Iterable sequence if the Observable - * has not yet emitted an item - * @return an Iterable that on each iteration returns the item that the Observable has most - * recently emitted + * the initial value that will be yielded by the {@link Iterable} sequence if the + * {@link Observable} has not yet emitted an item + * @return an {@link Iterable} that on each iteration returns the item that the + * {@link Observable} has most recently emitted */ public static <T> Iterable<T> mostRecent(Observable<T> source, T initialValue) { return OperationMostRecent.mostRecent(source, initialValue); } /** - * Returns an Iterable that blocks until the Observable emits another item, then returns that - * item. + * Returns an {@link Iterable} that blocks until the {@link Observable} emits another item, + * then returns that item. * <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.next.png"> * * @param items - * the source Observable + * the source {@link Observable} * @param <T> - * the type of items emitted by the Observable - * @return an Iterable that blocks upon each iteration until the Observable emits a new item, - * whereupon the Iterable returns that item + * the type of items emitted by the {@link Observable} + * @return an {@link Iterable} that blocks upon each iteration until the {@link Observable} + * emits a new item, whereupon the Iterable returns that item */ public static <T> Iterable<T> next(Observable<T> items) { return OperationNext.next(items); @@ -255,144 +259,149 @@ private static <T> T _singleOrDefault(BlockingObservable<T> source, boolean hasD } /** - * If the Observable completes after emitting a single item, return that item, otherwise throw - * an exception. + * If the {@link Observable} completes after emitting a single item, return that item, + * otherwise throw an exception. 
* <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.single.png"> * * @param source - * the source Observable - * @return the single item emitted by the Observable + * the source {@link Observable} + * @return the single item emitted by the {@link Observable} * @throws IllegalStateException - * if the Observable does not emit exactly one item + * if the {@link Observable} does not emit exactly one item */ public static <T> T single(Observable<T> source) { return from(source).single(); } /** - * If the Observable completes after emitting a single item that matches a given predicate, - * return that item, otherwise throw an exception. + * If the {@link Observable} completes after emitting a single item that matches a given + * predicate, return that item, otherwise throw an exception. * <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.single.p.png"> * * @param source - * the source Observable + * the source {@link Observable} * @param predicate - * a predicate function to evaluate items emitted by the Observable - * @return the single item emitted by the source Observable that matches the predicate + * a predicate function to evaluate items emitted by the {@link Observable} + * @return the single item emitted by the source {@link Observable} that matches the predicate * @throws IllegalStateException - * if the Observable does not emit exactly one item that matches the predicate + * if the {@link Observable} does not emit exactly one item that matches the + * predicate */ public static <T> T single(Observable<T> source, Func1<T, Boolean> predicate) { return from(source).single(predicate); } /** - * If the Observable completes after emitting a single item that matches a given predicate, - * return that item, otherwise throw an exception. + * If the {@link Observable} completes after emitting a single item that matches a given + * predicate, return that item, otherwise throw an exception. * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.single.p.png"> * * @param source - * the source Observable + * the source {@link Observable} * @param predicate - * a predicate function to evaluate items emitted by the Observable - * @return the single item emitted by the source Observable that matches the predicate + * a predicate function to evaluate items emitted by the {@link Observable} + * @return the single item emitted by the source {@link Observable} that matches the predicate * @throws IllegalStateException - * if the Observable does not emit exactly one item that matches the predicate + * if the {@link Observable} does not emit exactly one item that matches the + * predicate */ public static <T> T single(Observable<T> source, Object predicate) { return from(source).single(predicate); } /** - * If the Observable completes after emitting a single item, return that item, otherwise return - * a default value. + * If the {@link Observable} completes after emitting a single item, return that item, otherwise + * return a default value. 
* <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.singleOrDefault.png"> * * @param source - * the source Observable + * the source {@link Observable} * @param defaultValue - * a default value to return if the Observable emits no items - * @return the single item emitted by the source Observable, or a default value if no value is - * emitted + * a default value to return if the {@link Observable} emits no items + * @return the single item emitted by the source {@link Observable}, or a default value if no + * value is emitted */ public static <T> T singleOrDefault(Observable<T> source, T defaultValue) { return from(source).singleOrDefault(defaultValue); } /** - * If the Observable completes after emitting a single item that matches a given predicate, - * return that item, otherwise return a default value. + * If the {@link Observable} completes after emitting a single item that matches a given + * predicate, return that item, otherwise return a default value. * <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.singleOrDefault.p.png"> * * @param source - * the source Observable + * the source {@link Observable} * @param defaultValue - * a default value to return if the Observable emits no matching items + * a default value to return if the {@link Observable} emits no matching items * @param predicate - * a predicate function to evaluate items emitted by the Observable - * @return the single item emitted by the source Observable that matches the predicate, or a - * default value if no such value is emitted + * a predicate function to evaluate items emitted by the {@link Observable} + * @return the single item emitted by the source {@link Observable} that matches the predicate, + * or a default value if no such value is emitted */ public static <T> T singleOrDefault(Observable<T> source, T defaultValue, Func1<T, Boolean> predicate) { return from(source).singleOrDefault(defaultValue, predicate); } /** - * If the Observable completes after emitting a single item that matches a given predicate, - * return that item, otherwise return a default value. + * If the {@link Observable} completes after emitting a single item that matches a given + * predicate, return that item, otherwise return a default value. * <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.singleOrDefault.p.png"> * * @param source - * the source Observable + * the source {@link Observable} * @param defaultValue - * a default value to return if the Observable emits no matching items + * a default value to return if the {@link Observable} emits no matching items * @param predicate - * a predicate function to evaluate items emitted by the Observable - * @return the single item emitted by the source Observable that matches the predicate, or a - * default value if no such value is emitted + * a predicate function to evaluate items emitted by the {@link Observable} + * @return the single item emitted by the source {@link Observable} that matches the predicate, + * or a default value if no such value is emitted */ public static <T> T singleOrDefault(Observable<T> source, T defaultValue, Object predicate) { return from(source).singleOrDefault(defaultValue, predicate); } /** - * Returns a Future representing the single value emitted by an Observable. + * Returns a {@link Future} representing the single value emitted by an {@link Observable}. 
* <p> - * <code>toFuture()</code> throws an exception if the Observable emits more than one item. If - * the Observable may emit more than item, use <code>toList().toFuture()</code>. + * <code>toFuture()</code> throws an exception if the {@link Observable} emits more than one + * item. If the Observable may emit more than item, use + * {@link Observable#toList toList()}.toFuture()</code>. * <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.toFuture.png"> * * @param source - * the source Observable - * @return a Future that expects a single item to be emitted by the source Observable + * the source {@link Observable} + * @return a Future that expects a single item to be emitted by the source {@link Observable} */ public static <T> Future<T> toFuture(final Observable<T> source) { return OperationToFuture.toFuture(source); } /** - * Converts an Observable into an Iterable. + * Converts an {@link Observable} into an {@link Iterable}. * <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.toIterable.png"> * * @param source - * the source Observable - * @return an Iterable version of the underlying Observable + * the source {@link Observable} + * @return an {@link Iterable} version of the underlying {@link Observable} */ public static <T> Iterable<T> toIterable(final Observable<T> source) { return from(source).toIterable(); } /** - * Used for protecting against errors being thrown from Observer implementations and ensuring onNext/onError/onCompleted contract compliance. + * Used for protecting against errors being thrown from {@link Observer} implementations and + * ensuring onNext/onError/onCompleted contract compliance. * <p> - * See https://github.com/Netflix/RxJava/issues/216 for discussion on "Guideline 6.4: Protect calls to user code from within an operator" + * See https://github.com/Netflix/RxJava/issues/216 for discussion on "Guideline 6.4: Protect + * calls to user code from within an operator" */ private Subscription protectivelyWrapAndSubscribe(Observer<T> o) { AtomicObservableSubscription subscription = new AtomicObservableSubscription(); @@ -400,7 +409,7 @@ private Subscription protectivelyWrapAndSubscribe(Observer<T> o) { } /** - * Invoke a method on each item emitted by the Observable; block until the Observable + * Invoke a method on each item emitted by the {@link Observable}; block until the Observable * completes. * <p> * NOTE: This will block even if the Observable is asynchronous. @@ -411,7 +420,7 @@ private Subscription protectivelyWrapAndSubscribe(Observer<T> o) { * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.forEach.png"> * * @param onNext - * the {@link Action1} to invoke for every item emitted by the Observable + * the {@link Action1} to invoke for every item emitted by the {@link Observable} * @throws RuntimeException * if an error occurs */ @@ -468,7 +477,7 @@ public void onNext(T args) { } /** - * Invoke a method on each item emitted by the Observable; block until the Observable + * Invoke a method on each item emitted by the {@link Observable}; block until the Observable * completes. * <p> * NOTE: This will block even if the Observable is asynchronous. 
@@ -479,7 +488,7 @@ public void onNext(T args) { * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.forEach.png"> * * @param o - * the {@link Action1} to invoke for every item emitted by the Observable + * the {@link Action1} to invoke for every item emitted by the {@link Observable} * @throws RuntimeException * if an error occurs */ @@ -507,22 +516,23 @@ public void call(Object args) { } /** - * Returns an Iterator that iterates over all items emitted by a specified Observable. + * Returns an {@link Iterator} that iterates over all items emitted by a specified + * {@link Observable}. * <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.getIterator.png"> * - * @return an iterator that can iterate over the items emitted by the Observable + * @return an {@link Iterator} that can iterate over the items emitted by the {@link Observable} */ public Iterator<T> getIterator() { return OperationToIterator.toIterator(this); } /** - * Returns the last item emitted by a specified Observable. + * Returns the last item emitted by a specified {@link Observable}. * <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.last.png"> * - * @return the last item emitted by the source Observable + * @return the last item emitted by the source {@link Observable} */ public T last() { T result = null; @@ -533,26 +543,26 @@ public T last() { } /** - * Returns the last item emitted by a specified Observable that matches a predicate. + * Returns the last item emitted by a specified {@link Observable} that matches a predicate. * <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.last.p.png"> * * @param predicate - * a predicate function to evaluate items emitted by the Observable - * @return the last item emitted by the Observable that matches the predicate + * a predicate function to evaluate items emitted by the {@link Observable} + * @return the last item emitted by the {@link Observable} that matches the predicate */ public T last(final Func1<T, Boolean> predicate) { return last(this, predicate); } /** - * Returns the last item emitted by a specified Observable that matches a predicate. + * Returns the last item emitted by a specified {@link Observable} that matches a predicate. * <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.last.p.png"> * * @param predicate - * a predicate function to evaluate items emitted by the Observable - * @return the last item emitted by the Observable that matches the predicate + * a predicate function to evaluate items emitted by the {@link Observable} + * @return the last item emitted by the {@link Observable} that matches the predicate */ public T last(final Object predicate) { @SuppressWarnings("rawtypes") @@ -567,14 +577,15 @@ public Boolean call(T args) { } /** - * Returns the last item emitted by a specified Observable, or a default value if no items are - * emitted. + * Returns the last item emitted by a specified {@link Observable}, or a default value if no + * items are emitted. 
* <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.lastOrDefault.png"> * * @param defaultValue - * a default value to return if the Observable emits no items - * @return the last item emitted by the Observable, or the default value if no items are emitted + * a default value to return if the {@link Observable} emits no items + * @return the last item emitted by the {@link Observable}, or the default value if no items + * are emitted */ public T lastOrDefault(T defaultValue) { boolean found = false; @@ -593,102 +604,103 @@ public T lastOrDefault(T defaultValue) { } /** - * Returns the last item emitted by a specified Observable that matches a predicate, or a - * default value if no such items are emitted. + * Returns the last item emitted by a specified {@link Observable} that matches a predicate, or + * a default value if no such items are emitted. * <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.lastOrDefault.p.png"> * * @param defaultValue - * a default value to return if the Observable emits no matching items + * a default value to return if the {@link Observable} emits no matching items * @param predicate - * a predicate function to evaluate items emitted by the Observable - * @return the last item emitted by the Observable that matches the predicate, or the default - * value if no matching items are emitted + * a predicate function to evaluate items emitted by the {@link Observable} + * @return the last item emitted by the {@link Observable} that matches the predicate, or the + * default value if no matching items are emitted */ public T lastOrDefault(T defaultValue, Func1<T, Boolean> predicate) { return lastOrDefault(this, defaultValue, predicate); } /** - * Returns the last item emitted by a specified Observable that matches a predicate, or a - * default value if no such items are emitted. + * Returns the last item emitted by a specified {@link Observable} that matches a predicate, or + * a default value if no such items are emitted. * <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.lastOrDefault.p.png"> * * @param defaultValue - * a default value to return if the Observable emits no matching items + * a default value to return if the {@link Observable} emits no matching items * @param predicate - * a predicate function to evaluate items emitted by the Observable - * @return the last item emitted by the Observable that matches the predicate, or the default - * value if no matching items are emitted + * a predicate function to evaluate items emitted by the {@link Observable} + * @return the last item emitted by the {@link Observable} that matches the predicate, or the + * default value if no matching items are emitted */ public T lastOrDefault(T defaultValue, Object predicate) { return lastOrDefault(this, defaultValue, predicate); } /** - * Returns an Iterable that always returns the item most recently emitted by an Observable. + * Returns an {@link Iterable} that always returns the item most recently emitted by an + * {@link Observable}. 
* <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.mostRecent.png"> * * @param initialValue - * the initial value that will be yielded by the Iterable sequence if the Observable - * has not yet emitted an item - * @return an Iterable that on each iteration returns the item that the Observable has most - * recently emitted + * the initial value that will be yielded by the {@link Iterable} sequence if the + * {@link Observable} has not yet emitted an item + * @return an {@link Iterable} that on each iteration returns the item that the + * {@link Observable} has most recently emitted */ public Iterable<T> mostRecent(T initialValue) { return mostRecent(this, initialValue); } /** - * Returns an Iterable that blocks until the Observable emits another item, then returns that - * item. + * Returns an {@link Iterable} that blocks until the {@link Observable} emits another item, + * then returns that item. * <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.next.png"> * - * @return an Iterable that blocks upon each iteration until the Observable emits a new item, - * whereupon the Iterable returns that item + * @return an {@link Iterable} that blocks upon each iteration until the {@link Observable} + * emits a new item, whereupon the Iterable returns that item */ public Iterable<T> next() { return next(this); } /** - * If the Observable completes after emitting a single item, return that item, otherwise throw - * an exception. + * If the {@link Observable} completes after emitting a single item, return that item, + * otherwise throw an exception. * <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.single.png"> * - * @return the single item emitted by the Observable + * @return the single item emitted by the {@link Observable} */ public T single() { return _singleOrDefault(this, false, null); } /** - * If the Observable completes after emitting a single item that matches a given predicate, - * return that item, otherwise throw an exception. + * If the {@link Observable} completes after emitting a single item that matches a given + * predicate, return that item, otherwise throw an exception. * <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.single.p.png"> * * @param predicate - * a predicate function to evaluate items emitted by the Observable - * @return the single item emitted by the source Observable that matches the predicate + * a predicate function to evaluate items emitted by the {@link Observable} + * @return the single item emitted by the source {@link Observable} that matches the predicate */ public T single(Func1<T, Boolean> predicate) { return _singleOrDefault(from(this.filter(predicate)), false, null); } /** - * If the Observable completes after emitting a single item that matches a given predicate, - * return that item, otherwise throw an exception. + * If the {@link Observable} completes after emitting a single item that matches a given + * predicate, return that item, otherwise throw an exception. 
* <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.single.p.png"> * * @param predicate - * a predicate function to evaluate items emitted by the Observable - * @return the single item emitted by the source Observable that matches the predicate + * a predicate function to evaluate items emitted by the {@link Observable} + * @return the single item emitted by the source {@link Observable} that matches the predicate */ public T single(Object predicate) { @SuppressWarnings("rawtypes") @@ -703,51 +715,51 @@ public Boolean call(T t) { } /** - * If the Observable completes after emitting a single item, return that item; if it emits more - * than one item, throw an exception; if it emits no items, return a default value. + * If the {@link Observable} completes after emitting a single item, return that item; if it + * emits more than one item, throw an exception; if it emits no items, return a default value. * <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.singleOrDefault.png"> * * @param defaultValue - * a default value to return if the Observable emits no items - * @return the single item emitted by the Observable, or the default value if no items are - * emitted + * a default value to return if the {@link Observable} emits no items + * @return the single item emitted by the {@link Observable}, or the default value if no items + * are emitted */ public T singleOrDefault(T defaultValue) { return _singleOrDefault(this, true, defaultValue); } /** - * If the Observable completes after emitting a single item that matches a predicate, return - * that item; if it emits more than one such item, throw an exception; if it emits no items, - * return a default value. + * If the {@link Observable} completes after emitting a single item that matches a predicate, + * return that item; if it emits more than one such item, throw an exception; if it emits no + * items, return a default value. * <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.singleOrDefault.p.png"> * * @param defaultValue - * a default value to return if the Observable emits no matching items + * a default value to return if the {@link Observable} emits no matching items * @param predicate - * a predicate function to evaluate items emitted by the Observable - * @return the single item emitted by the Observable that matches the predicate, or the default - * value if no such items are emitted + * a predicate function to evaluate items emitted by the {@link Observable} + * @return the single item emitted by the {@link Observable} that matches the predicate, or the + * default value if no such items are emitted */ public T singleOrDefault(T defaultValue, Func1<T, Boolean> predicate) { return _singleOrDefault(from(this.filter(predicate)), true, defaultValue); } /** - * If the Observable completes after emitting a single item that matches a predicate, return - * that item; if it emits more than one such item, throw an exception; if it emits no items, - * return a default value. + * If the {@link Observable} completes after emitting a single item that matches a predicate, + * return that item; if it emits more than one such item, throw an exception; if it emits no + * items, return a default value. 
* <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.singleOrDefault.p.png"> * * @param defaultValue - * a default value to return if the Observable emits no matching items + * a default value to return if the {@link Observable} emits no matching items * @param predicate - * a predicate function to evaluate items emitted by the Observable - * @return the single item emitted by the Observable that matches the predicate, or the default - * value if no such items are emitted + * a predicate function to evaluate items emitted by the {@link Observable} + * @return the single item emitted by the {@link Observable} that matches the predicate, or the + * default value if no such items are emitted */ public T singleOrDefault(T defaultValue, final Object predicate) { @SuppressWarnings("rawtypes") @@ -762,25 +774,27 @@ public Boolean call(T t) { } /** - * Returns a Future representing the single value emitted by an Observable. + * Returns a {@link Future} representing the single value emitted by an {@link Observable}. * <p> * <code>toFuture()</code> throws an exception if the Observable emits more than one item. If - * the Observable may emit more than item, use <code>toList().toFuture()</code>. + * the Observable may emit more than item, use + * {@link Observable#toList toList()}.toFuture()</code>. * <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.toFuture.png"> * - * @return a Future that expects a single item to be emitted by the source Observable + * @return a {@link Future} that expects a single item to be emitted by the source + * {@link Observable} */ public Future<T> toFuture() { return toFuture(this); } /** - * Converts an Observable into an Iterable. + * Converts an {@link Observable} into an {@link Iterable}. * <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.toIterable.png"> * - * @return an Iterable version of the underlying Observable + * @return an {@link Iterable} version of the underlying {@link Observable} */ public Iterable<T> toIterable() { return new Iterable<T>() { diff --git a/rxjava-core/src/main/java/rx/observables/ConnectableObservable.java b/rxjava-core/src/main/java/rx/observables/ConnectableObservable.java index 7214d09b25..de0fdba643 100644 --- a/rxjava-core/src/main/java/rx/observables/ConnectableObservable.java +++ b/rxjava-core/src/main/java/rx/observables/ConnectableObservable.java @@ -21,10 +21,10 @@ import rx.util.functions.Func1; /** - * A Connectable Observable resembles an ordinary Observable, except that it does not begin - * emitting items when it is subscribed to, but only when its connect() method is called. In this - * way you can wait for all intended Observers to subscribe to the Observable before the Observable - * begins emitting items. + * A ConnectableObservable resembles an ordinary {@link Observable}, except that it does not begin + * emitting items when it is subscribed to, but only when its {@link #connect} method is called. In + * this way you can wait for all intended {@link Observer}s to {@link Observable#subscribe} to the + * Observable before the Observable begins emitting items. 
* <p> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/publishConnect.png"> * <p> @@ -42,8 +42,8 @@ protected ConnectableObservable(Func1<Observer<T>, Subscription> onSubscribe) { } /** - * Call a Connectable Observable's connect() method to instruct it to begin emitting the - * items from its underlying Observable to its Observers. + * Call a ConnectableObservable's connect() method to instruct it to begin emitting the + * items from its underlying {@link Observable} to its {@link Observer}s. */ public abstract Subscription connect(); diff --git a/rxjava-core/src/main/java/rx/observables/GroupedObservable.java b/rxjava-core/src/main/java/rx/observables/GroupedObservable.java index 05e2184bba..e2e075c9bd 100644 --- a/rxjava-core/src/main/java/rx/observables/GroupedObservable.java +++ b/rxjava-core/src/main/java/rx/observables/GroupedObservable.java @@ -21,7 +21,8 @@ import rx.util.functions.Func1; /** - * An {@link Observable} that has been grouped by a key whose value can be obtained using {@link #getKey()} <p> + * An {@link Observable} that has been grouped by a key whose value can be obtained using + * {@link #getKey()} <p> * * @see Observable#groupBy(Observable, Func1) *
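Since the javadoc above documents the blocking operators purely in prose, a short usage sketch may help. This assumes the 0.x-era API visible in the diff (BlockingObservable.from, last, single, toIterable); Observable.from(T...) as a varargs factory is an assumption about this version:

```java
import rx.Observable;
import rx.observables.BlockingObservable;

public final class BlockingSketch {
    public static void main(String[] args) {
        // Observable.from(T...) is assumed to exist in this 0.x release.
        Observable<Integer> numbers = Observable.from(1, 2, 3);

        // Blocks until the source completes, then returns its final item.
        int last = BlockingObservable.from(numbers).last();                    // 3

        // single() insists on exactly one emission, else IllegalStateException.
        int only = BlockingObservable.from(Observable.from(42)).single();     // 42

        // toIterable() lets an ordinary for-each loop consume the sequence.
        for (int i : BlockingObservable.from(numbers).toIterable()) {
            System.out.println(i);
        }
        System.out.println(last + " " + only);
    }
}
```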
615fc435cc88c2c4fe66e9359f1d69e5eb134d18
elasticsearch
Http Transport: Allow to configure `max_header_size`, `max_initial_line_length`, and `max_chunk_size`, closes #1174.
a
https://github.com/elastic/elasticsearch
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java b/modules/elasticsearch/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java index 180d513c5a1c1..45f05e7be1ad9 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java @@ -82,13 +82,16 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent<HttpSer private final NetworkService networkService; - private final ByteSizeValue maxContentLength; + final ByteSizeValue maxContentLength; + final ByteSizeValue maxInitialLineLength; + final ByteSizeValue maxHeaderSize; + final ByteSizeValue maxChunkSize; private final int workerCount; private final boolean blockingServer; - private final boolean compression; + final boolean compression; private final int compressionLevel; @@ -114,7 +117,7 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent<HttpSer private volatile Channel serverChannel; - private volatile OpenChannelsHandler serverOpenChannels; + OpenChannelsHandler serverOpenChannels; private volatile HttpServerAdapter httpServerAdapter; @@ -122,6 +125,9 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent<HttpSer super(settings); this.networkService = networkService; ByteSizeValue maxContentLength = componentSettings.getAsBytesSize("max_content_length", settings.getAsBytesSize("http.max_content_length", new ByteSizeValue(100, ByteSizeUnit.MB))); + this.maxChunkSize = componentSettings.getAsBytesSize("max_chunk_size", settings.getAsBytesSize("http.max_chunk_size", new ByteSizeValue(8, ByteSizeUnit.KB))); + this.maxHeaderSize = componentSettings.getAsBytesSize("max_header_size", settings.getAsBytesSize("http.max_header_size", new ByteSizeValue(8, ByteSizeUnit.KB))); + this.maxInitialLineLength = componentSettings.getAsBytesSize("max_initial_line_length", settings.getAsBytesSize("http.max_initial_line_length", new ByteSizeValue(4, ByteSizeUnit.KB))); this.workerCount = componentSettings.getAsInt("worker_count", Runtime.getRuntime().availableProcessors() * 2); this.blockingServer = settings.getAsBoolean("http.blocking_server", settings.getAsBoolean(TCP_BLOCKING_SERVER, settings.getAsBoolean(TCP_BLOCKING, false))); this.port = componentSettings.get("port", settings.get("http.port", "9200-9300")); @@ -142,6 +148,9 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent<HttpSer maxContentLength = new ByteSizeValue(100, ByteSizeUnit.MB); } this.maxContentLength = maxContentLength; + + logger.debug("using max_chunk_size[{}], max_header_size[{}], max_initial_line_length[{}], max_content_length[{}]", + maxChunkSize, maxHeaderSize, maxInitialLineLength, this.maxContentLength); } public void httpServerAdapter(HttpServerAdapter httpServerAdapter) { @@ -163,27 +172,7 @@ public void httpServerAdapter(HttpServerAdapter httpServerAdapter) { workerCount)); } - final HttpRequestHandler requestHandler = new HttpRequestHandler(this); - - ChannelPipelineFactory pipelineFactory = new ChannelPipelineFactory() { - @Override public ChannelPipeline getPipeline() throws Exception { - ChannelPipeline pipeline = Channels.pipeline(); - pipeline.addLast("openChannels", serverOpenChannels); - pipeline.addLast("decoder", new HttpRequestDecoder()); - if (compression) { - pipeline.addLast("decoder_compress", new HttpContentDecompressor()); - } - 
pipeline.addLast("aggregator", new HttpChunkAggregator((int) maxContentLength.bytes())); - pipeline.addLast("encoder", new HttpResponseEncoder()); - if (compression) { - pipeline.addLast("encoder_compress", new HttpContentCompressor(compressionLevel)); - } - pipeline.addLast("handler", requestHandler); - return pipeline; - } - }; - - serverBootstrap.setPipelineFactory(pipelineFactory); + serverBootstrap.setPipelineFactory(new MyChannelPipelineFactory(this)); if (tcpNoDelay != null) { serverBootstrap.setOption("child.tcpNoDelay", tcpNoDelay); @@ -287,4 +276,36 @@ void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) throws Excepti } } } + + static class MyChannelPipelineFactory implements ChannelPipelineFactory { + + private final NettyHttpServerTransport transport; + + private final HttpRequestHandler requestHandler; + + MyChannelPipelineFactory(NettyHttpServerTransport transport) { + this.transport = transport; + this.requestHandler = new HttpRequestHandler(transport); + } + + @Override public ChannelPipeline getPipeline() throws Exception { + ChannelPipeline pipeline = Channels.pipeline(); + pipeline.addLast("openChannels", transport.serverOpenChannels); + pipeline.addLast("decoder", new HttpRequestDecoder( + (int) transport.maxInitialLineLength.bytes(), + (int) transport.maxHeaderSize.bytes(), + (int) transport.maxChunkSize.bytes() + )); + if (transport.compression) { + pipeline.addLast("decoder_compress", new HttpContentDecompressor()); + } + pipeline.addLast("aggregator", new HttpChunkAggregator((int) transport.maxContentLength.bytes())); + pipeline.addLast("encoder", new HttpResponseEncoder()); + if (transport.compression) { + pipeline.addLast("encoder_compress", new HttpContentCompressor(transport.compressionLevel)); + } + pipeline.addLast("handler", requestHandler); + return pipeline; + } + } }
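The new settings can be supplied like any other node setting, each with an `http.`-prefixed key and a byte-size value. A sketch of setting them on an embedded node, assuming the 0.x-era ImmutableSettings/NodeBuilder API of that codebase; the commented defaults are those shown in the diff:

```java
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.Node;
import org.elasticsearch.node.NodeBuilder;

public final class HttpLimitsSketch {
    public static void main(String[] args) {
        Settings settings = ImmutableSettings.settingsBuilder()
                .put("http.max_initial_line_length", "8kb") // default 4kb
                .put("http.max_header_size", "16kb")        // default 8kb
                .put("http.max_chunk_size", "16kb")         // default 8kb
                .put("http.max_content_length", "100mb")    // unchanged default
                .build();

        Node node = NodeBuilder.nodeBuilder().settings(settings).node();
        // ... requests whose initial line or headers exceed the limits are
        // rejected by the Netty HttpRequestDecoder configured in the diff
        node.close();
    }
}
```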
2ec7834124dfa32d7b90afbb23805433a4567bf5
spring-framework
Resolve nested placeholders via PropertyResolver

Prior to this change, PropertySourcesPropertyResolver (and therefore all AbstractEnvironment implementations) failed to resolve nested placeholders as in the following example:

    p1=v1
    p2=v2
    p3=${v1}:${v2}

Calls to PropertySource#getProperty for keys 'p1' and 'v1' would successfully return their respective values, but for 'p3' the return value would be the unresolved placeholders. This behavior is inconsistent with that of PropertyPlaceholderConfigurer.

PropertySourcesPropertyResolver#getProperty variants now resolve any nested placeholders recursively, throwing IllegalArgumentException for any unresolvable placeholders (as is the default behavior for PropertyPlaceholderConfigurer). See SPR-9569 for an enhancement that will introduce an 'ignoreUnresolvablePlaceholders' switch to make this behavior configurable.

This commit also improves error output in PropertyPlaceholderHelper#parseStringValue by including the original string in which an unresolvable placeholder was found.

Issue: SPR-9473, SPR-9569
c
https://github.com/spring-projects/spring-framework
diff --git a/spring-core/src/main/java/org/springframework/core/env/PropertySourcesPropertyResolver.java b/spring-core/src/main/java/org/springframework/core/env/PropertySourcesPropertyResolver.java index b390d4ddaba0..cee4db51a6aa 100644 --- a/spring-core/src/main/java/org/springframework/core/env/PropertySourcesPropertyResolver.java +++ b/spring-core/src/main/java/org/springframework/core/env/PropertySourcesPropertyResolver.java @@ -72,6 +72,9 @@ public <T> T getProperty(String key, Class<T> targetValueType) { Object value; if ((value = propertySource.getProperty(key)) != null) { Class<?> valueType = value.getClass(); + if (String.class.equals(valueType)) { + value = this.resolveRequiredPlaceholders((String) value); + } if (debugEnabled) { logger.debug( format("Found key '%s' in [%s] with type [%s] and value '%s'", diff --git a/spring-core/src/main/java/org/springframework/util/PropertyPlaceholderHelper.java b/spring-core/src/main/java/org/springframework/util/PropertyPlaceholderHelper.java index 5c769fae5ced..0bea53ea6a49 100644 --- a/spring-core/src/main/java/org/springframework/util/PropertyPlaceholderHelper.java +++ b/spring-core/src/main/java/org/springframework/util/PropertyPlaceholderHelper.java @@ -171,7 +171,8 @@ else if (this.ignoreUnresolvablePlaceholders) { startIndex = buf.indexOf(this.placeholderPrefix, endIndex + this.placeholderSuffix.length()); } else { - throw new IllegalArgumentException("Could not resolve placeholder '" + placeholder + "'"); + throw new IllegalArgumentException("Could not resolve placeholder '" + + placeholder + "'" + " in string value [" + strVal + "]"); } visitedPlaceholders.remove(originalPlaceholder); diff --git a/spring-core/src/test/java/org/springframework/core/env/PropertySourcesPropertyResolverTests.java b/spring-core/src/test/java/org/springframework/core/env/PropertySourcesPropertyResolverTests.java index 4d02d12ef452..c51fd358c827 100644 --- a/spring-core/src/test/java/org/springframework/core/env/PropertySourcesPropertyResolverTests.java +++ b/spring-core/src/test/java/org/springframework/core/env/PropertySourcesPropertyResolverTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2002-2011 the original author or authors. + * Copyright 2002-2012 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,22 +16,20 @@ package org.springframework.core.env; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.nullValue; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - import java.util.HashMap; import java.util.Map; import java.util.Properties; +import org.hamcrest.Matchers; import org.junit.Before; import org.junit.Test; + import org.springframework.core.convert.ConversionException; import org.springframework.mock.env.MockPropertySource; +import static org.hamcrest.CoreMatchers.*; +import static org.junit.Assert.*; + /** * Unit tests for {@link PropertySourcesPropertyResolver}. 
* @@ -352,6 +350,39 @@ public void setRequiredProperties_andValidateRequiredProperties() { propertyResolver.validateRequiredProperties(); } + @Test + public void resolveNestedPropertyPlaceholders() { + MutablePropertySources ps = new MutablePropertySources(); + ps.addFirst(new MockPropertySource() + .withProperty("p1", "v1") + .withProperty("p2", "v2") + .withProperty("p3", "${p1}:${p2}") // nested placeholders + .withProperty("p4", "${p3}") // deeply nested placeholders + .withProperty("p5", "${p1}:${p2}:${bogus}") // unresolvable placeholder + .withProperty("p6", "${p1}:${p2}:${bogus:def}") // unresolvable w/ default + .withProperty("pL", "${pR}") // cyclic reference left + .withProperty("pR", "${pL}") // cyclic reference right + ); + PropertySourcesPropertyResolver pr = new PropertySourcesPropertyResolver(ps); + assertThat(pr.getProperty("p1"), equalTo("v1")); + assertThat(pr.getProperty("p2"), equalTo("v2")); + assertThat(pr.getProperty("p3"), equalTo("v1:v2")); + assertThat(pr.getProperty("p4"), equalTo("v1:v2")); + try { + pr.getProperty("p5"); + } catch (IllegalArgumentException ex) { + assertThat(ex.getMessage(), Matchers.containsString( + "Could not resolve placeholder 'bogus' in string value [${p1}:${p2}:${bogus}]")); + } + assertThat(pr.getProperty("p6"), equalTo("v1:v2:def")); + try { + pr.getProperty("pL"); + } catch (StackOverflowError ex) { + // no explicit handling for cyclic references for now + } + } + + static interface SomeType { } static class SpecificType implements SomeType { } }
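The fixed behavior can be reproduced with a short standalone program; this is a sketch using the public Spring API, with MapPropertySource standing in for the MockPropertySource the test uses.

import java.util.HashMap;
import java.util.Map;

import org.springframework.core.env.MapPropertySource;
import org.springframework.core.env.MutablePropertySources;
import org.springframework.core.env.PropertySourcesPropertyResolver;

public class NestedPlaceholderDemo {
    public static void main(String[] args) {
        Map<String, Object> props = new HashMap<String, Object>();
        props.put("p1", "v1");
        props.put("p2", "v2");
        props.put("p3", "${p1}:${p2}"); // nested placeholders
        MutablePropertySources sources = new MutablePropertySources();
        sources.addFirst(new MapPropertySource("demo", props));
        PropertySourcesPropertyResolver resolver = new PropertySourcesPropertyResolver(sources);
        // before the fix this returned the literal "${p1}:${p2}"
        System.out.println(resolver.getProperty("p3")); // prints "v1:v2"
    }
}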
e32f16b78909e9567d3410edd5cb760ac2a70bfd
elasticsearch
Fix file handle leak in readBlob method of AbstractFsBlobContainer
c
https://github.com/elastic/elasticsearch
diff --git a/src/main/java/org/elasticsearch/common/blobstore/fs/AbstractFsBlobContainer.java b/src/main/java/org/elasticsearch/common/blobstore/fs/AbstractFsBlobContainer.java index 0565220424839..ebf9eb6a83d4c 100644 --- a/src/main/java/org/elasticsearch/common/blobstore/fs/AbstractFsBlobContainer.java +++ b/src/main/java/org/elasticsearch/common/blobstore/fs/AbstractFsBlobContainer.java @@ -84,21 +84,17 @@ public void run() { FileInputStream is = null; try { is = new FileInputStream(new File(path, blobName)); - } catch (FileNotFoundException e) { - IOUtils.closeWhileHandlingException(is); - listener.onFailure(e); - return; - } - try { int bytesRead; while ((bytesRead = is.read(buffer)) != -1) { listener.onPartial(buffer, 0, bytesRead); } - listener.onCompleted(); - } catch (Exception e) { + } catch (Throwable e) { IOUtils.closeWhileHandlingException(is); listener.onFailure(e); + return; } + IOUtils.closeWhileHandlingException(is); + listener.onCompleted(); } }); }
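The essence of the leak was a stream left open when read() threw after the FileNotFoundException guard had passed. A minimal sketch of the invariant the fix restores, using plain try/finally (the commit itself uses Lucene's IOUtils.closeWhileHandlingException; the class and method names here are illustrative):

import java.io.FileInputStream;
import java.io.IOException;

public class SafeReadSketch {
    // Reads a file in chunks and guarantees the stream is closed on
    // success and on failure alike - the property readBlob now has.
    static void readAll(String path, byte[] buffer) throws IOException {
        FileInputStream is = new FileInputStream(path);
        try {
            int bytesRead;
            while ((bytesRead = is.read(buffer)) != -1) {
                // hand buffer[0..bytesRead) to the listener here
            }
        } finally {
            is.close(); // runs on every exit path
        }
    }
}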
41a8d9ab2bd15c19edff0f374179fba4db5405a7
hbase
HBASE-2787 PE is confused about flushCommits

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@957750 13f79535-47bb-0310-9956-ffa450edef68
c
https://github.com/apache/hbase
diff --git a/CHANGES.txt b/CHANGES.txt index 3be1383ed323..53fd598b7541 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -422,6 +422,7 @@ Release 0.21.0 - Unreleased HBASE-2774 Spin in ReadWriteConsistencyControl eating CPU (load > 40) and no progress running YCSB on clean cluster startup HBASE-2785 TestScannerTimeout.test2772 is flaky + HBASE-2787 PE is confused about flushCommits IMPROVEMENTS HBASE-1760 Cleanup TODOs in HTable diff --git a/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java b/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java index 65aafc07a0df..3b756878409c 100644 --- a/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java +++ b/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java @@ -118,7 +118,7 @@ public class PerformanceEvaluation { private boolean nomapred = false; private int N = 1; private int R = ROWS_PER_GB; - private boolean flushCommits = false; + private boolean flushCommits = true; private boolean writeToWAL = true; private static final Path PERF_EVAL_DIR = new Path("performance_evaluation"); @@ -1250,7 +1250,7 @@ public int doCommandLine(final String[] args) { final String writeToWAL = "--writeToWAL="; if (cmd.startsWith(writeToWAL)) { - this.flushCommits = Boolean.parseBoolean(cmd.substring(writeToWAL.length())); + this.writeToWAL = Boolean.parseBoolean(cmd.substring(writeToWAL.length())); continue; }
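The bug was a copy-paste slip: the value of the --writeToWAL flag was parsed into the flushCommits field. A sketch of the corrected parsing shape (the flag name is taken from the diff; the surrounding class is illustrative):

public class FlagParsingSketch {
    private boolean flushCommits = true; // new default from this commit
    private boolean writeToWAL = true;

    void parse(String[] args) {
        for (String cmd : args) {
            final String wal = "--writeToWAL=";
            if (cmd.startsWith(wal)) {
                // each prefix must feed its own field; the original code
                // assigned this value to flushCommits instead
                this.writeToWAL = Boolean.parseBoolean(cmd.substring(wal.length()));
            }
        }
    }
}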
ac53634e318a28950845d0e2ae429e89ab1e9fd1
restlet-framework-java
JAX-RS extension - Issue 800 (an NPE): I've checked all methods with the name, and all possible points for NPE should be defanged now
c
https://github.com/restlet/restlet-framework-java
diff --git a/modules/org.restlet.ext.jaxrs/src/org/restlet/ext/jaxrs/internal/util/Util.java b/modules/org.restlet.ext.jaxrs/src/org/restlet/ext/jaxrs/internal/util/Util.java index 6accbacefd..e8e9a42937 100644 --- a/modules/org.restlet.ext.jaxrs/src/org/restlet/ext/jaxrs/internal/util/Util.java +++ b/modules/org.restlet.ext.jaxrs/src/org/restlet/ext/jaxrs/internal/util/Util.java @@ -77,8 +77,8 @@ import org.restlet.data.Request; import org.restlet.data.Response; import org.restlet.engine.http.ContentType; -import org.restlet.engine.http.HttpClientCall; import org.restlet.engine.http.HttpClientAdapter; +import org.restlet.engine.http.HttpClientCall; import org.restlet.engine.http.HttpServerAdapter; import org.restlet.engine.http.HttpUtils; import org.restlet.engine.util.DateUtils; @@ -91,7 +91,6 @@ import org.restlet.ext.jaxrs.internal.exceptions.JaxRsRuntimeException; import org.restlet.ext.jaxrs.internal.exceptions.MethodInvokeException; import org.restlet.ext.jaxrs.internal.exceptions.MissingAnnotationException; -import org.restlet.ext.jaxrs.internal.provider.JaxbElementProvider; import org.restlet.representation.EmptyRepresentation; import org.restlet.representation.Representation; import org.restlet.util.Series; @@ -306,8 +305,8 @@ public static void copyResponseHeaders( restletResponse.setEntity(new EmptyRepresentation()); } - HttpClientAdapter.copyResponseTransportHeaders(headers, - restletResponse); + HttpClientAdapter + .copyResponseTransportHeaders(headers, restletResponse); HttpClientCall.copyResponseEntityHeaders(headers, restletResponse .getEntity()); } @@ -325,8 +324,8 @@ public static void copyResponseHeaders( public static Series<Parameter> copyResponseHeaders(Response restletResponse) { final Series<Parameter> headers = new Form(); HttpServerAdapter.addResponseHeaders(restletResponse, headers); - HttpServerAdapter.addEntityHeaders(restletResponse.getEntity(), - headers); + HttpServerAdapter + .addEntityHeaders(restletResponse.getEntity(), headers); return headers; } @@ -752,24 +751,29 @@ public static <K, V> V getFirstValue(Map<K, V> map) */ public static Class<?> getGenericClass(Class<?> clazz, Class<?> implInterface) { + if (clazz == null) + throw new IllegalArgumentException("The class must not be null"); + if (implInterface == null) + throw new IllegalArgumentException( + "The interface to b eimplemented must not be null"); return getGenericClass(clazz, implInterface, null); } private static Class<?> getGenericClass(Class<?> clazz, Class<?> implInterface, Type[] gsatp) { - if (clazz.equals(JaxbElementProvider.class)) { - clazz.toString(); - } else if (clazz.equals(MultivaluedMap.class)) { - clazz.toString(); - } for (Type ifGenericType : clazz.getGenericInterfaces()) { if (!(ifGenericType instanceof ParameterizedType)) { continue; } final ParameterizedType pt = (ParameterizedType) ifGenericType; - if (!pt.getRawType().equals(implInterface)) + Type ptRawType = pt.getRawType(); + if (ptRawType == null) + continue; + if (!ptRawType.equals(implInterface)) continue; final Type[] atps = pt.getActualTypeArguments(); + if (atps == null || atps.length == 0) + continue; final Type atp = atps[0]; if (atp instanceof Class) { return (Class<?>) atp; @@ -783,13 +787,18 @@ private static Class<?> getGenericClass(Class<?> clazz, if (atp instanceof TypeVariable<?>) { TypeVariable<?> tv = (TypeVariable<?>) atp; String name = tv.getName(); + if (name == null) + continue; // clazz = AbstractProvider // implInterface = MessageBodyReader // name = "T" // pt = MessageBodyReader<T> for (int i 
= 0; i < atps.length; i++) { TypeVariable<?> tv2 = (TypeVariable<?>) atps[i]; - if (tv2.getName().equals(name)) { + String tv2Name = tv2.getName(); + if (tv2Name == null) + continue; + if (tv2Name.equals(name)) { Type gsatpn = gsatp[i]; if (gsatpn instanceof Class) { return (Class<?>) gsatpn; @@ -836,7 +845,7 @@ private static Class<?> getGenericClass(Class<?> clazz, } /** - * Example: in List&lt;String&lt; -&gt; out: String.class + * Example: in List&lt;String&gt; -&gt; out: String.class * * @param genericType * @return otherwise null @@ -846,7 +855,10 @@ public static Class<?> getGenericClass(Type genericType) { return null; } final ParameterizedType pt = (ParameterizedType) genericType; - final Type atp = pt.getActualTypeArguments()[0]; + Type[] actualTypeArguments = pt.getActualTypeArguments(); + if(actualTypeArguments == null || actualTypeArguments.length == 0) + return null; + final Type atp = actualTypeArguments[0]; if (atp instanceof Class) { return (Class<?>) atp; }
5d4ad11296c8ed4eb447509c110ca206d6c94218
camel
CAMEL-845: added firedtime property.

git-svn-id: https://svn.apache.org/repos/asf/activemq/camel/trunk@688957 13f79535-47bb-0310-9956-ffa450edef68
a
https://github.com/apache/camel
diff --git a/camel-core/src/main/java/org/apache/camel/component/timer/TimerConsumer.java b/camel-core/src/main/java/org/apache/camel/component/timer/TimerConsumer.java index 94fad2c5e0f2a..6b25e5dae839c 100644 --- a/camel-core/src/main/java/org/apache/camel/component/timer/TimerConsumer.java +++ b/camel-core/src/main/java/org/apache/camel/component/timer/TimerConsumer.java @@ -88,7 +88,12 @@ protected void sendTimerExchange() { exchange.setProperty("org.apache.camel.timer.name", endpoint.getTimerName()); exchange.setProperty("org.apache.camel.timer.time", endpoint.getTime()); exchange.setProperty("org.apache.camel.timer.period", endpoint.getPeriod()); - exchange.setProperty("org.apache.camel.timer.firedtime", new Date()); + + Date now = new Date(); + exchange.setProperty("org.apache.camel.timer.firedTime", now); + // also set now on in header with same key as quaartz to be consistent + exchange.getIn().setHeader("firedTime", now); + try { getProcessor().process(exchange); } catch (Exception e) { diff --git a/camel-core/src/test/java/org/apache/camel/component/timer/TimerFiredTimeTest.java b/camel-core/src/test/java/org/apache/camel/component/timer/TimerFiredTimeTest.java index 43abcadaaec91..96badc297e140 100644 --- a/camel-core/src/test/java/org/apache/camel/component/timer/TimerFiredTimeTest.java +++ b/camel-core/src/test/java/org/apache/camel/component/timer/TimerFiredTimeTest.java @@ -34,7 +34,8 @@ public void testFired() throws Exception { Exchange exchange = mock.getExchanges().get(0); assertEquals("hello", exchange.getProperty("org.apache.camel.timer.name")); - assertNotNull(exchange.getProperty("org.apache.camel.timer.firedtime")); + assertNotNull(exchange.getProperty("org.apache.camel.timer.firedTime")); + assertNotNull(exchange.getIn().getHeader("firedTime")); } @Override
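A consumer can read the new header like any other Camel header; a sketch, with an illustrative timer endpoint URI and route class:

import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.builder.RouteBuilder;

public class FiredTimeRoute extends RouteBuilder {
    @Override
    public void configure() throws Exception {
        from("timer://foo?period=1000").process(new Processor() {
            public void process(Exchange exchange) throws Exception {
                // the commit stores the same timestamp in both places
                Object header = exchange.getIn().getHeader("firedTime");
                Object property = exchange.getProperty("org.apache.camel.timer.firedTime");
                System.out.println("timer fired at " + header + " / " + property);
            }
        });
    }
}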
af522d6852fef03bfd02233f2066156db4459b90
hbase
HBASE-11248 KeyOnlyKeyValue#toString() passes wrong offset to keyToString() (Ram)
c
https://github.com/apache/hbase
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java index 4cca2d46a275..0fdd9e46c18f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java @@ -2741,8 +2741,7 @@ public String toString() { if (this.b == null || this.b.length == 0) { return "empty"; } - return keyToString(this.b, this.offset + ROW_OFFSET, getKeyLength()) + "/vlen=" - + getValueLength() + "/mvcc=" + 0; + return keyToString(this.b, this.offset, getKeyLength()) + "/vlen=0/mvcc=0"; } @Override
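The reasoning behind the one-line fix, as a sketch: a regular KeyValue buffer is prefixed by two 4-byte length fields before the key bytes, while a key-only view is backed by the key bytes alone, so its key starts at the raw offset and its value length is always 0. This assumes HBase's KeyValue layout as reflected in the diff; the class here is illustrative.

public class KeyOnlyOffsetSketch {
    // ROW_OFFSET in KeyValue: 4 bytes of key length + 4 bytes of value length
    static final int ROW_OFFSET = 4 + 4;

    // where the key bytes begin inside the backing array
    static int keyStart(boolean keyOnlyView, int offset) {
        return keyOnlyView
                ? offset              // buffer holds only the key
                : offset + ROW_OFFSET; // skip the two length ints
    }
}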
5813377777c7b1e56ff0a0c67d61a4bfd63f2cbe
restlet-framework-java
- Fixed potential NPE in ReferenceList
c
https://github.com/restlet/restlet-framework-java
diff --git a/module/org.restlet.example/src/org/restlet/example/book/rest/ch7/handler/BookmarkHandler.java b/module/org.restlet.example/src/org/restlet/example/book/rest/ch7/handler/BookmarkHandler.java index 8a9de741db..64e592fb15 100644 --- a/module/org.restlet.example/src/org/restlet/example/book/rest/ch7/handler/BookmarkHandler.java +++ b/module/org.restlet.example/src/org/restlet/example/book/rest/ch7/handler/BookmarkHandler.java @@ -19,6 +19,7 @@ package org.restlet.example.book.rest.ch7.handler; import org.restlet.Handler; +import org.restlet.data.ChallengeResponse; import org.restlet.data.Request; import org.restlet.data.Response; import org.restlet.example.book.rest.ch7.resource.BookmarkResource; @@ -34,8 +35,9 @@ public class BookmarkHandler extends Handler { @Override public Resource findTarget(final Request request, Response response) { String userName = (String) request.getAttributes().get("username"); - String login = request.getChallengeResponse().getIdentifier(); - String password = request.getChallengeResponse().getSecret(); + ChallengeResponse cr = request.getChallengeResponse(); + String login = (cr != null) ? cr.getIdentifier() : null; + String password = (cr != null) ? cr.getSecret() : null; String uri = (String) request.getAttributes().get("URI"); return new BookmarkResource(userName, login, password, uri); } diff --git a/module/org.restlet.example/src/org/restlet/example/book/rest/ch7/handler/BookmarksHandler.java b/module/org.restlet.example/src/org/restlet/example/book/rest/ch7/handler/BookmarksHandler.java index b35b229324..9cb3f210a6 100644 --- a/module/org.restlet.example/src/org/restlet/example/book/rest/ch7/handler/BookmarksHandler.java +++ b/module/org.restlet.example/src/org/restlet/example/book/rest/ch7/handler/BookmarksHandler.java @@ -19,6 +19,7 @@ package org.restlet.example.book.rest.ch7.handler; import org.restlet.Handler; +import org.restlet.data.ChallengeResponse; import org.restlet.data.Request; import org.restlet.data.Response; import org.restlet.example.book.rest.ch7.resource.BookmarksResource; @@ -34,8 +35,9 @@ public class BookmarksHandler extends Handler { @Override public Resource findTarget(final Request request, Response response) { String userName = (String) request.getAttributes().get("username"); - String login = request.getChallengeResponse().getIdentifier(); - String password = request.getChallengeResponse().getSecret(); + ChallengeResponse cr = request.getChallengeResponse(); + String login = (cr != null) ? cr.getIdentifier() : null; + String password = (cr != null) ? 
cr.getSecret() : null; return new BookmarksResource(userName, login, password); } } diff --git a/module/org.restlet/src/org/restlet/data/ReferenceList.java b/module/org.restlet/src/org/restlet/data/ReferenceList.java index b2ea964788..95cedafcb4 100644 --- a/module/org.restlet/src/org/restlet/data/ReferenceList.java +++ b/module/org.restlet/src/org/restlet/data/ReferenceList.java @@ -146,13 +146,17 @@ public Representation getWebRepresentation() { // Create a simple HTML list StringBuilder sb = new StringBuilder(); sb.append("<html><body>\n"); - sb - .append("<h1>Listing of \"" + getIdentifier().getPath() - + "\"</h1>\n"); - Reference parentRef = getIdentifier().getParentRef(); - if (!parentRef.equals(getIdentifier())) { - sb.append("<a href=\"" + parentRef + "\">..</a><br/>\n"); + if (getIdentifier() != null) { + sb.append("<h2>Listing of \"" + getIdentifier().getPath() + + "\"</h2>\n"); + Reference parentRef = getIdentifier().getParentRef(); + + if (!parentRef.equals(getIdentifier())) { + sb.append("<a href=\"" + parentRef + "\">..</a><br/>\n"); + } + } else { + sb.append("<h2>List of references</h2>\n"); } for (Reference ref : this) {
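Both handler fixes apply the same shape: fetch the possibly-null object once, then derive every field through a guard instead of chaining calls off it. A self-contained sketch (the two interfaces are stand-ins for the Restlet types):

public class NullGuardSketch {
    interface ChallengeResponse { String getIdentifier(); String getSecret(); }
    interface Request { ChallengeResponse getChallengeResponse(); }

    // getChallengeResponse() is null for unauthenticated requests, so the
    // old request.getChallengeResponse().getIdentifier() chain threw an NPE
    static String[] credentials(Request request) {
        ChallengeResponse cr = request.getChallengeResponse();
        String login = (cr != null) ? cr.getIdentifier() : null;
        String password = (cr != null) ? cr.getSecret() : null;
        return new String[] { login, password };
    }
}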
d43b3d1f76bc13cd4c2bf8cb4f951f751aea86e6
ReactiveX-RxJava
Fix autoConnect calling onStart twice.
c
https://github.com/ReactiveX/RxJava
diff --git a/src/main/java/rx/internal/operators/OnSubscribeAutoConnect.java b/src/main/java/rx/internal/operators/OnSubscribeAutoConnect.java index c664717332..75ea9c82cf 100644 --- a/src/main/java/rx/internal/operators/OnSubscribeAutoConnect.java +++ b/src/main/java/rx/internal/operators/OnSubscribeAutoConnect.java @@ -18,9 +18,11 @@ import java.util.concurrent.atomic.AtomicInteger; import rx.Observable.OnSubscribe; -import rx.*; +import rx.Subscriber; +import rx.Subscription; import rx.functions.Action1; import rx.observables.ConnectableObservable; +import rx.observers.Subscribers; /** * Wraps a ConnectableObservable and calls its connect() method once @@ -47,7 +49,7 @@ public OnSubscribeAutoConnect(ConnectableObservable<? extends T> source, } @Override public void call(Subscriber<? super T> child) { - source.unsafeSubscribe(child); + source.unsafeSubscribe(Subscribers.wrap(child)); if (clients.incrementAndGet() == numberOfSubscribers) { source.connect(connection); }
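Why the wrap helps, as I read the change: unsafeSubscribe invokes onStart() on whatever Subscriber it is handed, and the child's onStart() was already invoked by its own subscription chain. Subscribers.wrap returns a fresh Subscriber that forwards onNext/onError/onCompleted to the child but keeps the default no-op onStart, so the second invocation never reaches the child. A sketch:

import rx.Subscriber;
import rx.observers.Subscribers;

public class WrapSketch {
    public static void main(String[] args) {
        Subscriber<Object> child = new Subscriber<Object>() {
            @Override public void onStart() { System.out.println("child onStart"); }
            @Override public void onNext(Object t) { System.out.println("child onNext: " + t); }
            @Override public void onError(Throwable e) { }
            @Override public void onCompleted() { }
        };
        child.onStart(); // first start, from the child's own subscribe chain
        Subscriber<Object> wrapped = Subscribers.wrap(child);
        wrapped.onStart();       // no-op: does not restart the child
        wrapped.onNext("event"); // still delegates to the child
    }
}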
a0daab1afdab6266b59424efa079931623e2d5bf
orientdb
Fixed bugs on index where internal records were not deleted
c
https://github.com/orientechnologies/orientdb
diff --git a/commons/src/main/java/com/orientechnologies/common/collection/AbstractEntryIterator.java b/commons/src/main/java/com/orientechnologies/common/collection/AbstractEntryIterator.java index 53c02178eb8..4ec332fb673 100644 --- a/commons/src/main/java/com/orientechnologies/common/collection/AbstractEntryIterator.java +++ b/commons/src/main/java/com/orientechnologies/common/collection/AbstractEntryIterator.java @@ -58,8 +58,8 @@ final OMVRBTreeEntry<K, V> nextEntry() { next = OMVRBTree.successor(next); tree.pageIndex = 0; - lastReturned = next; } + lastReturned = next; return next; } @@ -78,9 +78,9 @@ final OMVRBTreeEntry<K, V> prevEntry() { next = OMVRBTree.predecessor(e); tree.pageIndex = next.getSize() - 1; - lastReturned = e; } + lastReturned = e; return e; } @@ -92,7 +92,7 @@ public void remove() { // deleted entries are replaced by their successors if (lastReturned.getLeft() != null && lastReturned.getRight() != null) next = lastReturned; - tree.deleteEntry(lastReturned); + next = tree.deleteEntry(lastReturned); expectedModCount = tree.modCount; lastReturned = null; } diff --git a/commons/src/main/java/com/orientechnologies/common/collection/OMVRBTree.java b/commons/src/main/java/com/orientechnologies/common/collection/OMVRBTree.java index 8d3cb611280..4561da2d673 100644 --- a/commons/src/main/java/com/orientechnologies/common/collection/OMVRBTree.java +++ b/commons/src/main/java/com/orientechnologies/common/collection/OMVRBTree.java @@ -2377,8 +2377,9 @@ private void insert_case5(final OMVRBTreeEntry<K, V> n) { * * @param p * node to delete + * @return */ - void deleteEntry(OMVRBTreeEntry<K, V> p) { + OMVRBTreeEntry<K, V> deleteEntry(OMVRBTreeEntry<K, V> p) { setSize(size() - 1); if (listener != null) @@ -2389,11 +2390,15 @@ void deleteEntry(OMVRBTreeEntry<K, V> p) { p.remove(); if (p.getSize() > 0) - return; + return p; } + final OMVRBTreeEntry<K, V> next = successor(p); // DELETE THE ENTIRE NODE, RE-BUILDING THE STRUCTURE removeNode(p); + + // RETURN NEXT NODE + return next; } /** diff --git a/core/src/main/java/com/orientechnologies/orient/core/serialization/OMemoryStream.java b/core/src/main/java/com/orientechnologies/orient/core/serialization/OMemoryStream.java index d741d60d6eb..4703da14f06 100644 --- a/core/src/main/java/com/orientechnologies/orient/core/serialization/OMemoryStream.java +++ b/core/src/main/java/com/orientechnologies/orient/core/serialization/OMemoryStream.java @@ -29,11 +29,12 @@ * */ public class OMemoryStream extends OutputStream { + public static final int DEF_SIZE = 1024; + private byte[] buffer; private int position; private static final int NATIVE_COPY_THRESHOLD = 9; - private static final int DEF_SIZE = 1024; // private int fixedSize = 0; @@ -74,6 +75,7 @@ public void copyFrom(final OMemoryStream iSource, final int iSize) { if (iSize < 0) return; + assureSpaceFor(position + iSize); System.arraycopy(iSource.buffer, iSource.position, buffer, position, iSize); } @@ -245,7 +247,7 @@ private void assureSpaceFor(final int iLength) { final int bufferLength = localBuffer.length; - if (bufferLength <= capacity) { + if (bufferLength < capacity) { OProfiler.getInstance().updateCounter("OMemOutStream.resize", +1); final byte[] newbuf = new byte[Math.max(bufferLength << 1, capacity)]; diff --git a/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerCSVAbstract.java b/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerCSVAbstract.java index 
b1a8329e416..88f28193781 100644 --- a/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerCSVAbstract.java +++ b/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerCSVAbstract.java @@ -700,7 +700,7 @@ private static OIdentifiable linkToStream(final StringBuilder buffer, final ORec // JUST THE REFERENCE rid = (ORID) iLinked; - if (rid.isNew()) { + if (rid.isValid() && rid.isNew()) { // SAVE AT THE FLY AND STORE THE NEW RID final ORecord<?> record = rid.getRecord(); diff --git a/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeEntryPersistent.java b/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeEntryPersistent.java index cc2dc8d125a..4c208d0e52c 100644 --- a/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeEntryPersistent.java +++ b/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeEntryPersistent.java @@ -139,7 +139,7 @@ public OMVRBTreeEntryPersistent(final OMVRBTreeEntry<K, V> iParent, final int iP markDirty(); } - public OMVRBTreeEntryDataProvider<K, V> getDataEntry() { + public OMVRBTreeEntryDataProvider<K, V> getProvider() { return dataProvider; } @@ -240,20 +240,22 @@ protected void updateRefsAfterCreation() { * @throws IOException */ public OMVRBTreeEntryPersistent<K, V> delete() throws IOException { - pTree.removeNodeFromMemory(this); - pTree.removeEntry(dataProvider.getIdentity()); + if (dataProvider != null) { + pTree.removeNodeFromMemory(this); + pTree.removeEntry(dataProvider.getIdentity()); - // EARLY LOAD LEFT AND DELETE IT RECURSIVELY - if (getLeft() != null) - ((OMVRBTreeEntryPersistent<K, V>) getLeft()).delete(); + // EARLY LOAD LEFT AND DELETE IT RECURSIVELY + if (getLeft() != null) + ((OMVRBTreeEntryPersistent<K, V>) getLeft()).delete(); - // EARLY LOAD RIGHT AND DELETE IT RECURSIVELY - if (getRight() != null) - ((OMVRBTreeEntryPersistent<K, V>) getRight()).delete(); + // EARLY LOAD RIGHT AND DELETE IT RECURSIVELY + if (getRight() != null) + ((OMVRBTreeEntryPersistent<K, V>) getRight()).delete(); - // DELETE MYSELF - dataProvider.delete(); - clear(); + // DELETE MYSELF + dataProvider.delete(); + clear(); + } return this; } @@ -535,7 +537,7 @@ protected void remove() { if (dataProvider.removeAt(index)) markDirty(); - tree.setPageIndex(0); + tree.setPageIndex(index - 1); if (index == 0) pTree.updateEntryPoint(oldKey, this); diff --git a/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreePersistent.java b/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreePersistent.java index 7c125093dff..2f49542599c 100644 --- a/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreePersistent.java +++ b/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreePersistent.java @@ -31,6 +31,7 @@ import com.orientechnologies.common.log.OLogManager; import com.orientechnologies.common.profiler.OProfiler; import com.orientechnologies.orient.core.config.OGlobalConfiguration; +import com.orientechnologies.orient.core.exception.ORecordNotFoundException; import com.orientechnologies.orient.core.exception.OStorageException; import com.orientechnologies.orient.core.id.ORID; import com.orientechnologies.orient.core.memory.OLowMemoryException; @@ -114,7 +115,7 @@ public OMVRBTreePersistent<K, V> save() throws IOException { protected void saveTreeNode() throws IOException { if (root != null) { OMVRBTreeEntryPersistent<K, V> 
pRoot = (OMVRBTreeEntryPersistent<K, V>) root; - if (pRoot.getDataEntry().getIdentity().isNew()) { + if (pRoot.getProvider().getIdentity().isNew()) { // FIRST TIME: SAVE IT pRoot.save(); } @@ -189,7 +190,10 @@ public void clear() { try { recordsToCommit.clear(); if (root != null) { - ((OMVRBTreeEntryPersistent<K, V>) root).delete(); + try { + ((OMVRBTreeEntryPersistent<K, V>) root).delete(); + } catch (ORecordNotFoundException e) { + } super.clear(); markDirty(); save(); @@ -835,6 +839,7 @@ protected void rotateRight(final OMVRBTreeEntry<K, V> p) { @Override protected void removeNode(final OMVRBTreeEntry<K, V> p) { removeNodeFromMemory((OMVRBTreeEntryPersistent<K, V>) p); + ((OMVRBTreeEntryPersistent<K, V>) p).getProvider().delete(); super.removeNode(p); } diff --git a/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeRID.java b/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeRID.java index a3c4eb7bf6a..1998c809c54 100644 --- a/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeRID.java +++ b/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeRID.java @@ -42,6 +42,7 @@ public OMVRBTreeRID() { public OMVRBTreeRID(final ORID iRID) { this(new OMVRBTreeRIDProvider(null, iRID.getClusterId(), iRID)); + load(); } public OMVRBTreeRID(final String iClusterName) { diff --git a/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeRIDSet.java b/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeRIDSet.java index ef81bc20de0..9e9689948af 100644 --- a/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeRIDSet.java +++ b/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeRIDSet.java @@ -137,6 +137,7 @@ public void save() throws IOException { } public ODocument toDocument() { + tree.lazySave(); return ((OMVRBTreeRIDProvider) tree.getProvider()).toDocument(); } diff --git a/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeEntryDataProviderAbstract.java b/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeEntryDataProviderAbstract.java index f92ed3c02d8..d02516b795e 100644 --- a/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeEntryDataProviderAbstract.java +++ b/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeEntryDataProviderAbstract.java @@ -39,11 +39,12 @@ public abstract class OMVRBTreeEntryDataProviderAbstract<K, V> implements OMVRBT protected ORecordId rightRid; protected boolean color = OMVRBTree.RED; protected ORecordBytesLazy record; - protected OMemoryStream stream = new OMemoryStream(); + protected OMemoryStream stream; - public OMVRBTreeEntryDataProviderAbstract(final OMVRBTreeProviderAbstract<K, V> iTreeDataProvider) { + public OMVRBTreeEntryDataProviderAbstract(final OMVRBTreeProviderAbstract<K, V> iTreeDataProvider, final int iFixedSize) { this(iTreeDataProvider, null); pageSize = treeDataProvider.getDefaultPageSize(); + stream = new OMemoryStream(iFixedSize); } public OMVRBTreeEntryDataProviderAbstract(final OMVRBTreeProviderAbstract<K, V> iTreeDataProvider, final ORID iRID) { diff --git a/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeMapEntryProvider.java b/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeMapEntryProvider.java index 7b075da0b07..37f1e976772 100644 --- 
a/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeMapEntryProvider.java +++ b/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeMapEntryProvider.java @@ -36,7 +36,7 @@ public class OMVRBTreeMapEntryProvider<K, V> extends OMVRBTreeEntryDataProviderA @SuppressWarnings("unchecked") public OMVRBTreeMapEntryProvider(final OMVRBTreeMapProvider<K, V> iTreeDataProvider) { - super(iTreeDataProvider); + super(iTreeDataProvider, OMemoryStream.DEF_SIZE); keys = (K[]) new Object[pageSize]; values = (V[]) new Object[pageSize]; serializedKeys = new int[pageSize]; @@ -199,7 +199,10 @@ public void clear() { public OSerializableStream fromStream(final byte[] iStream) throws OSerializationException { final long timer = OProfiler.getInstance().startChrono(); - stream.setSource(iStream); + if (stream == null) + stream = new OMemoryStream(iStream); + else + stream.setSource(iStream); try { pageSize = stream.getAsInteger(); diff --git a/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeProvider.java b/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeProvider.java index 1b90c50db65..0ae061e25d2 100644 --- a/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeProvider.java +++ b/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeProvider.java @@ -35,10 +35,10 @@ public interface OMVRBTreeProvider<K, V> { public ORID getRoot(); - public boolean setSize(int iSize); - public boolean setRoot(ORID iRid); + public boolean setSize(int iSize); + /** Give a chance to update config parameters (defaultSizePage, ...) */ public boolean updateConfig(); diff --git a/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeRIDEntryProvider.java b/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeRIDEntryProvider.java index 87da12afb56..6fc516f6af8 100644 --- a/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeRIDEntryProvider.java +++ b/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeRIDEntryProvider.java @@ -53,7 +53,7 @@ public class OMVRBTreeRIDEntryProvider extends OMVRBTreeEntryDataProviderAbstrac protected final static int OFFSET_RIDLIST = OFFSET_RIGHT + ORecordId.PERSISTENT_SIZE; public OMVRBTreeRIDEntryProvider(final OMVRBTreeRIDProvider iTreeDataProvider) { - super(iTreeDataProvider); + super(iTreeDataProvider, OFFSET_RIDLIST + (iTreeDataProvider.getDefaultPageSize() * ORecordId.PERSISTENT_SIZE)); } public OMVRBTreeRIDEntryProvider(final OMVRBTreeRIDProvider iTreeDataProvider, final ORID iRID) { @@ -101,16 +101,14 @@ public boolean removeAt(final int iIndex) { return setDirty(); } - public boolean copyDataFrom(final OMVRBTreeEntryDataProvider<OIdentifiable, OIdentifiable> iFrom, int iStartPosition) { - OMVRBTreeRIDEntryProvider parent = (OMVRBTreeRIDEntryProvider) iFrom; + public boolean copyDataFrom(final OMVRBTreeEntryDataProvider<OIdentifiable, OIdentifiable> iFrom, final int iStartPosition) { size = iFrom.getSize() - iStartPosition; - stream.jump(0).copyFrom(parent.moveToIndex(iStartPosition), size); - stream.setSource(parent.stream.copy()); + moveToIndex(0).copyFrom(((OMVRBTreeRIDEntryProvider) iFrom).moveToIndex(iStartPosition), size * ORecordId.PERSISTENT_SIZE); return setDirty(); } public boolean truncate(final int iNewSize) { - stream.jump(iNewSize).fill(size - iNewSize, (byte) 0); + 
moveToIndex(iNewSize).fill((size - iNewSize) * ORecordId.PERSISTENT_SIZE, (byte) 0); size = iNewSize; return setDirty(); } @@ -125,7 +123,10 @@ public boolean copyFrom(final OMVRBTreeEntryDataProvider<OIdentifiable, OIdentif } public OSerializableStream fromStream(final byte[] iStream) throws OSerializationException { - stream.setSource(iStream); + if (stream == null) + stream = new OMemoryStream(iStream); + else + stream.setSource(iStream); size = stream.jump(OFFSET_NODESIZE).getAsInteger(); color = stream.jump(OFFSET_COLOR).getAsBoolean(); @@ -148,7 +149,7 @@ public byte[] toStream() throws OSerializationException { } // RETURN DIRECTLY THE UNDERLYING BUFFER SINCE IT'S FIXED - final byte[] buffer = stream.toByteArray(); + final byte[] buffer = stream.getInternalBuffer(); record.fromStream(buffer); return buffer; } diff --git a/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeRIDProvider.java b/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeRIDProvider.java index 369ff25aae5..1b6c167a327 100644 --- a/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeRIDProvider.java +++ b/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeRIDProvider.java @@ -29,6 +29,7 @@ import com.orientechnologies.orient.core.serialization.OSerializableStream; import com.orientechnologies.orient.core.serialization.serializer.string.OStringBuilderSerializable; import com.orientechnologies.orient.core.storage.OStorage; +import com.orientechnologies.orient.core.type.tree.OMVRBTreePersistent; /** * MVRB-Tree implementation to handle a set of RID. @@ -37,22 +38,20 @@ */ public class OMVRBTreeRIDProvider extends OMVRBTreeProviderAbstract<OIdentifiable, OIdentifiable> implements OStringBuilderSerializable { - private static final long serialVersionUID = 1L; - private static final int PROTOCOL_VERSION = 0; + private static final long serialVersionUID = 1L; + private static final int PROTOCOL_VERSION = 0; - private OMVRBTree<OIdentifiable, OIdentifiable> tree; - private boolean embeddedStreaming = true; + private OMVRBTreePersistent<OIdentifiable, OIdentifiable> tree; + private boolean embeddedStreaming = true; public OMVRBTreeRIDProvider(final OStorage iStorage, final int iClusterId, final ORID iRID) { this(iStorage, getDatabase().getClusterNameById(iClusterId)); record.setIdentity(iRID.getClusterId(), iRID.getClusterPosition()); - load(); } public OMVRBTreeRIDProvider(final OStorage iStorage, final String iClusterName, final ORID iRID) { this(iStorage, iClusterName); record.setIdentity(iRID.getClusterId(), iRID.getClusterPosition()); - load(); } public OMVRBTreeRIDProvider(final OStorage iStorage, final int iClusterId) { @@ -126,7 +125,7 @@ public OMVRBTree<OIdentifiable, OIdentifiable> getTree() { return tree; } - public void setTree(OMVRBTree<OIdentifiable, OIdentifiable> tree) { + public void setTree(final OMVRBTreePersistent<OIdentifiable, OIdentifiable> tree) { this.tree = tree; } diff --git a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java index b2bafcfc634..7da0d62ee3f 100644 --- a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java +++ b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java @@ -871,9 +871,11 @@ 
else if (iLinked instanceof Map<?, ?>) } protected int deleteRecord(final ORID rid, final int version) { - ORecordInternal<?> record = connection.database.load(rid); - record.setVersion(version); - record.delete(); + final ORecordInternal<?> record = connection.database.load(rid); + if (record != null) { + record.setVersion(version); + record.delete(); + } return 1; } diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/OMVRBTreeTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/OMVRBTreeTest.java index f998fd8529a..295366fc265 100644 --- a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/OMVRBTreeTest.java +++ b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/OMVRBTreeTest.java @@ -15,6 +15,8 @@ */ package com.orientechnologies.orient.test.database.auto; +import java.util.Iterator; + import org.testng.Assert; import org.testng.annotations.Parameters; import org.testng.annotations.Test; @@ -22,6 +24,7 @@ import com.orientechnologies.orient.core.db.document.ODatabaseDocument; import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx; import com.orientechnologies.orient.core.db.record.OIdentifiable; +import com.orientechnologies.orient.core.id.ORID; import com.orientechnologies.orient.core.id.ORecordId; import com.orientechnologies.orient.core.record.impl.ODocument; import com.orientechnologies.orient.core.type.tree.OMVRBTreeRIDSet; @@ -48,23 +51,45 @@ public OMVRBTreeTest(String iURL) { @Test public void treeSet() { database.open("admin", "admin"); + int total = 1000; OMVRBTreeRIDSet set = new OMVRBTreeRIDSet("index"); - for (int i = 0; i < 10000; ++i) + for (int i = 0; i < total; ++i) set.add(new ORecordId(10, i)); - Assert.assertEquals(set.size(), 10000); + Assert.assertEquals(set.size(), total); ODocument doc = set.toDocument(); doc.save(); database.close(); database.open("admin", "admin"); - OMVRBTreeRIDSet set2 = new OMVRBTreeRIDSet(doc.getIdentity()); - Assert.assertEquals(set2.size(), 10000); + OMVRBTreeRIDSet set2 = new OMVRBTreeRIDSet(doc.getIdentity()).setAutoConvert(false); + Assert.assertEquals(set2.size(), total); + // ITERABLE int i = 0; - for (OIdentifiable rid : set2) - Assert.assertEquals(rid.getIdentity().getClusterPosition(), i++); + for (OIdentifiable rid : set2) { + Assert.assertEquals(rid.getIdentity().getClusterPosition(), i); + // System.out.println("Adding " + rid); + i++; + } + Assert.assertEquals(i, total); + + ORID rootRID = doc.field("root", ORecordId.class); + + // ITERATOR REMOVE + i = 0; + for (Iterator<OIdentifiable> it = set2.iterator(); it.hasNext();) { + final OIdentifiable rid = it.next(); + Assert.assertEquals(rid.getIdentity().getClusterPosition(), i); + // System.out.println("Removing " + rid); + it.remove(); + i++; + } + Assert.assertEquals(i, total); + Assert.assertEquals(set2.size(), 0); + + //Assert.assertNull(database.load(rootRID)); database.close(); }
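The key iterator fix in AbstractEntryIterator.remove() is that deleteEntry now returns the next valid entry, because deleting a node with two children restructures the tree and invalidates any successor computed beforehand. A sketch of that shape on a plain sorted set (OMVRBTree internals are not reproduced here; names are illustrative):

import java.util.SortedSet;
import java.util.TreeSet;

public class SuccessorOnDeleteSketch {
    // delete must hand back the next valid element itself; a successor
    // captured before the structural change may point at a removed node
    static Integer deleteAndReturnSuccessor(SortedSet<Integer> tree, Integer key) {
        SortedSet<Integer> tail = tree.tailSet(key + 1);
        Integer successor = tail.isEmpty() ? null : tail.first();
        tree.remove(key);
        return successor;
    }

    public static void main(String[] args) {
        SortedSet<Integer> tree = new TreeSet<Integer>();
        for (int i = 0; i < 5; i++) {
            tree.add(i);
        }
        System.out.println(deleteAndReturnSuccessor(tree, 2)); // prints 3
    }
}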
aec021e668ad6786d20feaadf119f5407c2b3191
kotlin
Implemented better rendering for parameters with default values in decompiler and descriptor renderer.

KT-1582 fixed
p
https://github.com/JetBrains/kotlin
diff --git a/compiler/frontend/src/org/jetbrains/jet/resolve/DescriptorRenderer.java b/compiler/frontend/src/org/jetbrains/jet/resolve/DescriptorRenderer.java index 3d21cefaf2984..1d10cd18130c7 100644 --- a/compiler/frontend/src/org/jetbrains/jet/resolve/DescriptorRenderer.java +++ b/compiler/frontend/src/org/jetbrains/jet/resolve/DescriptorRenderer.java @@ -79,7 +79,11 @@ public Void visitTypeParameterDescriptor(TypeParameterDescriptor descriptor, Str @Override public Void visitValueParameterDescriptor(ValueParameterDescriptor descriptor, StringBuilder builder) { - return super.visitVariableDescriptor(descriptor, builder); + super.visitVariableDescriptor(descriptor, builder); + if (descriptor.hasDefaultValue()) { + builder.append(" = ..."); + } + return null; } }; diff --git a/compiler/testData/renderer/GlobalFunctions.kt b/compiler/testData/renderer/GlobalFunctions.kt index 1dcfd0b607792..21a4d40204ade 100644 --- a/compiler/testData/renderer/GlobalFunctions.kt +++ b/compiler/testData/renderer/GlobalFunctions.kt @@ -15,7 +15,7 @@ public fun Int.ext() : Int {} //internal final fun int() : jet.String defined in <module>.<root>.rendererTest //internal final fun int2(val ints : jet.IntArray) : jet.Int defined in <module>.<root>.rendererTest //value-parameter vararg val ints : jet.IntArray defined in <module>.<root>.rendererTest.int2 -//private final fun prv(val a : jet.String, val b : jet.Int) : jet.Int defined in <module>.<root>.rendererTest +//private final fun prv(val a : jet.String, val b : jet.Int = ...) : jet.Int defined in <module>.<root>.rendererTest //value-parameter val a : jet.String defined in <module>.<root>.rendererTest.prv //value-parameter val b : jet.Int defined in <module>.<root>.rendererTest.prv //public final fun jet.Int.ext() : jet.Int defined in <module>.<root>.rendererTest \ No newline at end of file diff --git a/idea/src/org/jetbrains/jet/plugin/libraries/DecompiledDataFactory.java b/idea/src/org/jetbrains/jet/plugin/libraries/DecompiledDataFactory.java index 1b45b4b1b02c3..9a29e15afdf39 100644 --- a/idea/src/org/jetbrains/jet/plugin/libraries/DecompiledDataFactory.java +++ b/idea/src/org/jetbrains/jet/plugin/libraries/DecompiledDataFactory.java @@ -179,7 +179,9 @@ public int compare(DeclarationDescriptor o1, DeclarationDescriptor o2) { private void appendDescriptor(DeclarationDescriptor descriptor, String indent) { int startOffset = myBuilder.length(); - myBuilder.append(DescriptorRenderer.COMPACT.render(descriptor)); + String renderedDescriptor = DescriptorRenderer.COMPACT.render(descriptor); + renderedDescriptor = renderedDescriptor.replace("= ...", "= " + DECOMPILED_COMMENT); + myBuilder.append(renderedDescriptor); int endOffset = myBuilder.length(); if (descriptor instanceof FunctionDescriptor || descriptor instanceof PropertyDescriptor) { diff --git a/idea/testData/libraries/decompiled/namespace.kt b/idea/testData/libraries/decompiled/namespace.kt index 0b9274c42a103..100819a064765 100644 --- a/idea/testData/libraries/decompiled/namespace.kt +++ b/idea/testData/libraries/decompiled/namespace.kt @@ -17,7 +17,7 @@ package testData.libraries [public final fun func(val a : jet.Int, val b : jet.Int) : Unit { /* compiled code */ }] -[public final fun func(val a : jet.Int, val b : jet.String) : Unit { /* compiled code */ }] +[public final fun func(val a : jet.Int, val b : jet.String = /* compiled code */) : Unit { /* compiled code */ }] [public final fun func(val str : jet.String) : Unit { /* compiled code */ }]
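The rendering happens in two stages: the renderer emits a placeholder "= ..." for any parameter that has a default value, and the decompiler rewrites that into a comment, since the default expression itself is not recoverable from compiled code. A sketch of the second stage (the signature string is taken from the test data; the class name is illustrative):

public class DefaultValueRenderSketch {
    static final String DECOMPILED_COMMENT = "/* compiled code */";

    // DecompiledDataFactory swaps the renderer's placeholder for a comment
    static String decompile(String rendered) {
        return rendered.replace("= ...", "= " + DECOMPILED_COMMENT);
    }

    public static void main(String[] args) {
        String sig = "private final fun prv(val a : jet.String, val b : jet.Int = ...) : jet.Int";
        System.out.println(decompile(sig));
        // prints: private final fun prv(val a : jet.String, val b : jet.Int = /* compiled code */) : jet.Int
    }
}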
aeb879a8afa7d78e96c32bdd76a519f9ff4279bd
ReactiveX-RxJava
Handle concurrent unsubscription in drain (avoid NPE).
p
https://github.com/ReactiveX/RxJava
diff --git a/src/main/java/rx/internal/operators/OperatorPublish.java b/src/main/java/rx/internal/operators/OperatorPublish.java index 798517cced..41041f9846 100644 --- a/src/main/java/rx/internal/operators/OperatorPublish.java +++ b/src/main/java/rx/internal/operators/OperatorPublish.java @@ -367,8 +367,10 @@ public void drainQueue(OriginSubscriber<T> originSubscriber) { for (Subscriber<? super T> s : localState.getSubscribers()) { AtomicLong req = localMap.get(s); - nl.accept(s, o); - req.decrementAndGet(); + if (req != null) { // null req indicates a concurrent unsubscription happened + nl.accept(s, o); + req.decrementAndGet(); + } } emitted++; }
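The shape of the fix, as a standalone sketch: per-subscriber request counters live in a map that a concurrent unsubscribe can mutate mid-drain, so a null lookup means "already gone" and must be skipped rather than dereferenced (names here are illustrative, not OperatorPublish internals):

import java.util.Arrays;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;

public class DrainGuardSketch {
    static <S> void drainOnce(Iterable<S> subscribers, Map<S, AtomicLong> requested, Object item) {
        for (S s : subscribers) {
            AtomicLong req = requested.get(s);
            if (req != null) { // null req indicates a concurrent unsubscription
                System.out.println(s + " <- " + item);
                req.decrementAndGet();
            }
        }
    }

    public static void main(String[] args) {
        Map<String, AtomicLong> requested = new ConcurrentHashMap<String, AtomicLong>();
        requested.put("a", new AtomicLong(1)); // "b" unsubscribed concurrently
        drainOnce(Arrays.asList("a", "b"), requested, "item"); // only "a" receives
    }
}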
265e2fb8bcfede045d213568f157873abcf445b4
elasticsearch
zen disco: support for a node to act as a client (and not become master) using the discovery.zen.master setting (defaults to true). It will automatically be set to false when node.client is set to true.
a
https://github.com/elastic/elasticsearch
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java b/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java index 0b3aef49fb16c..7f4bfb676c59f 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java @@ -20,21 +20,45 @@ package org.elasticsearch.cluster.node; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Maps; import org.apache.lucene.util.StringHelper; import org.elasticsearch.util.io.stream.StreamInput; import org.elasticsearch.util.io.stream.StreamOutput; import org.elasticsearch.util.io.stream.Streamable; +import org.elasticsearch.util.settings.Settings; import org.elasticsearch.util.transport.TransportAddress; import org.elasticsearch.util.transport.TransportAddressSerializers; import java.io.IOException; import java.io.Serializable; +import java.util.Map; + +import static org.elasticsearch.util.transport.TransportAddressSerializers.*; /** - * @author kimchy (Shay Banon) + * @author kimchy (shay.banon) */ public class DiscoveryNode implements Streamable, Serializable { + public static Map<String, String> buildCommonNodesAttributes(Settings settings) { + Map<String, String> attributes = Maps.newHashMap(settings.getByPrefix("node.").getAsMap()); + if (attributes.containsKey("client")) { + if (attributes.get("client").equals("false")) { + attributes.remove("client"); // this is the default + } else { + // if we are client node, don't store data ... + attributes.put("data", "false"); + } + } + if (attributes.containsKey("data")) { + if (attributes.get("data").equals("true")) { + attributes.remove("data"); + } + } + return attributes; + } + public static final ImmutableList<DiscoveryNode> EMPTY_LIST = ImmutableList.of(); private String nodeName = StringHelper.intern(""); @@ -43,22 +67,26 @@ public class DiscoveryNode implements Streamable, Serializable { private TransportAddress address; - private boolean dataNode = true; + private ImmutableMap<String, String> attributes; private DiscoveryNode() { } public DiscoveryNode(String nodeId, TransportAddress address) { - this("", true, nodeId, address); + this("", nodeId, address, ImmutableMap.<String, String>of()); } - public DiscoveryNode(String nodeName, boolean dataNode, String nodeId, TransportAddress address) { + public DiscoveryNode(String nodeName, String nodeId, TransportAddress address, Map<String, String> attributes) { if (nodeName == null) { this.nodeName = StringHelper.intern(""); } else { this.nodeName = StringHelper.intern(nodeName); } - this.dataNode = dataNode; + ImmutableMap.Builder<String, String> builder = ImmutableMap.builder(); + for (Map.Entry<String, String> entry : attributes.entrySet()) { + builder.put(StringHelper.intern(entry.getKey()), StringHelper.intern(entry.getValue())); + } + this.attributes = builder.build(); this.nodeId = StringHelper.intern(nodeId); this.address = address; } @@ -105,11 +133,26 @@ public String getName() { return name(); } + /** + * The node attributes. + */ + public ImmutableMap<String, String> attributes() { + return this.attributes; + } + + /** + * The node attributes. + */ + public ImmutableMap<String, String> getAttributes() { + return attributes(); + } + /** * Should this node hold data (shards) or not. 
 */
    public boolean dataNode() {
-        return dataNode;
+        String data = attributes.get("data");
+        return data == null || data.equals("true");
    }

    /**
@@ -119,6 +162,18 @@ public boolean isDataNode() {
        return dataNode();
    }

+    /**
+     * Is the node a client node or not.
+     */
+    public boolean clientNode() {
+        String client = attributes.get("client");
+        return client != null && client.equals("true");
+    }
+
+    public boolean isClientNode() {
+        return clientNode();
+    }
+
    public static DiscoveryNode readNode(StreamInput in) throws IOException {
        DiscoveryNode node = new DiscoveryNode();
        node.readFrom(in);
@@ -127,16 +182,25 @@ public static DiscoveryNode readNode(StreamInput in) throws IOException {

    @Override public void readFrom(StreamInput in) throws IOException {
        nodeName = StringHelper.intern(in.readUTF());
-        dataNode = in.readBoolean();
        nodeId = StringHelper.intern(in.readUTF());
        address = TransportAddressSerializers.addressFromStream(in);
+        int size = in.readVInt();
+        ImmutableMap.Builder<String, String> builder = ImmutableMap.builder();
+        for (int i = 0; i < size; i++) {
+            builder.put(StringHelper.intern(in.readUTF()), StringHelper.intern(in.readUTF()));
+        }
+        attributes = builder.build();
    }

    @Override public void writeTo(StreamOutput out) throws IOException {
        out.writeUTF(nodeName);
-        out.writeBoolean(dataNode);
        out.writeUTF(nodeId);
-        TransportAddressSerializers.addressToStream(out, address);
+        addressToStream(out, address);
+        out.writeVInt(attributes.size());
+        for (Map.Entry<String, String> entry : attributes.entrySet()) {
+            out.writeUTF(entry.getKey());
+            out.writeUTF(entry.getValue());
+        }
    }

    @Override public boolean equals(Object obj) {
@@ -159,12 +223,12 @@ public static DiscoveryNode readNode(StreamInput in) throws IOException {
        if (nodeId != null) {
            sb.append('[').append(nodeId).append(']');
        }
-        if (dataNode) {
-            sb.append("[data]");
-        }
        if (address != null) {
            sb.append('[').append(address).append(']');
        }
+        if (!attributes.isEmpty()) {
+            sb.append(attributes);
+        }
        return sb.toString();
    }
}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/jgroups/JgroupsDiscovery.java b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/jgroups/JgroupsDiscovery.java
index c8b18942a5846..13d5f458c6d88 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/jgroups/JgroupsDiscovery.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/jgroups/JgroupsDiscovery.java
@@ -50,6 +50,7 @@
 import static com.google.common.collect.Maps.*;
 import static com.google.common.collect.Sets.*;
 import static org.elasticsearch.cluster.ClusterState.*;
+import static org.elasticsearch.cluster.node.DiscoveryNode.*;

 /**
  * @author kimchy (Shay Banon)
@@ -142,11 +143,11 @@ public class JgroupsDiscovery extends AbstractLifecycleComponent<Discovery> impl
            channel.connect(clusterName.value());
            channel.setReceiver(this);
            logger.debug("Connected to cluster [{}], address [{}]", channel.getClusterName(), channel.getAddress());
-            this.localNode = new DiscoveryNode(settings.get("name"), settings.getAsBoolean("node.data", !settings.getAsBoolean("node.client", false)), channel.getAddress().toString(), transportService.boundAddress().publishAddress());
+            this.localNode = new DiscoveryNode(settings.get("name"), channel.getAddress().toString(), transportService.boundAddress().publishAddress(), buildCommonNodesAttributes(settings));

            if (isMaster()) {
                firstMaster = true;
-                clusterService.submitStateUpdateTask("jgroups-disco-initialconnect(master)", new ProcessedClusterStateUpdateTask() {
+                clusterService.submitStateUpdateTask("jgroups-disco-initial_connect(master)", new ProcessedClusterStateUpdateTask() {
                    @Override public ClusterState execute(ClusterState currentState) {
                        DiscoveryNodes.Builder builder = new DiscoveryNodes.Builder()
                                .localNodeId(localNode.id())
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java
index 3da929a2a634d..aebc1d8cc8e23 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java
@@ -42,6 +42,7 @@
 import static com.google.common.collect.Sets.*;
 import static org.elasticsearch.cluster.ClusterState.*;
+import static org.elasticsearch.cluster.node.DiscoveryNode.*;

 /**
  * @author kimchy (Shay Banon)
@@ -84,14 +85,14 @@ public class LocalDiscovery extends AbstractLifecycleComponent<Discovery> implem
            clusterGroups.put(clusterName, clusterGroup);
        }
        logger.debug("Connected to cluster [{}]", clusterName);
-        this.localNode = new DiscoveryNode(settings.get("name"), settings.getAsBoolean("node.data", !settings.getAsBoolean("node.client", false)), Long.toString(nodeIdGenerator.incrementAndGet()), transportService.boundAddress().publishAddress());
+        this.localNode = new DiscoveryNode(settings.get("name"), Long.toString(nodeIdGenerator.incrementAndGet()), transportService.boundAddress().publishAddress(), buildCommonNodesAttributes(settings));

        clusterGroup.members().add(this);
        if (clusterGroup.members().size() == 1) {
            // we are the first master (and the master)
            master = true;
            firstMaster = true;
-            clusterService.submitStateUpdateTask("local-disco-initialconnect(master)", new ProcessedClusterStateUpdateTask() {
+            clusterService.submitStateUpdateTask("local-disco-initial_connect(master)", new ProcessedClusterStateUpdateTask() {
                @Override public ClusterState execute(ClusterState currentState) {
                    DiscoveryNodes.Builder builder = new DiscoveryNodes.Builder()
                            .localNodeId(localNode.id())
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java
index 68a60a333a7b3..c89613444d969 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java
@@ -42,11 +42,13 @@
 import org.elasticsearch.util.settings.Settings;

 import java.util.List;
+import java.util.Map;
 import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.atomic.AtomicBoolean;

 import static com.google.common.collect.Lists.*;
 import static org.elasticsearch.cluster.ClusterState.*;
+import static org.elasticsearch.cluster.node.DiscoveryNode.*;
 import static org.elasticsearch.cluster.node.DiscoveryNodes.*;
 import static org.elasticsearch.util.TimeValue.*;

@@ -55,6 +57,8 @@
  */
 public class ZenDiscovery extends AbstractLifecycleComponent<Discovery> implements Discovery, DiscoveryNodesProvider {

+    private final ThreadPool threadPool;
+
     private final TransportService transportService;

     private final ClusterService clusterService;
@@ -94,6 +98,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent<Discovery> implemen
                        ZenPingService pingService) {
        super(settings);
        this.clusterName = clusterName;
+        this.threadPool = threadPool;
        this.clusterService = clusterService;
        this.transportService = transportService;
        this.pingService = pingService;
@@ -114,57 +119,29 @@ public class ZenDiscovery extends AbstractLifecycleComponent<Discovery> implemen
    }

    @Override protected void doStart() throws ElasticSearchException {
-        localNode = new DiscoveryNode(settings.get("name"), settings.getAsBoolean("node.data", !settings.getAsBoolean("node.client", false)), UUID.randomUUID().toString(), transportService.boundAddress().publishAddress());
+        Map<String, String> nodeAttributes = buildCommonNodesAttributes(settings);
+        Boolean zenMaster = componentSettings.getAsBoolean("master", null);
+        if (zenMaster != null) {
+            if (zenMaster.equals(Boolean.FALSE)) {
+                nodeAttributes.put("zen.master", "false");
+            }
+        } else if (nodeAttributes.containsKey("client")) {
+            if (nodeAttributes.get("client").equals("true")) {
+                nodeAttributes.put("zen.master", "false");
+            }
+        }
+        localNode = new DiscoveryNode(settings.get("name"), UUID.randomUUID().toString(), transportService.boundAddress().publishAddress(), nodeAttributes);
        pingService.start();
-        boolean retry = true;
-        while (retry) {
-            retry = false;
-            DiscoveryNode masterNode = broadBingTillMasterResolved();
-            if (localNode.equals(masterNode)) {
-                // we are the master (first)
-                this.firstMaster = true;
-                this.master = true;
-                nodesFD.start(); // start the nodes FD
-                clusterService.submitStateUpdateTask("zen-disco-initial_connect(master)", new ProcessedClusterStateUpdateTask() {
-                    @Override public ClusterState execute(ClusterState currentState) {
-                        DiscoveryNodes.Builder builder = new DiscoveryNodes.Builder()
-                                .localNodeId(localNode.id())
-                                .masterNodeId(localNode.id())
-                                // put our local node
-                                .put(localNode);
-                        // update the fact that we are the master...
-                        latestDiscoNodes = builder.build();
-                        return newClusterStateBuilder().state(currentState).nodes(builder).build();
-                    }
-
-                    @Override public void clusterStateProcessed(ClusterState clusterState) {
-                        sendInitialStateEventIfNeeded();
-                    }
-                });
-            } else {
-                this.firstMaster = false;
-                this.master = false;
-                try {
-                    // first, make sure we can connect to the master
-                    transportService.connectToNode(masterNode);
-                } catch (Exception e) {
-                    logger.warn("Failed to connect to master [{}], retrying...", e, masterNode);
-                    retry = true;
-                    continue;
-                }
-                // send join request
-                try {
-                    membership.sendJoinRequestBlocking(masterNode, localNode, initialPingTimeout);
-                } catch (Exception e) {
-                    logger.warn("Failed to send join request to master [{}], retrying...", e, masterNode);
-                    // failed to send the join request, retry
-                    retry = true;
-                    continue;
+        if (nodeAttributes.containsKey("zen.master") && nodeAttributes.get("zen.master").equals("false")) {
+            // do the join on a different thread
+            threadPool.execute(new Runnable() {
+                @Override public void run() {
+                    initialJoin();
                }
-                // cool, we found a master, start an FD on it
-                masterFD.start(masterNode);
-            }
+            });
+        } else {
+            initialJoin();
        }
    }

@@ -239,6 +216,63 @@ public class ZenDiscovery extends AbstractLifecycleComponent<Discovery> implemen
        publishClusterState.publish(clusterState);
    }

+    private void initialJoin() {
+        boolean retry = true;
+        while (retry) {
+            retry = false;
+            DiscoveryNode masterNode = broadPingTillMasterResolved();
+            if (localNode.equals(masterNode)) {
+                // we are the master (first)
+                this.firstMaster = true;
+                this.master = true;
+                nodesFD.start(); // start the nodes FD
+                clusterService.submitStateUpdateTask("zen-disco-initial_connect(master)", new ProcessedClusterStateUpdateTask() {
+                    @Override public ClusterState execute(ClusterState currentState) {
+                        DiscoveryNodes.Builder builder = new DiscoveryNodes.Builder()
+                                .localNodeId(localNode.id())
+                                .masterNodeId(localNode.id())
+                                // put our local node
+                                .put(localNode);
+                        // update the fact that we are the master...
+                        latestDiscoNodes = builder.build();
+                        return newClusterStateBuilder().state(currentState).nodes(builder).build();
+                    }
+
+                    @Override public void clusterStateProcessed(ClusterState clusterState) {
+                        sendInitialStateEventIfNeeded();
+                    }
+                });
+            } else {
+                this.firstMaster = false;
+                this.master = false;
+                try {
+                    // first, make sure we can connect to the master
+                    transportService.connectToNode(masterNode);
+                } catch (Exception e) {
+                    logger.warn("Failed to connect to master [{}], retrying...", e, masterNode);
+                    retry = true;
+                    continue;
+                }
+                // send join request
+                try {
+                    membership.sendJoinRequestBlocking(masterNode, localNode, initialPingTimeout);
+                } catch (Exception e) {
+                    logger.warn("Failed to send join request to master [{}], retrying...", e, masterNode);
+                    // failed to send the join request, retry
+                    retry = true;
+                    continue;
+                }
+                // cool, we found a master, start an FD on it
+                masterFD.start(masterNode);
+            }
+            if (retry) {
+                if (!lifecycle.started()) {
+                    return;
+                }
+            }
+        }
+    }
+
    private void handleNodeFailure(final DiscoveryNode node) {
        if (!master) {
            // nothing to do here...
@@ -365,7 +399,7 @@ private void handleJoinRequest(final DiscoveryNode node) {
        }
    }

-    private DiscoveryNode broadBingTillMasterResolved() {
+    private DiscoveryNode broadPingTillMasterResolved() {
        while (true) {
            ZenPing.PingResponse[] pingResponses = pingService.pingAndWait(initialPingTimeout);
            List<DiscoveryNode> pingMasters = newArrayList();
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java
index aa18dce53e2ff..d8067301e9aa9 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java
@@ -26,6 +26,7 @@

 import java.util.Collections;
 import java.util.Comparator;
+import java.util.Iterator;
 import java.util.List;

 import static com.google.common.collect.Lists.*;
@@ -45,7 +46,7 @@ public ElectMasterService(Settings settings) {
     * Returns a list of the next possible masters.
     */
    public DiscoveryNode[] nextPossibleMasters(Iterable<DiscoveryNode> nodes, int numberOfPossibleMasters) {
-        List<DiscoveryNode> sortedNodes = sortedNodes(nodes);
+        List<DiscoveryNode> sortedNodes = sortedMasterNodes(nodes);
        if (sortedNodes == null) {
            return new DiscoveryNode[0];
        }
@@ -65,18 +66,27 @@ public DiscoveryNode[] nextPossibleMasters(Iterable<DiscoveryNode> nodes, int nu
     * if no master has been elected.
     */
    public DiscoveryNode electMaster(Iterable<DiscoveryNode> nodes) {
-        List<DiscoveryNode> sortedNodes = sortedNodes(nodes);
-        if (sortedNodes == null) {
+        List<DiscoveryNode> sortedNodes = sortedMasterNodes(nodes);
+        if (sortedNodes == null || sortedNodes.isEmpty()) {
            return null;
        }
        return sortedNodes.get(0);
    }

-    private List<DiscoveryNode> sortedNodes(Iterable<DiscoveryNode> nodes) {
+    private List<DiscoveryNode> sortedMasterNodes(Iterable<DiscoveryNode> nodes) {
        List<DiscoveryNode> possibleNodes = Lists.newArrayList(nodes);
        if (possibleNodes.isEmpty()) {
            return null;
        }
+        // clean non master nodes
+        for (Iterator<DiscoveryNode> it = possibleNodes.iterator(); it.hasNext();) {
+            DiscoveryNode node = it.next();
+            if (node.attributes().containsKey("zen.master")) {
+                if (node.attributes().get("zen.master").equals("false")) {
+                    it.remove();
+                }
+            }
+        }
        Collections.sort(possibleNodes, nodeComparator);
        return possibleNodes;
    }
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java b/modules/elasticsearch/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java
index 7319703f73e7f..9af8c720e6ce6 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java
@@ -64,7 +64,12 @@ public class RestNodesInfoAction extends BaseRestHandler {

                builder.field("name", nodeInfo.node().name());
                builder.field("transport_address", nodeInfo.node().address().toString());
-                builder.field("data_node", nodeInfo.node().dataNode());
+
+                builder.startArray("attributes");
+                for (Map.Entry<String, String> attr : nodeInfo.node().attributes().entrySet()) {
+                    builder.field(attr.getKey(), attr.getValue());
+                }
+                builder.endArray();

                for (Map.Entry<String, String> nodeAttribute : nodeInfo.attributes().entrySet()) {
                    builder.field(nodeAttribute.getKey(), nodeAttribute.getValue());
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/util/settings/ImmutableSettings.java b/modules/elasticsearch/src/main/java/org/elasticsearch/util/settings/ImmutableSettings.java
index 2a7679d9ef05d..773ec2518e215 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/util/settings/ImmutableSettings.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/util/settings/ImmutableSettings.java
@@ -83,15 +83,19 @@ private ImmutableSettings(Map<String, String> settings, Settings globalSettings,
            throw new SettingsException("Component [" + type + "] does not start with prefix [" + prefix + "]");
        }
        String settingPrefix = type.substring(prefix.length() + 1); // 1 for the '.'
-        settingPrefix = settingPrefix.substring(0, settingPrefix.length() - component.getSimpleName().length() - 1); // remove the simple class name
+        settingPrefix = settingPrefix.substring(0, settingPrefix.length() - component.getSimpleName().length()); // remove the simple class name (keep the dot)
+        return getByPrefix(settingPrefix);
+    }
+
+    @Override public Settings getByPrefix(String prefix) {
        Builder builder = new Builder();
        for (Map.Entry<String, String> entry : getAsMap().entrySet()) {
-            if (entry.getKey().startsWith(settingPrefix)) {
-                if (entry.getKey().length() <= settingPrefix.length()) {
+            if (entry.getKey().startsWith(prefix)) {
+                if (entry.getKey().length() < prefix.length()) {
                    // ignore this one
                    continue;
                }
-                builder.put(entry.getKey().substring(settingPrefix.length() + 1), entry.getValue());
+                builder.put(entry.getKey().substring(prefix.length()), entry.getValue());
            }
        }
        builder.globalSettings(this);
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/util/settings/Settings.java b/modules/elasticsearch/src/main/java/org/elasticsearch/util/settings/Settings.java
index 50de0624c5c67..8f39c2066a1ed 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/util/settings/Settings.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/util/settings/Settings.java
@@ -55,7 +55,12 @@ public interface Settings {
    Settings getComponentSettings(String prefix, Class component);

    /**
-     * The class loader associted with this settings.
+     * A settings that are filtered (and key is removed) with the specified prefix.
+     */
+    Settings getByPrefix(String prefix);
+
+    /**
+     * The class loader associated with this settings.
     */
    ClassLoader getClassLoader();
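
As a reading aid for the getByPrefix hunk above: the new contract keeps the trailing dot in the prefix and strips exactly the prefix from each matching key. A standalone sketch of that semantics in plain Java (class name and map here are illustrative, not the real ImmutableSettings internals):

import java.util.HashMap;
import java.util.Map;

public class SettingsPrefixSketch {

    // Keep every entry whose key starts with the prefix (trailing dot included)
    // and re-key it with exactly prefix.length() characters stripped.
    static Map<String, String> getByPrefix(Map<String, String> settings, String prefix) {
        Map<String, String> filtered = new HashMap<String, String>();
        for (Map.Entry<String, String> entry : settings.entrySet()) {
            if (entry.getKey().startsWith(prefix)) {
                filtered.put(entry.getKey().substring(prefix.length()), entry.getValue());
            }
        }
        return filtered;
    }

    public static void main(String[] args) {
        Map<String, String> settings = new HashMap<String, String>();
        settings.put("node.data", "true");
        settings.put("node.client", "false");
        settings.put("cluster.name", "demo");
        System.out.println(getByPrefix(settings, "node.")); // prints {data=true, client=false}
    }
}

This is also what buildCommonNodesAttributes(settings) in the discovery classes appears to rely on: the node.* settings become the "data"/"client" entries that dataNode() and clientNode() read back from the attributes map.
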
67685860541cf7f66d048f314e1b6084591974c3
camel
Fixed test on other boxes.

git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@933097 13f79535-47bb-0310-9956-ffa450edef68
c
https://github.com/apache/camel
diff --git a/camel-core/src/test/java/org/apache/camel/component/file/FileConcurrentAggregateBatchConsumerTest.java b/camel-core/src/test/java/org/apache/camel/component/file/FileConcurrentAggregateBatchConsumerTest.java
index 4ed020e996ffe..83cb987a826d8 100644
--- a/camel-core/src/test/java/org/apache/camel/component/file/FileConcurrentAggregateBatchConsumerTest.java
+++ b/camel-core/src/test/java/org/apache/camel/component/file/FileConcurrentAggregateBatchConsumerTest.java
@@ -94,8 +94,8 @@ public void configure() throws Exception {
        long start = System.currentTimeMillis();

        MockEndpoint result = getMockEndpoint("mock:result");
-        // should be ordered
-        result.expectedBodiesReceived("A+C+E+G+I", "B+D+F+H+J");
+        // should be ordered in the body, but the files can be loaded in different order per OS
+        result.expectedBodiesReceivedInAnyOrder("A+C+E+G+I", "B+D+F+H+J");

        assertMockEndpointsSatisfied();

diff --git a/camel-core/src/test/java/org/apache/camel/component/file/FileConcurrentTest.java b/camel-core/src/test/java/org/apache/camel/component/file/FileConcurrentTest.java
index f8fef2725d81c..d001c971b5617 100644
--- a/camel-core/src/test/java/org/apache/camel/component/file/FileConcurrentTest.java
+++ b/camel-core/src/test/java/org/apache/camel/component/file/FileConcurrentTest.java
@@ -131,8 +131,8 @@ public void configure() throws Exception {
        long start = System.currentTimeMillis();

        MockEndpoint result = getMockEndpoint("mock:result");
-        // should be ordered
-        result.expectedBodiesReceived("A+C+E+G+I", "B+D+F+H+J");
+        // should be ordered in the body, but the files can be loaded in different order per OS
+        result.expectedBodiesReceivedInAnyOrder("A+C+E+G+I", "B+D+F+H+J");

        assertMockEndpointsSatisfied();

diff --git a/camel-core/src/test/java/org/apache/camel/component/file/FileConsumerFailureHandledTest.java b/camel-core/src/test/java/org/apache/camel/component/file/FileConsumerFailureHandledTest.java
index a7783cdd14c89..4d338c6dceee6 100644
--- a/camel-core/src/test/java/org/apache/camel/component/file/FileConsumerFailureHandledTest.java
+++ b/camel-core/src/test/java/org/apache/camel/component/file/FileConsumerFailureHandledTest.java
@@ -34,7 +34,7 @@ public class FileConsumerFailureHandledTest extends ContextTestSupport {

    @Override
    protected void setUp() throws Exception {
-        deleteDirectory("target/messages");
+        deleteDirectory("target/messages/input");
        super.setUp();
    }

@@ -42,13 +42,13 @@ public void testParis() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:valid");
        mock.expectedBodiesReceived("Hello Paris");

-        template.sendBodyAndHeader("file:target/messages/input/?delete=true&delay=5000", "Paris", Exchange.FILE_NAME, "paris.txt");
+        template.sendBodyAndHeader("file:target/messages/input/", "Paris", Exchange.FILE_NAME, "paris.txt");
        mock.assertIsSatisfied();

        // sleep otherwise the file assertions below could fail
        Thread.sleep(200);

-        asserFiles("paris.txt", true);
+        assertFiles("paris.txt", true);
    }

    public void testLondon() throws Exception {
@@ -56,14 +56,14 @@ public void testLondon() throws Exception {
        // we get the original input so its not Hello London but only London
        mock.expectedBodiesReceived("London");

-        template.sendBodyAndHeader("file:target/messages/input/?delete=true&delay=5000", "London", Exchange.FILE_NAME, "london.txt");
+        template.sendBodyAndHeader("file:target/messages/input/", "London", Exchange.FILE_NAME, "london.txt");
        mock.assertIsSatisfied();

        // sleep otherwise the file assertions below could fail
        Thread.sleep(200);

-        // london should be delated as we have failure handled it
-        asserFiles("london.txt", true);
+        // london should be deleted as we have failure handled it
+        assertFiles("london.txt", true);
    }

    public void testDublin() throws Exception {
@@ -71,14 +71,14 @@ public void testDublin() throws Exception {
        // we get the original input so its not Hello London but only London
        mock.expectedBodiesReceived("Dublin");

-        template.sendBodyAndHeader("file:target/messages/input/?delete=true&delay=5000", "Dublin", Exchange.FILE_NAME, "dublin.txt");
+        template.sendBodyAndHeader("file:target/messages/input/", "Dublin", Exchange.FILE_NAME, "dublin.txt");
        mock.assertIsSatisfied();

        // sleep otherwise the file assertions below could fail
        Thread.sleep(200);

        // dublin should NOT be deleted, but should be retired on next consumer
-        asserFiles("dublin.txt", false);
+        assertFiles("dublin.txt", false);
    }

    public void testMadrid() throws Exception {
@@ -86,18 +86,18 @@ public void testMadrid() throws Exception {
        // we get the original input so its not Hello London but only London
        mock.expectedBodiesReceived("Madrid");

-        template.sendBodyAndHeader("file:target/messages/input/?delete=true&delay=5000", "Madrid", Exchange.FILE_NAME, "madrid.txt");
+        template.sendBodyAndHeader("file:target/messages/input/", "Madrid", Exchange.FILE_NAME, "madrid.txt");
        mock.assertIsSatisfied();

        // sleep otherwise the file assertions below could fail
        Thread.sleep(200);

        // madrid should NOT be deleted, but should be retired on next consumer
-        asserFiles("madrid.txt", false);
+        assertFiles("madrid.txt", false);
    }

-    private static void asserFiles(String filename, boolean deleted) throws InterruptedException {
-        // file should be deleted as deleted=true in parameter in the route below
+    private static void assertFiles(String filename, boolean deleted) throws InterruptedException {
+        // file should be deleted as delete=true in parameter in the route below
        File file = new File("target/messages/input/" + filename);

        assertEquals("File " + filename + " should be deleted: " + deleted, deleted, !file.exists());
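
For readers unfamiliar with the mock API being swapped in above: expectedBodiesReceivedInAnyOrder keeps the per-body assertions but drops the arrival-order constraint, which is what makes these tests stable across OS-dependent file listing order. A minimal sketch of the pattern (route and endpoint URIs invented for illustration; written against the public JUnit 4 camel-test support rather than camel-core's internal ContextTestSupport used in the diff):

import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;

public class AnyOrderMockTest extends CamelTestSupport {

    @Test
    public void testBodiesMayArriveInAnyOrder() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:result");
        // passes no matter which body is delivered first
        mock.expectedBodiesReceivedInAnyOrder("A+C+E+G+I", "B+D+F+H+J");

        template.sendBody("direct:start", "B+D+F+H+J");
        template.sendBody("direct:start", "A+C+E+G+I");

        assertMockEndpointsSatisfied();
    }

    @Override
    protected RouteBuilder createRouteBuilder() {
        return new RouteBuilder() {
            public void configure() {
                from("direct:start").to("mock:result");
            }
        };
    }
}
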
34da8eb5e9daa5a65ad12f4f284df43469caacd8
drools
- adding new test for issue related with IF and pattern ordering, throwing a NotSupportedOperationException
p
https://github.com/kiegroup/drools
diff --git a/drools-compiler/src/test/java/org/drools/compiler/integrationtests/MiscTest2.java b/drools-compiler/src/test/java/org/drools/compiler/integrationtests/MiscTest2.java
index b81b851d6cd..da2572d6b08 100644
--- a/drools-compiler/src/test/java/org/drools/compiler/integrationtests/MiscTest2.java
+++ b/drools-compiler/src/test/java/org/drools/compiler/integrationtests/MiscTest2.java
@@ -86,6 +86,7 @@
 import java.util.regex.Pattern;

 import static java.util.Arrays.asList;
+import static org.junit.Assert.assertEquals;

 /**
  * Run all the tests with the ReteOO engine implementation
@@ -2449,6 +2450,41 @@ public void reteErrorInIF() {

        assertEquals(1, firedRules.size());

+    }
+
+    @Ignore
+    @Test
+    public void reteErrorInIF2() {
+        List<String> firedRules = new ArrayList<String>();
+        String str = "import " + MiscTest2.Foo.class.getCanonicalName() + "\n" +
+                "import " + MiscTest2.Foo2.class.getCanonicalName() + "\n" +
+                "import " + MiscTest2.Foo3.class.getCanonicalName() + "\n" +
+                "global java.util.List fired;\n" +
+                "rule \"weird foo\"\n" +
+                "    when\n" +
+                "        $foo: Foo($x: x)\n" +
+                "        $foo2: Foo2()\n" +
+                "        if( $foo.getX() != 1 ) break[needThis] \n" +
+                "        not( Foo(x == 2) ) \n" +
+                "        $foo3: Foo3(x == $x)\n" +
+                "    then\n" +
+                "        fired.add(\"We made it!\");\n" +
+                "    then[needThis]\n" +
+                "        modify($foo){\n" +
+                "            setX(1)\n" +
+                "        };\n" +
+                "end";
+
+        KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
+        StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
+        ksession.setGlobal("fired", firedRules);
+        ksession.insert(new Foo());
+        ksession.insert(new Foo2());
+        ksession.insert(new Foo3());
+        ksession.fireAllRules();
+
+        assertEquals(1, firedRules.size());
+
+    }
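
The DRL embedded in the Java string above exercises Drools' conditional named consequence syntax: an inline if( ... ) break[label] test in the LHS diverts execution to a then[label] block instead of the default then. Unescaped for readability, the rule from the test reads:

rule "weird foo"
when
    $foo: Foo($x: x)
    $foo2: Foo2()
    if( $foo.getX() != 1 ) break[needThis]
    not( Foo(x == 2) )
    $foo3: Foo3(x == $x)
then
    fired.add("We made it!");
then[needThis]
    modify($foo){
        setX(1)
    };
end

Placing the conditional branch before the not(...) and $foo3 patterns is the ordering that triggers the NotSupportedOperationException the commit message refers to, which is presumably why the new test is added with @Ignore until the engine bug is fixed.
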
e3d1a1dda22723fc896bfc96c6db57c500faf208
spring-framework
@Resource injection points support @Lazy as well

Issue: SPR-
a
https://github.com/spring-projects/spring-framework
diff --git a/spring-context/src/main/java/org/springframework/context/annotation/CommonAnnotationBeanPostProcessor.java b/spring-context/src/main/java/org/springframework/context/annotation/CommonAnnotationBeanPostProcessor.java
index 592af8ef932f..ffb9b55320b6 100644
--- a/spring-context/src/main/java/org/springframework/context/annotation/CommonAnnotationBeanPostProcessor.java
+++ b/spring-context/src/main/java/org/springframework/context/annotation/CommonAnnotationBeanPostProcessor.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2002-2014 the original author or authors.
+ * Copyright 2002-2015 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -44,6 +44,8 @@
 import javax.xml.ws.WebServiceClient;
 import javax.xml.ws.WebServiceRef;

+import org.springframework.aop.TargetSource;
+import org.springframework.aop.framework.ProxyFactory;
 import org.springframework.beans.BeanUtils;
 import org.springframework.beans.BeansException;
 import org.springframework.beans.PropertyValues;
@@ -414,6 +416,44 @@ else if (bridgedMethod.isAnnotationPresent(Resource.class)) {
        return new InjectionMetadata(clazz, elements);
    }

+    /**
+     * Obtain a lazily resolving resource proxy for the given name and type,
+     * delegating to {@link #getResource} on demand once a method call comes in.
+     * @param element the descriptor for the annotated field/method
+     * @param requestingBeanName the name of the requesting bean
+     * @return the resource object (never {@code null})
+     * @since 4.2
+     * @see #getResource
+     * @see Lazy
+     */
+    protected Object buildLazyResourceProxy(final LookupElement element, final String requestingBeanName) {
+        TargetSource ts = new TargetSource() {
+            @Override
+            public Class<?> getTargetClass() {
+                return element.lookupType;
+            }
+            @Override
+            public boolean isStatic() {
+                return false;
+            }
+            @Override
+            public Object getTarget() {
+                return getResource(element, requestingBeanName);
+            }
+            @Override
+            public void releaseTarget(Object target) {
+            }
+        };
+        ProxyFactory pf = new ProxyFactory();
+        pf.setTargetSource(ts);
+        if (element.lookupType.isInterface()) {
+            pf.addInterface(element.lookupType);
+        }
+        ClassLoader classLoader = (this.beanFactory instanceof ConfigurableBeanFactory ?
+                ((ConfigurableBeanFactory) this.beanFactory).getBeanClassLoader() : null);
+        return pf.getProxy(classLoader);
+    }
+
    /**
     * Obtain the resource object for the given name and type.
     * @param element the descriptor for the annotated field/method
@@ -527,6 +567,8 @@ public final DependencyDescriptor getDependencyDescriptor() {
     */
    private class ResourceElement extends LookupElement {

+        private final boolean lazyLookup;
+
        public ResourceElement(Member member, AnnotatedElement ae, PropertyDescriptor pd) {
            super(member, pd);
            Resource resource = ae.getAnnotation(Resource.class);
@@ -552,11 +594,14 @@ else if (beanFactory instanceof ConfigurableBeanFactory){
            this.name = resourceName;
            this.lookupType = resourceType;
            this.mappedName = resource.mappedName();
+            Lazy lazy = ae.getAnnotation(Lazy.class);
+            this.lazyLookup = (lazy != null && lazy.value());
        }

        @Override
        protected Object getResourceToInject(Object target, String requestingBeanName) {
-            return getResource(this, requestingBeanName);
+            return (this.lazyLookup ? buildLazyResourceProxy(this, requestingBeanName) :
+                    getResource(this, requestingBeanName));
        }
    }

diff --git a/spring-context/src/test/java/org/springframework/context/annotation/CommonAnnotationBeanPostProcessorTests.java b/spring-context/src/test/java/org/springframework/context/annotation/CommonAnnotationBeanPostProcessorTests.java
index c44662b418a9..92c684543feb 100644
--- a/spring-context/src/test/java/org/springframework/context/annotation/CommonAnnotationBeanPostProcessorTests.java
+++ b/spring-context/src/test/java/org/springframework/context/annotation/CommonAnnotationBeanPostProcessorTests.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2002-2014 the original author or authors.
+ * Copyright 2002-2015 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -437,6 +437,60 @@ public void testExtendedEjbInjection() {
        assertTrue(bean.destroy2Called);
    }

+    @Test
+    public void testLazyResolutionWithResourceField() {
+        DefaultListableBeanFactory bf = new DefaultListableBeanFactory();
+        CommonAnnotationBeanPostProcessor bpp = new CommonAnnotationBeanPostProcessor();
+        bpp.setBeanFactory(bf);
+        bf.addBeanPostProcessor(bpp);
+
+        bf.registerBeanDefinition("annotatedBean", new RootBeanDefinition(LazyResourceFieldInjectionBean.class));
+        bf.registerBeanDefinition("testBean", new RootBeanDefinition(TestBean.class));
+
+        LazyResourceFieldInjectionBean bean = (LazyResourceFieldInjectionBean) bf.getBean("annotatedBean");
+        assertFalse(bf.containsSingleton("testBean"));
+        bean.testBean.setName("notLazyAnymore");
+        assertTrue(bf.containsSingleton("testBean"));
+        TestBean tb = (TestBean) bf.getBean("testBean");
+        assertEquals("notLazyAnymore", tb.getName());
+    }
+
+    @Test
+    public void testLazyResolutionWithResourceMethod() {
+        DefaultListableBeanFactory bf = new DefaultListableBeanFactory();
+        CommonAnnotationBeanPostProcessor bpp = new CommonAnnotationBeanPostProcessor();
+        bpp.setBeanFactory(bf);
+        bf.addBeanPostProcessor(bpp);
+
+        bf.registerBeanDefinition("annotatedBean", new RootBeanDefinition(LazyResourceMethodInjectionBean.class));
+        bf.registerBeanDefinition("testBean", new RootBeanDefinition(TestBean.class));
+
+        LazyResourceMethodInjectionBean bean = (LazyResourceMethodInjectionBean) bf.getBean("annotatedBean");
+        assertFalse(bf.containsSingleton("testBean"));
+        bean.testBean.setName("notLazyAnymore");
+        assertTrue(bf.containsSingleton("testBean"));
+        TestBean tb = (TestBean) bf.getBean("testBean");
+        assertEquals("notLazyAnymore", tb.getName());
+    }
+
+    @Test
+    public void testLazyResolutionWithCglibProxy() {
+        DefaultListableBeanFactory bf = new DefaultListableBeanFactory();
+        CommonAnnotationBeanPostProcessor bpp = new CommonAnnotationBeanPostProcessor();
+        bpp.setBeanFactory(bf);
+        bf.addBeanPostProcessor(bpp);
+
+        bf.registerBeanDefinition("annotatedBean", new RootBeanDefinition(LazyResourceCglibInjectionBean.class));
+        bf.registerBeanDefinition("testBean", new RootBeanDefinition(TestBean.class));
+
+        LazyResourceCglibInjectionBean bean = (LazyResourceCglibInjectionBean) bf.getBean("annotatedBean");
+        assertFalse(bf.containsSingleton("testBean"));
+        bean.testBean.setName("notLazyAnymore");
+        assertTrue(bf.containsSingleton("testBean"));
+        TestBean tb = (TestBean) bf.getBean("testBean");
+        assertEquals("notLazyAnymore", tb.getName());
+    }
+

    public static class AnnotatedInitDestroyBean {
@@ -716,6 +770,35 @@ private static class ConvertedResourceInjectionBean {
    }

+    private static class LazyResourceFieldInjectionBean {
+
+        @Resource @Lazy
+        private ITestBean testBean;
+    }
+
+
+    private static class LazyResourceMethodInjectionBean {
+
+        private ITestBean testBean;
+
+        @Resource @Lazy
+        public void setTestBean(ITestBean testBean) {
+            this.testBean = testBean;
+        }
+    }
+
+
+    private static class LazyResourceCglibInjectionBean {
+
+        private TestBean testBean;
+
+        @Resource @Lazy
+        public void setTestBean(TestBean testBean) {
+            this.testBean = testBean;
+        }
+    }
+
+
    @SuppressWarnings("unused")
    private static class NullFactory {
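
In application code the new behaviour reads as follows: annotating an @Resource injection point with @Lazy injects a proxy, and the actual container lookup only happens on the first method call, exactly as the three tests above assert via containsSingleton. A minimal sketch (type and bean names invented for illustration):

import javax.annotation.Resource;

import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Component;

interface ReportRepository {
    String fetch();
}

@Component
public class ReportService {

    // Injected as a lazy proxy: the target "reportRepository" bean is not
    // resolved until a method is first invoked through the proxy.
    @Resource @Lazy
    private ReportRepository reportRepository;

    public String render() {
        return reportRepository.fetch(); // first call triggers the real lookup
    }
}

Note the interface/class split mirrored by buildLazyResourceProxy above: interface-typed injection points get a JDK dynamic proxy, while concrete classes (the testLazyResolutionWithCglibProxy case) fall back to a CGLIB subclass proxy.
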
420d11911bbfd59192bfefc061fdc253e326647c
spring-framework
SPR-5973: Extract UriComponentTemplate out of UriTemplate
p
https://github.com/spring-projects/spring-framework
diff --git a/org.springframework.web/src/main/java/org/springframework/web/util/UriBuilder.java b/org.springframework.web/src/main/java/org/springframework/web/util/UriBuilder.java
index f2e0b7364d0c..b27db5b16a64 100644
--- a/org.springframework.web/src/main/java/org/springframework/web/util/UriBuilder.java
+++ b/org.springframework.web/src/main/java/org/springframework/web/util/UriBuilder.java
@@ -220,8 +220,8 @@ private URI buildFromMap(boolean encodeUriVariableValues, Map<String, ?> uriVari
        UriTemplate template;
        if (scheme != null) {
-            template = new UriTemplate(scheme, UriComponent.SCHEME);
-            uriBuilder.append(template.expandAsString(encodeUriVariableValues, uriVariables));
+            template = new UriComponentTemplate(scheme, UriComponent.SCHEME, encodeUriVariableValues);
+            uriBuilder.append(template.expandAsString(uriVariables));
            uriBuilder.append(':');
        }

@@ -229,14 +229,14 @@ private URI buildFromMap(boolean encodeUriVariableValues, Map<String, ?> uriVari
            uriBuilder.append("//");

            if (StringUtils.hasLength(userInfo)) {
-                template = new UriTemplate(userInfo, UriComponent.USER_INFO);
-                uriBuilder.append(template.expandAsString(encodeUriVariableValues, uriVariables));
+                template = new UriComponentTemplate(userInfo, UriComponent.USER_INFO, encodeUriVariableValues);
+                uriBuilder.append(template.expandAsString(uriVariables));
                uriBuilder.append('@');
            }

            if (host != null) {
-                template = new UriTemplate(host, UriComponent.HOST);
-                uriBuilder.append(template.expandAsString(encodeUriVariableValues, uriVariables));
+                template = new UriComponentTemplate(host, UriComponent.HOST, encodeUriVariableValues);
+                uriBuilder.append(template.expandAsString(uriVariables));
            }

            if (port != -1) {
@@ -256,20 +256,20 @@ private URI buildFromMap(boolean encodeUriVariableValues, Map<String, ?> uriVari
            else if (endsWithSlash && startsWithSlash) {
                pathSegment = pathSegment.substring(1);
            }
-            template = new UriTemplate(pathSegment, UriComponent.PATH_SEGMENT);
-            uriBuilder.append(template.expandAsString(encodeUriVariableValues, uriVariables));
+            template = new UriComponentTemplate(pathSegment, UriComponent.PATH_SEGMENT, encodeUriVariableValues);
+            uriBuilder.append(template.expandAsString(uriVariables));
        }
    }

    if (queryBuilder.length() > 0) {
        uriBuilder.append('?');
-        template = new UriTemplate(queryBuilder.toString(), UriComponent.QUERY);
-        uriBuilder.append(template.expandAsString(encodeUriVariableValues, uriVariables));
+        template = new UriComponentTemplate(queryBuilder.toString(), UriComponent.QUERY, encodeUriVariableValues);
+        uriBuilder.append(template.expandAsString(uriVariables));
    }

    if (StringUtils.hasLength(fragment)) {
        uriBuilder.append('#');
-        template = new UriTemplate(fragment, UriComponent.FRAGMENT);
-        uriBuilder.append(template.expandAsString(encodeUriVariableValues, uriVariables));
+        template = new UriComponentTemplate(fragment, UriComponent.FRAGMENT, encodeUriVariableValues);
+        uriBuilder.append(template.expandAsString(uriVariables));
    }

    return URI.create(uriBuilder.toString());
@@ -308,8 +308,8 @@ private URI buildFromVarArg(boolean encodeUriVariableValues, Object... uriVariab
        UriTemplate template;
        if (scheme != null) {
-            template = new UriTemplate(scheme, UriComponent.SCHEME);
-            uriBuilder.append(template.expandAsString(encodeUriVariableValues, uriVariableValues));
+            template = new UriComponentTemplate(scheme, UriComponent.SCHEME, encodeUriVariableValues);
+            uriBuilder.append(template.expandAsString(uriVariableValues));
            uriBuilder.append(':');
        }

@@ -317,14 +317,14 @@ private URI buildFromVarArg(boolean encodeUriVariableValues, Object... uriVariab
            uriBuilder.append("//");

            if (StringUtils.hasLength(userInfo)) {
-                template = new UriTemplate(userInfo, UriComponent.USER_INFO);
-                uriBuilder.append(template.expandAsString(encodeUriVariableValues, uriVariableValues));
+                template = new UriComponentTemplate(userInfo, UriComponent.USER_INFO, encodeUriVariableValues);
+                uriBuilder.append(template.expandAsString(uriVariableValues));
                uriBuilder.append('@');
            }

            if (host != null) {
-                template = new UriTemplate(host, UriComponent.HOST);
-                uriBuilder.append(template.expandAsString(encodeUriVariableValues, uriVariableValues));
+                template = new UriComponentTemplate(host, UriComponent.HOST, encodeUriVariableValues);
+                uriBuilder.append(template.expandAsString(uriVariableValues));
            }

            if (port != -1) {
@@ -344,21 +344,21 @@ private URI buildFromVarArg(boolean encodeUriVariableValues, Object... uriVariab
            else if (endsWithSlash && startsWithSlash) {
                pathSegment = pathSegment.substring(1);
            }
-            template = new UriTemplate(pathSegment, UriComponent.PATH_SEGMENT);
-            uriBuilder.append(template.expandAsString(encodeUriVariableValues, uriVariableValues));
+            template = new UriComponentTemplate(pathSegment, UriComponent.PATH_SEGMENT, encodeUriVariableValues);
+            uriBuilder.append(template.expandAsString(uriVariableValues));
        }
    }

    if (queryBuilder.length() > 0) {
        uriBuilder.append('?');
-        template = new UriTemplate(queryBuilder.toString(), UriComponent.QUERY);
-        uriBuilder.append(template.expandAsString(encodeUriVariableValues, uriVariableValues));
+        template = new UriComponentTemplate(queryBuilder.toString(), UriComponent.QUERY, encodeUriVariableValues);
+        uriBuilder.append(template.expandAsString(uriVariableValues));
    }

    if (StringUtils.hasLength(fragment)) {
        uriBuilder.append('#');
-        template = new UriTemplate(fragment, UriComponent.FRAGMENT);
-        uriBuilder.append(template.expandAsString(encodeUriVariableValues, uriVariableValues));
+        template = new UriComponentTemplate(fragment, UriComponent.FRAGMENT, encodeUriVariableValues);
+        uriBuilder.append(template.expandAsString(uriVariableValues));
    }

    return URI.create(uriBuilder.toString());
diff --git a/org.springframework.web/src/main/java/org/springframework/web/util/UriComponentTemplate.java b/org.springframework.web/src/main/java/org/springframework/web/util/UriComponentTemplate.java
new file mode 100644
index 000000000000..d354ca2dcc3f
--- /dev/null
+++ b/org.springframework.web/src/main/java/org/springframework/web/util/UriComponentTemplate.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2002-2011 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.web.util;
+
+import org.springframework.util.Assert;
+
+/**
+ * Subclass of {@link UriTemplate} that operates on URI components, rather than full URIs.
+ *
+ * @author Arjen Poutsma
+ * @since 3.1
+ */
+class UriComponentTemplate extends UriTemplate {
+
+    private final UriComponent uriComponent;
+
+    private boolean encodeUriVariableValues;
+
+    UriComponentTemplate(String uriTemplate, UriComponent uriComponent, boolean encodeUriVariableValues) {
+        super(uriTemplate);
+        Assert.notNull(uriComponent, "'uriComponent' must not be null");
+        this.uriComponent = uriComponent;
+        this.encodeUriVariableValues = encodeUriVariableValues;
+    }
+
+    @Override
+    protected String getVariableValueAsString(Object variableValue) {
+        String variableValueString = super.getVariableValueAsString(variableValue);
+        return encodeUriVariableValues ? UriUtils.encode(variableValueString, uriComponent, false) :
+                variableValueString;
+    }
+}
diff --git a/org.springframework.web/src/main/java/org/springframework/web/util/UriTemplate.java b/org.springframework.web/src/main/java/org/springframework/web/util/UriTemplate.java
index 51ebb224600e..6026020b9761 100644
--- a/org.springframework.web/src/main/java/org/springframework/web/util/UriTemplate.java
+++ b/org.springframework.web/src/main/java/org/springframework/web/util/UriTemplate.java
@@ -56,8 +56,6 @@ public class UriTemplate implements Serializable {

    private final String uriTemplate;

-    private final UriComponent uriComponent;
-

    /**
     * Construct a new {@link UriTemplate} with the given URI String.
@@ -68,19 +66,6 @@ public UriTemplate(String uriTemplate) {
        this.uriTemplate = uriTemplate;
        this.variableNames = parser.getVariableNames();
        this.matchPattern = parser.getMatchPattern();
-        this.uriComponent = null;
-    }
-
-    /**
-     * Construct a new {@link UriTemplate} with the given URI String.
-     * @param uriTemplate the URI template string
-     */
-    public UriTemplate(String uriTemplate, UriComponent uriComponent) {
-        Parser parser = new Parser(uriTemplate);
-        this.uriTemplate = uriTemplate;
-        this.variableNames = parser.getVariableNames();
-        this.matchPattern = parser.getMatchPattern();
-        this.uriComponent = uriComponent;
    }

    /**
@@ -110,7 +95,7 @@ public List<String> getVariableNames() {
     * or if it does not contain values for all the variable names
     */
    public URI expand(Map<String, ?> uriVariables) {
-        return encodeUri(expandAsString(false, uriVariables));
+        return encodeUri(expandAsString(uriVariables));
    }

    /**
@@ -125,13 +110,13 @@ public URI expand(Map<String, ?> uriVariables) {
     * System.out.println(template.expand(uriVariables));
     * </pre>
     * will print: <blockquote><code>http://example.com/hotels/1/bookings/42</code></blockquote>
-     * @param encodeUriVariableValues indicates whether uri template variables should be encoded or not
+     *
     * @param uriVariables the map of URI variables
     * @return the expanded URI
     * @throws IllegalArgumentException if <code>uriVariables</code> is <code>null</code>;
     * or if it does not contain values for all the variable names
     */
-    public String expandAsString(boolean encodeUriVariableValues, Map<String, ?> uriVariables) {
+    public String expandAsString(Map<String, ?> uriVariables) {
        Assert.notNull(uriVariables, "'uriVariables' must not be null");
        Object[] values = new Object[this.variableNames.size()];
        for (int i = 0; i < this.variableNames.size(); i++) {
@@ -141,7 +126,7 @@ public String expandAsString(boolean encodeUriVariableValues, Map<String, ?> uri
            }
            values[i] = uriVariables.get(name);
        }
-        return expandAsString(encodeUriVariableValues, values);
+        return expandAsString(values);
    }

    /**
@@ -159,7 +144,7 @@ public String expandAsString(boolean encodeUriVariableValues, Map<String, ?> uri
     * or if it does not contain sufficient variables
     */
    public URI expand(Object... uriVariableValues) {
-        return encodeUri(expandAsString(false, uriVariableValues));
+        return encodeUri(expandAsString(uriVariableValues));
    }

    /**
@@ -171,13 +156,13 @@ public URI expand(Object... uriVariableValues) {
     * System.out.println(template.expand("1", "42));
     * </pre>
     * will print: <blockquote><code>http://example.com/hotels/1/bookings/42</code></blockquote>
-     * @param encodeVariableValues indicates whether uri template variables should be encoded or not
+     *
     * @param uriVariableValues the array of URI variables
     * @return the expanded URI
     * @throws IllegalArgumentException if <code>uriVariables</code> is <code>null</code>
     * or if it does not contain sufficient variables
     */
-    public String expandAsString(boolean encodeVariableValues, Object... uriVariableValues) {
+    public String expandAsString(Object... uriVariableValues) {
        Assert.notNull(uriVariableValues, "'uriVariableValues' must not be null");
        if (uriVariableValues.length < this.variableNames.size()) {
            throw new IllegalArgumentException(
@@ -188,18 +173,27 @@ public String expandAsString(boolean encodeVariableValues, Object... uriVariable
        StringBuffer uriBuffer = new StringBuffer();
        int i = 0;
        while (matcher.find()) {
-            Object uriVariable = uriVariableValues[i++];
-            String uriVariableString = uriVariable != null ? uriVariable.toString() : "";
-            if (encodeVariableValues && uriComponent != null) {
-                uriVariableString = UriUtils.encode(uriVariableString, uriComponent, false);
-            }
-            String replacement = Matcher.quoteReplacement(uriVariableString);
+            Object uriVariableValue = uriVariableValues[i++];
+            String uriVariableValueString = getVariableValueAsString(uriVariableValue);
+            String replacement = Matcher.quoteReplacement(uriVariableValueString);
            matcher.appendReplacement(uriBuffer, replacement);
        }
        matcher.appendTail(uriBuffer);
        return uriBuffer.toString();
    }

+    /**
+     * Template method that returns the string representation of the given URI template value.
+     *
+     * <p>Defaults implementation simply calls {@link Object#toString()}, or returns an empty string for {@code null}.
+     *
+     * @param variableValue the URI template variable value
+     * @return the variable value as string
+     */
+    protected String getVariableValueAsString(Object variableValue) {
+        return variableValue != null ? variableValue.toString() : "";
+    }
+
    /**
     * Indicate whether the given URI matches this template.
     * @param uri the URI to match to
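
The net effect of the refactoring: expandAsString loses its boolean/enum parameters, and variable rendering becomes a protected template method, getVariableValueAsString, which subclasses such as the package-private UriComponentTemplate override. A sketch of that extension point with a toy transformation in place of UriUtils encoding (subclass name invented for illustration):

import org.springframework.web.util.UriTemplate;

// Hypothetical subclass using the same template-method hook as
// UriComponentTemplate: the base class finds the {placeholders},
// the subclass decides how each variable value is rendered.
class UpperCasingUriTemplate extends UriTemplate {

    UpperCasingUriTemplate(String uriTemplate) {
        super(uriTemplate);
    }

    @Override
    protected String getVariableValueAsString(Object variableValue) {
        // super handles the null -> "" and toString() defaults
        return super.getVariableValueAsString(variableValue).toUpperCase();
    }
}

// usage:
//   new UpperCasingUriTemplate("/hotels/{hotel}").expandAsString("ritz")
//   returns "/hotels/RITZ"
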
0d1908fbb2a1e5d68b1b38b0feaa1f1a40e76d5b
orientdb
Fixed a bug on browsing clusters in transaction as issue https://github.com/tinkerpop/blueprints/issues/312
c
https://github.com/orientechnologies/orientdb
diff --git a/core/src/main/java/com/orientechnologies/orient/core/iterator/ORecordIteratorCluster.java b/core/src/main/java/com/orientechnologies/orient/core/iterator/ORecordIteratorCluster.java
index eee78457d84..43e1c1231f8 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/iterator/ORecordIteratorCluster.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/iterator/ORecordIteratorCluster.java
@@ -43,7 +43,7 @@ public ORecordIteratorCluster(final ODatabaseRecord iDatabase, final ODatabaseRe

    totalAvailableRecords = database.countClusterElements(current.clusterId);

-    txEntries = iDatabase.getTransaction().getRecordEntriesByClusterIds(new int[] { iClusterId });
+    txEntries = iDatabase.getTransaction().getNewRecordEntriesByClusterIds(new int[] { iClusterId });

    if (txEntries != null)
      // ADJUST TOTAL ELEMENT BASED ON CURRENT TRANSACTION'S ENTRIES
diff --git a/core/src/main/java/com/orientechnologies/orient/core/iterator/ORecordIteratorClusters.java b/core/src/main/java/com/orientechnologies/orient/core/iterator/ORecordIteratorClusters.java
index aea639f5116..c8695a7a50d 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/iterator/ORecordIteratorClusters.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/iterator/ORecordIteratorClusters.java
@@ -367,7 +367,7 @@ protected void config() {

    totalAvailableRecords = database.countClusterElements(clusterIds);

-    txEntries = database.getTransaction().getRecordEntriesByClusterIds(clusterIds);
+    txEntries = database.getTransaction().getNewRecordEntriesByClusterIds(clusterIds);

    if (txEntries != null)
      // ADJUST TOTAL ELEMENT BASED ON CURRENT TRANSACTION'S ENTRIES
diff --git a/core/src/main/java/com/orientechnologies/orient/core/tx/OTransaction.java b/core/src/main/java/com/orientechnologies/orient/core/tx/OTransaction.java
index 2e80f344c9e..7bc9a7e4bd4 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/tx/OTransaction.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/tx/OTransaction.java
@@ -63,7 +63,7 @@ public void saveRecord(ORecordInternal<?> iContent, String iClusterName, OPERATI

  public List<ORecordOperation> getRecordEntriesByClass(String iClassName);

-  public List<ORecordOperation> getRecordEntriesByClusterIds(int[] iIds);
+  public List<ORecordOperation> getNewRecordEntriesByClusterIds(int[] iIds);

  public ORecordInternal<?> getRecord(ORID iRid);

diff --git a/core/src/main/java/com/orientechnologies/orient/core/tx/OTransactionNoTx.java b/core/src/main/java/com/orientechnologies/orient/core/tx/OTransactionNoTx.java
index 08b6f936ec4..c54ddc43dc8 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/tx/OTransactionNoTx.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/tx/OTransactionNoTx.java
@@ -117,7 +117,7 @@ public List<ORecordOperation> getRecordEntriesByClass(String iClassName) {
    return null;
  }

-  public List<ORecordOperation> getRecordEntriesByClusterIds(int[] iIds) {
+  public List<ORecordOperation> getNewRecordEntriesByClusterIds(int[] iIds) {
    return null;
  }

diff --git a/core/src/main/java/com/orientechnologies/orient/core/tx/OTransactionRealAbstract.java b/core/src/main/java/com/orientechnologies/orient/core/tx/OTransactionRealAbstract.java
index 047a11ddf76..2994820da20 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/tx/OTransactionRealAbstract.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/tx/OTransactionRealAbstract.java
@@ -161,19 +161,21 @@ public List<ORecordOperation> getRecordEntriesByClass(final String iClassName) {
  /**
   * Called by cluster iterator.
   */
-  public List<ORecordOperation> getRecordEntriesByClusterIds(final int[] iIds) {
+  public List<ORecordOperation> getNewRecordEntriesByClusterIds(final int[] iIds) {
    final List<ORecordOperation> result = new ArrayList<ORecordOperation>();

    if (iIds == null)
      // RETURN ALL THE RECORDS
      for (ORecordOperation entry : recordEntries.values()) {
-        result.add(entry);
+        if (entry.type == ORecordOperation.CREATED)
+          result.add(entry);
      }
    else
      // FILTER RECORDS BY ID
      for (ORecordOperation entry : recordEntries.values()) {
        for (int id : iIds) {
-          if (entry.getRecord() != null && entry.getRecord().getIdentity().getClusterId() == id) {
+          if (entry.getRecord() != null && entry.getRecord().getIdentity().getClusterId() == id
+              && entry.type == ORecordOperation.CREATED) {
            result.add(entry);
            break;
          }
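
The substance of the fix is the narrowed filter in OTransactionRealAbstract: only CREATED operations count as extra iterator entries, since updated or deleted records already exist in the persistent cluster and would otherwise be double-counted. Restated as a standalone sketch over a stand-in record-operation type (names and the constant value are placeholders, not the real ORecordOperation):

import java.util.ArrayList;
import java.util.List;

class RecordOp {
    static final byte CREATED = 3; // placeholder value for illustration
    byte type;
    int clusterId;
}

class NewRecordFilter {
    // Mirrors getNewRecordEntriesByClusterIds: with no ids, return all newly
    // created entries; otherwise only those in the requested clusters.
    static List<RecordOp> newEntriesByClusterIds(Iterable<RecordOp> entries, int[] ids) {
        List<RecordOp> result = new ArrayList<RecordOp>();
        for (RecordOp entry : entries) {
            if (entry.type != RecordOp.CREATED) {
                continue; // updates/deletes are already counted by the cluster itself
            }
            if (ids == null) {
                result.add(entry);
                continue;
            }
            for (int id : ids) {
                if (entry.clusterId == id) {
                    result.add(entry);
                    break;
                }
            }
        }
        return result;
    }
}
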
e63ed754f1483af587dc3372467d2bc58ee8b785
kotlin
rename JetTypeMapper constants
p
https://github.com/JetBrains/kotlin
diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/ClosureCodegen.java b/compiler/backend/src/org/jetbrains/jet/codegen/ClosureCodegen.java
index d9776cd9eec58..265a64f84ca79 100644
--- a/compiler/backend/src/org/jetbrains/jet/codegen/ClosureCodegen.java
+++ b/compiler/backend/src/org/jetbrains/jet/codegen/ClosureCodegen.java
@@ -78,14 +78,14 @@ private static JvmMethodSignature erasedInvokeSignature(FunctionDescriptor fd) {

        for (int i = 0; i < paramCount; ++i) {
            signatureWriter.writeParameterType(JvmMethodParameterKind.VALUE);
-            signatureWriter.writeAsmType(JetTypeMapper.TYPE_OBJECT, true);
+            signatureWriter.writeAsmType(JetTypeMapper.OBJECT_TYPE, true);
            signatureWriter.writeParameterTypeEnd();
        }

        signatureWriter.writeParametersEnd();

        signatureWriter.writeReturnType();
-        signatureWriter.writeAsmType(JetTypeMapper.TYPE_OBJECT, true);
+        signatureWriter.writeAsmType(JetTypeMapper.OBJECT_TYPE, true);
        signatureWriter.writeReturnTypeEnd();

        return signatureWriter.makeJvmMethodSignature("invoke");
@@ -242,24 +242,24 @@ private void generateBridge(String className, FunctionDescriptor funDescriptor,
        final ReceiverDescriptor receiver = funDescriptor.getReceiverParameter();
        int count = 1;
        if (receiver.exists()) {
-            StackValue.local(count, JetTypeMapper.TYPE_OBJECT).put(JetTypeMapper.TYPE_OBJECT, iv);
-            StackValue.onStack(JetTypeMapper.TYPE_OBJECT)
+            StackValue.local(count, JetTypeMapper.OBJECT_TYPE).put(JetTypeMapper.OBJECT_TYPE, iv);
+            StackValue.onStack(JetTypeMapper.OBJECT_TYPE)
                    .upcast(typeMapper.mapType(receiver.getType(), MapTypeMode.VALUE), iv);
            count++;
        }

        final List<ValueParameterDescriptor> params = funDescriptor.getValueParameters();
        for (ValueParameterDescriptor param : params) {
-            StackValue.local(count, JetTypeMapper.TYPE_OBJECT).put(JetTypeMapper.TYPE_OBJECT, iv);
-            StackValue.onStack(JetTypeMapper.TYPE_OBJECT)
+            StackValue.local(count, JetTypeMapper.OBJECT_TYPE).put(JetTypeMapper.OBJECT_TYPE, iv);
+            StackValue.onStack(JetTypeMapper.OBJECT_TYPE)
                    .upcast(typeMapper.mapType(param.getType(), MapTypeMode.VALUE), iv);
            count++;
        }

        iv.invokevirtual(className, "invoke", delegate.getDescriptor());
-        StackValue.onStack(delegate.getReturnType()).put(JetTypeMapper.TYPE_OBJECT, iv);
+        StackValue.onStack(delegate.getReturnType()).put(JetTypeMapper.OBJECT_TYPE, iv);

-        iv.areturn(JetTypeMapper.TYPE_OBJECT);
+        iv.areturn(JetTypeMapper.OBJECT_TYPE);

        FunctionCodegen.endVisit(mv, "bridge", fun);
    }
@@ -285,7 +285,7 @@ else if (state.getClassBuilderMode() == ClassBuilderMode.FULL) {

        int k = 1;
        for (int i = 0; i != argTypes.length; ++i) {
-            StackValue.local(0, JetTypeMapper.TYPE_OBJECT).put(JetTypeMapper.TYPE_OBJECT, iv);
+            StackValue.local(0, JetTypeMapper.OBJECT_TYPE).put(JetTypeMapper.OBJECT_TYPE, iv);
            final Pair<String, Type> nameAndType = args.get(i);
            final Type type = nameAndType.second;
            StackValue.local(k, type).put(type, iv);
diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/CodegenContext.java b/compiler/backend/src/org/jetbrains/jet/codegen/CodegenContext.java
index 429a24f9e9193..2a582b5827639 100644
--- a/compiler/backend/src/org/jetbrains/jet/codegen/CodegenContext.java
+++ b/compiler/backend/src/org/jetbrains/jet/codegen/CodegenContext.java
@@ -27,7 +27,7 @@
 import java.util.HashMap;
 import java.util.Map;

-import static org.jetbrains.jet.codegen.JetTypeMapper.TYPE_OBJECT;
+import static org.jetbrains.jet.codegen.JetTypeMapper.OBJECT_TYPE;

 /*
  * @author max
@@ -169,7 +169,7 @@ public FrameMap prepareFrame(JetTypeMapper mapper) {
        FrameMap frameMap = new FrameMap();

        if (getContextKind() != OwnerKind.NAMESPACE) {
-            frameMap.enterTemp(TYPE_OBJECT); // 0 slot for this
+            frameMap.enterTemp(OBJECT_TYPE); // 0 slot for this
        }

        CallableDescriptor receiverDescriptor = getReceiverDescriptor();
diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/CodegenContexts.java b/compiler/backend/src/org/jetbrains/jet/codegen/CodegenContexts.java
index bd39491830226..619b143948437 100644
--- a/compiler/backend/src/org/jetbrains/jet/codegen/CodegenContexts.java
+++ b/compiler/backend/src/org/jetbrains/jet/codegen/CodegenContexts.java
@@ -86,7 +86,7 @@ public String toString() {
            return "ROOT";
        }
    };

-    private static final StackValue local1 = StackValue.local(1, JetTypeMapper.TYPE_OBJECT);
+    private static final StackValue local1 = StackValue.local(1, JetTypeMapper.OBJECT_TYPE);

    public abstract static class ReceiverContext extends CodegenContext {
        final CallableDescriptor receiverDescriptor;
diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/ConstructorFrameMap.java b/compiler/backend/src/org/jetbrains/jet/codegen/ConstructorFrameMap.java
index f2671c4fed38d..0e0a5cadda2c4 100644
--- a/compiler/backend/src/org/jetbrains/jet/codegen/ConstructorFrameMap.java
+++ b/compiler/backend/src/org/jetbrains/jet/codegen/ConstructorFrameMap.java
@@ -33,10 +33,10 @@ public class ConstructorFrameMap extends FrameMap {
    private int myOuterThisIndex = -1;

    public ConstructorFrameMap(CallableMethod callableMethod, @Nullable ConstructorDescriptor descriptor, boolean hasThis0) {
-        enterTemp(JetTypeMapper.TYPE_OBJECT); // this
+        enterTemp(JetTypeMapper.OBJECT_TYPE); // this
        if (descriptor != null) {
            if (hasThis0) {
-                myOuterThisIndex = enterTemp(JetTypeMapper.TYPE_OBJECT); // outer class instance
+                myOuterThisIndex = enterTemp(JetTypeMapper.OBJECT_TYPE); // outer class instance
            }
        }

@@ -45,7 +45,7 @@ public ConstructorFrameMap(CallableMethod callableMethod, @Nullable ConstructorD
        if (descriptor != null &&
            (descriptor.getContainingDeclaration().getKind() == ClassKind.ENUM_CLASS ||
             descriptor.getContainingDeclaration().getKind() == ClassKind.ENUM_ENTRY)) {
-            enterTemp(JetTypeMapper.TYPE_OBJECT); // name
+            enterTemp(JetTypeMapper.OBJECT_TYPE); // name
            enterTemp(Type.INT_TYPE); // ordinal
        }

diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/FunctionCodegen.java b/compiler/backend/src/org/jetbrains/jet/codegen/FunctionCodegen.java
index 225eccadb7de3..0ccbc84a91e72 100644
--- a/compiler/backend/src/org/jetbrains/jet/codegen/FunctionCodegen.java
+++ b/compiler/backend/src/org/jetbrains/jet/codegen/FunctionCodegen.java
@@ -245,8 +245,8 @@ else if (functionDescriptor instanceof SimpleFunctionDescriptor) {
        else if (kind instanceof OwnerKind.DelegateKind) {
            OwnerKind.DelegateKind dk = (OwnerKind.DelegateKind) kind;
            InstructionAdapter iv = new InstructionAdapter(mv);
-            iv.load(0, JetTypeMapper.TYPE_OBJECT);
-            dk.getDelegate().put(JetTypeMapper.TYPE_OBJECT, iv);
+            iv.load(0, JetTypeMapper.OBJECT_TYPE);
+            dk.getDelegate().put(JetTypeMapper.OBJECT_TYPE, iv);
            for (int i = 0; i < argTypes.length; i++) {
                Type argType = argTypes[i];
                iv.load(i + 1, argType);
@@ -495,7 +495,7 @@ private static void generateDefaultImpl(
        FrameMap frameMap = owner.prepareFrame(state.getInjector().getJetTypeMapper());

        if (kind instanceof OwnerKind.StaticDelegateKind) {
-            frameMap.leaveTemp(JetTypeMapper.TYPE_OBJECT);
+            frameMap.leaveTemp(JetTypeMapper.OBJECT_TYPE);
        }

        ExpressionCodegen codegen = new ExpressionCodegen(mv, frameMap, jvmSignature.getReturnType(), owner, state);
@@ -645,15 +645,15 @@ else if (state.getClassBuilderMode() == ClassBuilderMode.FULL) {
        Type[] argTypes = overridden.getArgumentTypes();
        Type[] originalArgTypes = jvmSignature.getArgumentTypes();
        InstructionAdapter iv = new InstructionAdapter(mv);
-        iv.load(0, JetTypeMapper.TYPE_OBJECT);
+        iv.load(0, JetTypeMapper.OBJECT_TYPE);
        for (int i = 0, reg = 1; i < argTypes.length; i++) {
            Type argType = argTypes[i];
            iv.load(reg, argType);
            if (argType.getSort() == Type.OBJECT) {
-                StackValue.onStack(JetTypeMapper.TYPE_OBJECT).put(originalArgTypes[i], iv);
+                StackValue.onStack(JetTypeMapper.OBJECT_TYPE).put(originalArgTypes[i], iv);
            }
            else if (argType.getSort() == Type.ARRAY) {
-                StackValue.onStack(JetTypeMapper.ARRAY_GENERIC_TYPE).put(originalArgTypes[i], iv);
+                StackValue.onStack(JetTypeMapper.JAVA_ARRAY_GENERIC_TYPE).put(originalArgTypes[i], iv);
            }

            //noinspection AssignmentToForLoopParameter
@@ -699,22 +699,22 @@ else if (state.getClassBuilderMode() == ClassBuilderMode.FULL) {
        Type[] argTypes = method.getArgumentTypes();

        InstructionAdapter iv = new InstructionAdapter(mv);
-        iv.load(0, JetTypeMapper.TYPE_OBJECT);
+        iv.load(0, JetTypeMapper.OBJECT_TYPE);
        for (int i = 0, reg = 1; i < argTypes.length; i++) {
            Type argType = argTypes[i];
            iv.load(reg, argType);
            if (argType.getSort() == Type.OBJECT) {
-                StackValue.onStack(JetTypeMapper.TYPE_OBJECT).put(method.getArgumentTypes()[i], iv);
+                StackValue.onStack(JetTypeMapper.OBJECT_TYPE).put(method.getArgumentTypes()[i], iv);
            }
            else if (argType.getSort() == Type.ARRAY) {
-                StackValue.onStack(JetTypeMapper.ARRAY_GENERIC_TYPE).put(method.getArgumentTypes()[i], iv);
+                StackValue.onStack(JetTypeMapper.JAVA_ARRAY_GENERIC_TYPE).put(method.getArgumentTypes()[i], iv);
            }

            //noinspection AssignmentToForLoopParameter
            reg += argType.getSize();
        }

-        iv.load(0, JetTypeMapper.TYPE_OBJECT);
+        iv.load(0, JetTypeMapper.OBJECT_TYPE);
        field.put(field.type, iv);

        ClassDescriptor classDescriptor = (ClassDescriptor) overriddenDescriptor.getContainingDeclaration();
        String internalName =
diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/ImplementationBodyCodegen.java b/compiler/backend/src/org/jetbrains/jet/codegen/ImplementationBodyCodegen.java
index ee3011a08ab18..3fe4135ef910c 100644
--- a/compiler/backend/src/org/jetbrains/jet/codegen/ImplementationBodyCodegen.java
+++ b/compiler/backend/src/org/jetbrains/jet/codegen/ImplementationBodyCodegen.java
@@ -47,7 +47,7 @@
 import java.util.*;

 import static org.jetbrains.asm4.Opcodes.*;
-import static org.jetbrains.jet.codegen.JetTypeMapper.TYPE_OBJECT;
+import static org.jetbrains.jet.codegen.JetTypeMapper.OBJECT_TYPE;

 /**
  * @author max
@@ -400,7 +400,7 @@ else if (state.getClassBuilderMode() == ClassBuilderMode.FULL) {

        InstructionAdapter iv = new InstructionAdapter(mv);

-        iv.load(0, JetTypeMapper.TYPE_OBJECT);
+        iv.load(0, JetTypeMapper.OBJECT_TYPE);
        for (int i = 1, reg = 1; i < argTypes.length; i++) {
            Type argType = argTypes[i];
            iv.load(reg, argType);
@@ -437,7 +437,7 @@ else if (state.getClassBuilderMode() == ClassBuilderMode.FULL) {

        InstructionAdapter iv = new InstructionAdapter(mv);

-        iv.load(0, JetTypeMapper.TYPE_OBJECT);
+        iv.load(0, JetTypeMapper.OBJECT_TYPE);
        if (original.getVisibility() == Visibilities.PRIVATE) {
            iv.getfield(typeMapper.getOwner(original, OwnerKind.IMPLEMENTATION).getInternalName(), original.getName().getName(),
                        originalMethod.getReturnType().getDescriptor());
@@ -472,7 +472,7 @@ else if (state.getClassBuilderMode() == ClassBuilderMode.FULL) {

        InstructionAdapter iv = new InstructionAdapter(mv);

-        iv.load(0, JetTypeMapper.TYPE_OBJECT);
+        iv.load(0, JetTypeMapper.OBJECT_TYPE);
        Type[] argTypes = method.getArgumentTypes();
        for (int i = 1, reg = 1; i < argTypes.length; i++) {
            Type argType = argTypes[i];
@@ -701,7 +701,7 @@ else if (superCall instanceof JetDelegatorToSuperClass) {
        if (closure != null) {
            int k = hasOuterThis ? 2 : 1;
            if (closure.captureReceiver != null) {
-                iv.load(0, JetTypeMapper.TYPE_OBJECT);
+                iv.load(0, JetTypeMapper.OBJECT_TYPE);
                final Type asmType = typeMapper.mapType(closure.captureReceiver.getDefaultType(), MapTypeMode.IMPL);
                iv.load(1, asmType);
                iv.putfield(typeMapper.mapType(descriptor.getDefaultType(), MapTypeMode.VALUE).getInternalName(), "receiver$0",
@@ -715,7 +715,7 @@ else if (superCall instanceof JetDelegatorToSuperClass) {
                    if (sharedVarType == null) {
                        sharedVarType = typeMapper.mapType(((VariableDescriptor) varDescr).getType(), MapTypeMode.VALUE);
                    }
-                    iv.load(0, JetTypeMapper.TYPE_OBJECT);
+                    iv.load(0, JetTypeMapper.OBJECT_TYPE);
                    iv.load(k, StackValue.refType(sharedVarType));
                    k += StackValue.refType(sharedVarType).getSize();
                    iv.putfield(typeMapper.mapType(descriptor.getDefaultType(), MapTypeMode.VALUE).getInternalName(),
@@ -765,7 +765,7 @@ private void genSuperCallToDelegatorToSuperClass(InstructionAdapter iv) {
        assert superType != null;
        ClassDescriptor superClassDescriptor = (ClassDescriptor) superType.getConstructor().getDeclarationDescriptor();
        if (typeMapper.hasThis0(superClassDescriptor)) {
-            iv.load(1, JetTypeMapper.TYPE_OBJECT);
+            iv.load(1, JetTypeMapper.OBJECT_TYPE);
            parameterTypes.add(typeMapper.mapType(
                    typeMapper.getClosureAnnotator().getEclosingClassDescriptor(descriptor).getDefaultType(), MapTypeMode.VALUE));
        }
@@ -778,7 +778,7 @@ private void genSuperCallToDelegatorToSuperClass(InstructionAdapter iv) {
    private void genSimpleSuperCall(InstructionAdapter iv) {
        iv.load(0, Type.getType("L" + superClass + ";"));
        if (descriptor.getKind() == ClassKind.ENUM_CLASS || descriptor.getKind() == ClassKind.ENUM_ENTRY) {
-            iv.load(1, JetTypeMapper.JL_STRING_TYPE);
+            iv.load(1, JetTypeMapper.JAVA_STRING_TYPE);
            iv.load(2, Type.INT_TYPE);
            iv.invokespecial(superClass, "<init>", "(Ljava/lang/String;I)V");
        }
@@ -1025,7 +1025,7 @@ else if (state.getClassBuilderMode() == ClassBuilderMode.FULL) {
        Type[] argTypes = function.getArgumentTypes();
        List<Type> originalArgTypes = jvmSignature.getValueParameterTypes();
        InstructionAdapter iv = new InstructionAdapter(mv);
-        iv.load(0, JetTypeMapper.TYPE_OBJECT);
+        iv.load(0, JetTypeMapper.OBJECT_TYPE);
        for (int i = 0, reg = 1; i < argTypes.length; i++) {
            Type argType = argTypes[i];
            iv.load(reg, argType);
@@ -1065,9 +1065,9 @@ private void generateDelegatorToConstructorCall(
    ) {
        ClassDescriptor classDecl = constructorDescriptor.getContainingDeclaration();

-        iv.load(0, TYPE_OBJECT);
+        iv.load(0, OBJECT_TYPE);
        if (classDecl.getKind() == ClassKind.ENUM_CLASS || classDecl.getKind() == ClassKind.ENUM_ENTRY) {
-            iv.load(1, JetTypeMapper.TYPE_OBJECT);
+            iv.load(1, JetTypeMapper.OBJECT_TYPE);
            iv.load(2, Type.INT_TYPE);
        }

@@ -1183,7 +1183,7 @@ private void initializeEnumConstants(InstructionAdapter iv) {
            }
            iv.dup();
            iv.putstatic(myAsmType.getInternalName(), enumConstant.getName(), "L" + myAsmType.getInternalName() + ";");
-            iv.astore(TYPE_OBJECT);
+            iv.astore(OBJECT_TYPE);
        }
        iv.putstatic(myAsmType.getInternalName(), "$VALUES", arrayAsmType.getDescriptor());
    }
@@ -1206,7 +1206,7 @@ public static void generateInitializers(
                        Type type = typeMapper.mapType(jetType, MapTypeMode.VALUE);
                        if (skipDefaultValue(propertyDescriptor, value, type)) continue;
                    }
-                    iv.load(0, JetTypeMapper.TYPE_OBJECT);
+                    iv.load(0, JetTypeMapper.OBJECT_TYPE);
                    Type type = codegen.expressionType(initializer);
                    if (jetType.isNullable()) {
                        type = JetTypeMapper.boxType(type);
diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/JetTypeMapper.java b/compiler/backend/src/org/jetbrains/jet/codegen/JetTypeMapper.java
index 84d2ab5b4e2b5..1e26a784248f2 100644
--- a/compiler/backend/src/org/jetbrains/jet/codegen/JetTypeMapper.java
+++ b/compiler/backend/src/org/jetbrains/jet/codegen/JetTypeMapper.java
@@ -40,10 +40,10 @@
 import org.jetbrains.jet.lang.types.lang.JetStandardLibrary;
 import org.jetbrains.jet.lang.types.lang.JetStandardLibraryNames;

-import javax.annotation.PostConstruct;
 import javax.inject.Inject;
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.Iterator;
 import java.util.List;

 import static org.jetbrains.asm4.Opcodes.*;
@@ -53,36 +53,35 @@
  * @author alex.tkachman
  */
 public class JetTypeMapper {
-    public static final Type TYPE_OBJECT = Type.getObjectType("java/lang/Object");
-    public static final Type TYPE_THROWABLE = Type.getObjectType("java/lang/Throwable");
-    public static final Type TYPE_NOTHING = Type.getObjectType("jet/Nothing");
-    public static final Type JL_NUMBER_TYPE = Type.getObjectType("java/lang/Number");
-    public static final Type JL_STRING_BUILDER = Type.getObjectType("java/lang/StringBuilder");
-    public static final Type JL_STRING_TYPE = Type.getObjectType("java/lang/String");
-    public static final Type JL_ENUM_TYPE = Type.getObjectType("java/lang/Enum");
-    public static final Type JL_CHAR_SEQUENCE_TYPE = Type.getObjectType("java/lang/CharSequence");
-    public static final Type JL_COMPARABLE_TYPE = Type.getObjectType("java/lang/Comparable");
-    public static final Type JL_ITERABLE_TYPE = Type.getObjectType("java/lang/Iterable");
-    public static final Type JL_ITERATOR_TYPE = Type.getObjectType("java/util/Iterator");
-    public static final Type JL_CLASS_TYPE = Type.getObjectType("java/lang/Class");
-    public static final Type JL_BOOLEAN_TYPE = Type.getObjectType("java/lang/Boolean");
-
-    public static final Type ARRAY_GENERIC_TYPE = Type.getType(Object[].class);
-    public static final Type TUPLE0_TYPE = Type.getObjectType("jet/Tuple0");
-
-    public static final Type TYPE_ITERATOR = Type.getObjectType("jet/Iterator");
-    public static final Type TYPE_INT_RANGE = Type.getObjectType("jet/IntRange");
-    public static final Type TYPE_SHARED_VAR = Type.getObjectType("jet/runtime/SharedVar$Object");
-    public static final Type TYPE_SHARED_INT = Type.getObjectType("jet/runtime/SharedVar$Int");
-    public static final Type TYPE_SHARED_DOUBLE = Type.getObjectType("jet/runtime/SharedVar$Double");
-    public static final Type TYPE_SHARED_FLOAT = Type.getObjectType("jet/runtime/SharedVar$Float");
-    public static final Type TYPE_SHARED_BYTE = Type.getObjectType("jet/runtime/SharedVar$Byte");
-    public static final Type TYPE_SHARED_SHORT = Type.getObjectType("jet/runtime/SharedVar$Short");
-    public static final Type TYPE_SHARED_CHAR = Type.getObjectType("jet/runtime/SharedVar$Char");
-    public static final Type TYPE_SHARED_LONG = Type.getObjectType("jet/runtime/SharedVar$Long");
-    public static final Type TYPE_SHARED_BOOLEAN = Type.getObjectType("jet/runtime/SharedVar$Boolean");
-    public static final Type TYPE_FUNCTION0 = Type.getObjectType("jet/Function0");
-    public static final Type TYPE_FUNCTION1 = Type.getObjectType("jet/Function1");
+    public static final Type OBJECT_TYPE = Type.getType(Object.class);
+    public static final Type JAVA_NUMBER_TYPE = Type.getType(Number.class);
+    public static final Type JAVA_STRING_BUILDER_TYPE = Type.getType(StringBuilder.class);
+    public static final Type JAVA_STRING_TYPE = Type.getType(String.class);
+    public static final Type JAVA_ENUM_TYPE = Type.getType(Enum.class);
+    public static final Type JAVA_CHAR_SEQUENCE_TYPE = Type.getType(CharSequence.class);
+    public static final Type JAVA_COMPARABLE_TYPE = Type.getType(Comparable.class);
+    public static final Type JAVA_THROWABLE_TYPE = Type.getType(Throwable.class);
+    public static final Type JAVA_ITERABLE_TYPE = Type.getType(Iterable.class);
+    public static final Type JAVA_ITERATOR_TYPE = Type.getType(Iterator.class);
+    public static final Type JAVA_CLASS_TYPE = Type.getType(Class.class);
+    public static final Type JAVA_BOOLEAN_TYPE = Type.getType(Boolean.class);
+    public static final Type JAVA_ARRAY_GENERIC_TYPE = Type.getType(Object[].class);
+
+    public static final Type JET_NOTHING_TYPE = Type.getObjectType("jet/Nothing");
+    public static final Type JET_TUPLE0_TYPE = Type.getObjectType("jet/Tuple0");
+    public static final Type JET_FUNCTION0_TYPE = Type.getObjectType("jet/Function0");
+    public static final Type JET_FUNCTION1_TYPE = Type.getObjectType("jet/Function1");
+    public static final Type JET_ITERATOR_TYPE = Type.getObjectType("jet/Iterator");
+    public static final Type JET_INT_RANGE_TYPE = Type.getObjectType("jet/IntRange");
+    public static final Type JET_SHARED_VAR_TYPE = Type.getObjectType("jet/runtime/SharedVar$Object");
+    public static final Type JET_SHARED_INT_TYPE = Type.getObjectType("jet/runtime/SharedVar$Int");
+    public static final Type JET_SHARED_DOUBLE_TYPE = Type.getObjectType("jet/runtime/SharedVar$Double");
+    public static final Type JET_SHARED_FLOAT_TYPE = Type.getObjectType("jet/runtime/SharedVar$Float");
+    public static final Type JET_SHARED_BYTE_TYPE = Type.getObjectType("jet/runtime/SharedVar$Byte");
+    public static final Type JET_SHARED_SHORT_TYPE = Type.getObjectType("jet/runtime/SharedVar$Short");
+    public static final Type JET_SHARED_CHAR_TYPE = Type.getObjectType("jet/runtime/SharedVar$Char");
+    public static final Type JET_SHARED_LONG_TYPE = Type.getObjectType("jet/runtime/SharedVar$Long");
+    public static final Type JET_SHARED_BOOLEAN_TYPE = Type.getObjectType("jet/runtime/SharedVar$Boolean");

    public BindingContext bindingContext;
    private ClosureAnnotator closureAnnotator;
@@ -254,7 +253,7 @@ else if (jetType.equals(JetStandardClasses.getNothingType())) {
            if (signatureVisitor != null) {
                signatureVisitor.writeNothing(true);
            }
-            return TYPE_OBJECT;
+            return OBJECT_TYPE;
        }
        return mapType(jetType, signatureVisitor, MapTypeMode.VALUE);
    }
@@ -413,7 +412,7 @@ else if (kind == MapTypeMode.IMPL) {
            r = Type.getType("[" + boxType(mapType(memberType, kind)).getDescriptor());
        }
        else {
-            r = ARRAY_GENERIC_TYPE;
+            r = JAVA_ARRAY_GENERIC_TYPE;
        }
        checkValidType(r);
        return r;
diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/KotlinToJavaTypesMap.java b/compiler/backend/src/org/jetbrains/jet/codegen/KotlinToJavaTypesMap.java
index c0d653b31c14d..73421141c3390 100644
--- a/compiler/backend/src/org/jetbrains/jet/codegen/KotlinToJavaTypesMap.java
+++ b/compiler/backend/src/org/jetbrains/jet/codegen/KotlinToJavaTypesMap.java
@@ -25,7 +25,6 @@
 import org.jetbrains.jet.lang.resolve.DescriptorUtils;
 import org.jetbrains.jet.lang.resolve.java.JvmClassName;
 import org.jetbrains.jet.lang.resolve.java.JvmPrimitiveType;
-import org.jetbrains.jet.lang.resolve.name.FqNameBase;
 import org.jetbrains.jet.lang.resolve.name.FqNameUnsafe;
import org.jetbrains.jet.lang.types.JetType; import org.jetbrains.jet.lang.types.lang.PrimitiveType; @@ -83,7 +82,7 @@ private void registerNullable(@NotNull ClassName className, @NotNull Type nullab } public void init() { - register(NOTHING, TYPE_NOTHING); + register(NOTHING, JET_NOTHING_TYPE); for (JvmPrimitiveType jvmPrimitiveType : JvmPrimitiveType.values()) { ClassName className = jvmPrimitiveType.getPrimitiveType().getClassName(); @@ -92,17 +91,17 @@ public void init() { registerNullable(className, jvmPrimitiveType.getWrapper().getAsmType()); } - register(ANY, TYPE_OBJECT); - register(NUMBER, JL_NUMBER_TYPE); - register(STRING, JL_STRING_TYPE); - register(CHAR_SEQUENCE, JL_CHAR_SEQUENCE_TYPE); - register(THROWABLE, TYPE_THROWABLE); - register(COMPARABLE, JL_COMPARABLE_TYPE); - register(ENUM, JL_ENUM_TYPE); - register(ITERABLE, JL_ITERABLE_TYPE); - register(ITERATOR, JL_ITERATOR_TYPE); - register(MUTABLE_ITERABLE, JL_ITERABLE_TYPE); - register(MUTABLE_ITERATOR, JL_ITERATOR_TYPE); + register(ANY, OBJECT_TYPE); + register(NUMBER, JAVA_NUMBER_TYPE); + register(STRING, JAVA_STRING_TYPE); + register(CHAR_SEQUENCE, JAVA_CHAR_SEQUENCE_TYPE); + register(THROWABLE, JAVA_THROWABLE_TYPE); + register(COMPARABLE, JAVA_COMPARABLE_TYPE); + register(ENUM, JAVA_ENUM_TYPE); + register(ITERABLE, JAVA_ITERABLE_TYPE); + register(ITERATOR, JAVA_ITERATOR_TYPE); + register(MUTABLE_ITERABLE, JAVA_ITERABLE_TYPE); + register(MUTABLE_ITERATOR, JAVA_ITERATOR_TYPE); for (JvmPrimitiveType jvmPrimitiveType : JvmPrimitiveType.values()) { PrimitiveType primitiveType = jvmPrimitiveType.getPrimitiveType(); diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/PropertyCodegen.java b/compiler/backend/src/org/jetbrains/jet/codegen/PropertyCodegen.java index dad268eb5f8f6..c2b8229baae89 100644 --- a/compiler/backend/src/org/jetbrains/jet/codegen/PropertyCodegen.java +++ b/compiler/backend/src/org/jetbrains/jet/codegen/PropertyCodegen.java @@ -215,7 +215,7 @@ public void generateDefaultGetter(PropertyDescriptor propertyDescriptor, int fla else { InstructionAdapter iv = new InstructionAdapter(mv); if (kind != OwnerKind.NAMESPACE) { - iv.load(0, JetTypeMapper.TYPE_OBJECT); + iv.load(0, JetTypeMapper.OBJECT_TYPE); } final Type type = state.getInjector().getJetTypeMapper().mapType(propertyDescriptor.getType(), MapTypeMode.VALUE); @@ -225,7 +225,7 @@ public void generateDefaultGetter(PropertyDescriptor propertyDescriptor, int fla if (kind instanceof OwnerKind.DelegateKind) { OwnerKind.DelegateKind dk = (OwnerKind.DelegateKind) kind; - dk.getDelegate().put(JetTypeMapper.TYPE_OBJECT, iv); + dk.getDelegate().put(JetTypeMapper.OBJECT_TYPE, iv); iv.invokeinterface(dk.getOwnerClass(), getterName, descriptor); } else { @@ -320,7 +320,7 @@ public void generateDefaultSetter(PropertyDescriptor propertyDescriptor, int fla final Type type = state.getInjector().getJetTypeMapper().mapType(propertyDescriptor.getType(), MapTypeMode.VALUE); int paramCode = 0; if (kind != OwnerKind.NAMESPACE) { - iv.load(0, JetTypeMapper.TYPE_OBJECT); + iv.load(0, JetTypeMapper.OBJECT_TYPE); paramCode = 1; } @@ -330,8 +330,8 @@ public void generateDefaultSetter(PropertyDescriptor propertyDescriptor, int fla if (kind instanceof OwnerKind.DelegateKind) { OwnerKind.DelegateKind dk = (OwnerKind.DelegateKind) kind; - iv.load(0, JetTypeMapper.TYPE_OBJECT); - dk.getDelegate().put(JetTypeMapper.TYPE_OBJECT, iv); + iv.load(0, JetTypeMapper.OBJECT_TYPE); + dk.getDelegate().put(JetTypeMapper.OBJECT_TYPE, iv); iv.load(paramCode, type); 
iv.invokeinterface(dk.getOwnerClass(), setterName(propertyDescriptor.getName()), descriptor); diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/ScriptCodegen.java b/compiler/backend/src/org/jetbrains/jet/codegen/ScriptCodegen.java index 9b0b2f829b4ff..5bc78204c0001 100644 --- a/compiler/backend/src/org/jetbrains/jet/codegen/ScriptCodegen.java +++ b/compiler/backend/src/org/jetbrains/jet/codegen/ScriptCodegen.java @@ -38,7 +38,7 @@ import javax.inject.Inject; import java.util.List; -import static org.jetbrains.jet.codegen.JetTypeMapper.TYPE_OBJECT; +import static org.jetbrains.jet.codegen.JetTypeMapper.OBJECT_TYPE; /** * @author Stepan Koltsov @@ -159,7 +159,7 @@ private void genConstructor( FrameMap frameMap = context.prepareFrame(jetTypeMapper); for (ScriptDescriptor importedScript : importedScripts) { - frameMap.enter(importedScript, TYPE_OBJECT); + frameMap.enter(importedScript, OBJECT_TYPE); } Type[] argTypes = jvmSignature.getAsmMethod().getArgumentTypes(); diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/StackValue.java b/compiler/backend/src/org/jetbrains/jet/codegen/StackValue.java index 4abae0902b253..de80b0316f248 100644 --- a/compiler/backend/src/org/jetbrains/jet/codegen/StackValue.java +++ b/compiler/backend/src/org/jetbrains/jet/codegen/StackValue.java @@ -35,7 +35,7 @@ import java.util.List; -import static org.jetbrains.jet.codegen.JetTypeMapper.TYPE_OBJECT; +import static org.jetbrains.jet.codegen.JetTypeMapper.OBJECT_TYPE; /** * @author yole @@ -267,15 +267,15 @@ else if (toType == Type.DOUBLE_TYPE) { v.iconst(0); } } - else if (toType.equals(JetTypeMapper.TUPLE0_TYPE) && !fromType.equals(JetTypeMapper.TUPLE0_TYPE)) { + else if (toType.equals(JetTypeMapper.JET_TUPLE0_TYPE) && !fromType.equals(JetTypeMapper.JET_TUPLE0_TYPE)) { pop(fromType, v); putTuple0Instance(v); } - else if (toType.getSort() == Type.OBJECT && fromType.equals(TYPE_OBJECT) || toType.getSort() == Type.ARRAY) { + else if (toType.getSort() == Type.OBJECT && fromType.equals(OBJECT_TYPE) || toType.getSort() == Type.ARRAY) { v.checkcast(toType); } else if (toType.getSort() == Type.OBJECT) { - if (fromType.getSort() == Type.OBJECT && !toType.equals(TYPE_OBJECT)) { + if (fromType.getSort() == Type.OBJECT && !toType.equals(OBJECT_TYPE)) { v.checkcast(toType); } else { @@ -283,7 +283,7 @@ else if (toType.getSort() == Type.OBJECT) { } } else if (fromType.getSort() == Type.OBJECT && toType.getSort() <= Type.DOUBLE) { - if (fromType.equals(TYPE_OBJECT)) { + if (fromType.equals(OBJECT_TYPE)) { if (toType.getSort() == Type.BOOLEAN) { v.checkcast(JvmPrimitiveType.BOOLEAN.getWrapper().getAsmType()); } @@ -291,7 +291,7 @@ else if (toType.getSort() == Type.CHAR) { v.checkcast(JvmPrimitiveType.CHAR.getWrapper().getAsmType()); } else { - v.checkcast(JetTypeMapper.JL_NUMBER_TYPE); + v.checkcast(JetTypeMapper.JAVA_NUMBER_TYPE); } } unbox(toType, v); @@ -477,11 +477,11 @@ public void put(Type type, InstructionAdapter v) { if (type == Type.VOID_TYPE) { return; } - if (type.equals(JetTypeMapper.TUPLE0_TYPE)) { + if (type.equals(JetTypeMapper.JET_TUPLE0_TYPE)) { putTuple0Instance(v); return; } - if (type != Type.BOOLEAN_TYPE && !type.equals(TYPE_OBJECT) && !type.equals(JetTypeMapper.JL_BOOLEAN_TYPE)) { + if (type != Type.BOOLEAN_TYPE && !type.equals(OBJECT_TYPE) && !type.equals(JetTypeMapper.JAVA_BOOLEAN_TYPE)) { throw new UnsupportedOperationException("don't know how to put a compare as a non-boolean type " + type); } putAsBoolean(v); @@ -570,7 +570,7 @@ public void put(Type type, InstructionAdapter v) { 
myOperand.put(type, v); // the operand will remove itself from the stack if needed return; } - if (type != Type.BOOLEAN_TYPE && !type.equals(TYPE_OBJECT) && !type.equals(JetTypeMapper.JL_BOOLEAN_TYPE)) { + if (type != Type.BOOLEAN_TYPE && !type.equals(OBJECT_TYPE) && !type.equals(JetTypeMapper.JAVA_BOOLEAN_TYPE)) { throw new UnsupportedOperationException("don't know how to put a compare as a non-boolean type"); } putAsBoolean(v); @@ -718,10 +718,10 @@ public void dupReceiver(InstructionAdapter v) { int firstTypeParamIndex = -1; for (int i = typeParameters.size() - 1; i >= 0; --i) { if (typeParameters.get(i).isReified()) { - frame.enterTemp(TYPE_OBJECT); + frame.enterTemp(OBJECT_TYPE); lastIndex++; size++; - v.store(firstTypeParamIndex = lastIndex - 1, TYPE_OBJECT); + v.store(firstTypeParamIndex = lastIndex - 1, OBJECT_TYPE); } } @@ -739,10 +739,10 @@ public void dupReceiver(InstructionAdapter v) { ReceiverDescriptor thisObject = resolvedGetCall.getThisObject(); int thisIndex = -1; if (thisObject.exists()) { - frame.enterTemp(TYPE_OBJECT); + frame.enterTemp(OBJECT_TYPE); lastIndex++; size++; - v.store((thisIndex = lastIndex) - 1, TYPE_OBJECT); + v.store((thisIndex = lastIndex) - 1, OBJECT_TYPE); } // for setter @@ -756,7 +756,7 @@ public void dupReceiver(InstructionAdapter v) { } else { realReceiverIndex = thisIndex; - realReceiverType = TYPE_OBJECT; + realReceiverType = OBJECT_TYPE; } } else { @@ -771,7 +771,7 @@ public void dupReceiver(InstructionAdapter v) { if (resolvedSetCall.getThisObject().exists()) { if (resolvedSetCall.getReceiverArgument().exists()) { - codegen.generateFromResolvedCall(resolvedSetCall.getThisObject(), TYPE_OBJECT); + codegen.generateFromResolvedCall(resolvedSetCall.getThisObject(), OBJECT_TYPE); } v.load(realReceiverIndex - realReceiverType.getSize(), realReceiverType); } @@ -794,7 +794,7 @@ public void dupReceiver(InstructionAdapter v) { // restoring original if (thisIndex != -1) { - v.load(thisIndex - 1, TYPE_OBJECT); + v.load(thisIndex - 1, OBJECT_TYPE); } if (receiverIndex != -1) { @@ -806,7 +806,7 @@ public void dupReceiver(InstructionAdapter v) { index = firstTypeParamIndex; for (int i = 0; i != typeParameters.size(); ++i) { if (typeParameters.get(i).isReified()) { - v.load(index - 1, TYPE_OBJECT); + v.load(index - 1, OBJECT_TYPE); index--; } } @@ -821,7 +821,7 @@ public void dupReceiver(InstructionAdapter v) { } for (int i = 0; i < size; i++) { - frame.leaveTemp(TYPE_OBJECT); + frame.leaveTemp(OBJECT_TYPE); } } } @@ -1022,7 +1022,7 @@ public int getIndex() { @Override public void put(Type type, InstructionAdapter v) { - v.load(index, TYPE_OBJECT); + v.load(index, OBJECT_TYPE); Type refType = refType(this.type); Type sharedType = sharedTypeForType(this.type); v.visitFieldInsn(Opcodes.GETFIELD, sharedType.getInternalName(), "ref", refType.getDescriptor()); @@ -1030,13 +1030,13 @@ public void put(Type type, InstructionAdapter v) { coerce(this.type, type, v); if (isReleaseOnPut) { v.aconst(null); - v.store(index, TYPE_OBJECT); + v.store(index, OBJECT_TYPE); } } @Override public void store(Type topOfStackType, InstructionAdapter v) { - v.load(index, TYPE_OBJECT); + v.load(index, OBJECT_TYPE); v.swap(); Type refType = refType(this.type); Type sharedType = sharedTypeForType(this.type); @@ -1048,31 +1048,31 @@ public static Type sharedTypeForType(Type type) { switch (type.getSort()) { case Type.OBJECT: case Type.ARRAY: - return JetTypeMapper.TYPE_SHARED_VAR; + return JetTypeMapper.JET_SHARED_VAR_TYPE; case Type.BYTE: - return JetTypeMapper.TYPE_SHARED_BYTE; + 
return JetTypeMapper.JET_SHARED_BYTE_TYPE; case Type.SHORT: - return JetTypeMapper.TYPE_SHARED_SHORT; + return JetTypeMapper.JET_SHARED_SHORT_TYPE; case Type.CHAR: - return JetTypeMapper.TYPE_SHARED_CHAR; + return JetTypeMapper.JET_SHARED_CHAR_TYPE; case Type.INT: - return JetTypeMapper.TYPE_SHARED_INT; + return JetTypeMapper.JET_SHARED_INT_TYPE; case Type.LONG: - return JetTypeMapper.TYPE_SHARED_LONG; + return JetTypeMapper.JET_SHARED_LONG_TYPE; case Type.BOOLEAN: - return JetTypeMapper.TYPE_SHARED_BOOLEAN; + return JetTypeMapper.JET_SHARED_BOOLEAN_TYPE; case Type.FLOAT: - return JetTypeMapper.TYPE_SHARED_FLOAT; + return JetTypeMapper.JET_SHARED_FLOAT_TYPE; case Type.DOUBLE: - return JetTypeMapper.TYPE_SHARED_DOUBLE; + return JetTypeMapper.JET_SHARED_DOUBLE_TYPE; default: throw new UnsupportedOperationException(); @@ -1081,7 +1081,7 @@ public static Type sharedTypeForType(Type type) { public static Type refType(Type type) { if (type.getSort() == Type.OBJECT || type.getSort() == Type.ARRAY) { - return TYPE_OBJECT; + return OBJECT_TYPE; } return type; @@ -1141,7 +1141,7 @@ public void put(Type type, InstructionAdapter v) { @Override public void store(Type topOfStackType, InstructionAdapter v) { - prefix.put(TYPE_OBJECT, v); + prefix.put(OBJECT_TYPE, v); suffix.store(topOfStackType, v); } } @@ -1151,7 +1151,7 @@ private static class ThisOuter extends StackValue { private final ClassDescriptor descriptor; public ThisOuter(ExpressionCodegen codegen, ClassDescriptor descriptor) { - super(TYPE_OBJECT); + super(OBJECT_TYPE); this.codegen = codegen; this.descriptor = descriptor; } diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/ArrayGet.java b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/ArrayGet.java index d5aa57215dfe2..641393cf443e7 100644 --- a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/ArrayGet.java +++ b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/ArrayGet.java @@ -42,7 +42,7 @@ public StackValue generate( StackValue receiver, @NotNull GenerationState state ) { - receiver.put(JetTypeMapper.TYPE_OBJECT, v); + receiver.put(JetTypeMapper.OBJECT_TYPE, v); Type type = JetTypeMapper.correctElementType(receiver.type); codegen.gen(arguments.get(0), Type.INT_TYPE); diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/ArrayIndices.java b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/ArrayIndices.java index 27ba2ad13cb9d..0342c1154de81 100644 --- a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/ArrayIndices.java +++ b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/ArrayIndices.java @@ -39,9 +39,9 @@ public StackValue generate( StackValue receiver, @NotNull GenerationState state ) { - receiver.put(JetTypeMapper.TYPE_OBJECT, v); + receiver.put(JetTypeMapper.OBJECT_TYPE, v); v.arraylength(); v.invokestatic("jet/IntRange", "count", "(I)Ljet/IntRange;"); - return StackValue.onStack(JetTypeMapper.TYPE_INT_RANGE); + return StackValue.onStack(JetTypeMapper.JET_INT_RANGE_TYPE); } } diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/ArrayIterator.java b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/ArrayIterator.java index 12d5c00c57bac..ec6871783f657 100644 --- a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/ArrayIterator.java +++ b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/ArrayIterator.java @@ -50,7 +50,7 @@ public StackValue generate( StackValue receiver, @NotNull GenerationState state ) { - receiver.put(JetTypeMapper.TYPE_OBJECT, v); 
+ receiver.put(JetTypeMapper.OBJECT_TYPE, v); JetCallExpression call = (JetCallExpression) element; FunctionDescriptor funDescriptor = (FunctionDescriptor) codegen.getBindingContext() .get(BindingContext.REFERENCE_TARGET, (JetSimpleNameExpression) call.getCalleeExpression()); @@ -58,7 +58,7 @@ public StackValue generate( ClassDescriptor containingDeclaration = (ClassDescriptor) funDescriptor.getContainingDeclaration().getOriginal(); if (JetStandardLibraryNames.ARRAY.is(containingDeclaration)) { v.invokestatic("jet/runtime/ArrayIterator", "iterator", "([Ljava/lang/Object;)Ljava/util/Iterator;"); - return StackValue.onStack(JetTypeMapper.TYPE_ITERATOR); + return StackValue.onStack(JetTypeMapper.JET_ITERATOR_TYPE); } else { for (JvmPrimitiveType jvmPrimitiveType : JvmPrimitiveType.values()) { diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/ArraySet.java b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/ArraySet.java index fbb246db59a86..c6ce6e15fbb91 100644 --- a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/ArraySet.java +++ b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/ArraySet.java @@ -42,7 +42,7 @@ public StackValue generate( StackValue receiver, @NotNull GenerationState state ) { - receiver.put(JetTypeMapper.TYPE_OBJECT, v); + receiver.put(JetTypeMapper.OBJECT_TYPE, v); Type type = JetTypeMapper.correctElementType(receiver.type); codegen.gen(arguments.get(0), Type.INT_TYPE); diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/ArraySize.java b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/ArraySize.java index 8b77a3f991505..27cac29da8334 100644 --- a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/ArraySize.java +++ b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/ArraySize.java @@ -42,7 +42,7 @@ public StackValue generate( StackValue receiver, @NotNull GenerationState state ) { - receiver.put(JetTypeMapper.TYPE_OBJECT, v); + receiver.put(JetTypeMapper.OBJECT_TYPE, v); v.arraylength(); return StackValue.onStack(Type.INT_TYPE); diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/Concat.java b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/Concat.java index c115466b72402..6496a7e45a7fb 100644 --- a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/Concat.java +++ b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/Concat.java @@ -48,7 +48,7 @@ public StackValue generate( codegen.invokeAppend(arguments.get(1)); } else { // LHS.plus(RHS) - receiver.put(JetTypeMapper.TYPE_OBJECT, v); + receiver.put(JetTypeMapper.OBJECT_TYPE, v); codegen.generateStringBuilderConstructor(); v.swap(); // StringBuilder LHS codegen.invokeAppendMethod(expectedType); // StringBuilder(LHS) @@ -56,7 +56,7 @@ public StackValue generate( } v.invokevirtual("java/lang/StringBuilder", "toString", "()Ljava/lang/String;"); - StackValue.onStack(JetTypeMapper.JL_STRING_TYPE).put(expectedType, v); + StackValue.onStack(JetTypeMapper.JAVA_STRING_TYPE).put(expectedType, v); return StackValue.onStack(expectedType); } } diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/EnumName.java b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/EnumName.java index a5cdeec1e7973..4dc5c75640be0 100644 --- a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/EnumName.java +++ b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/EnumName.java @@ -40,9 +40,9 @@ public StackValue generate( StackValue receiver, @NotNull GenerationState state ) { - 
receiver.put(JetTypeMapper.TYPE_OBJECT, v); + receiver.put(JetTypeMapper.OBJECT_TYPE, v); v.invokevirtual("java/lang/Enum", "name", "()Ljava/lang/String;"); - StackValue.onStack(JetTypeMapper.JL_STRING_TYPE).put(expectedType, v); + StackValue.onStack(JetTypeMapper.JAVA_STRING_TYPE).put(expectedType, v); return StackValue.onStack(expectedType); } } diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/EnumOrdinal.java b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/EnumOrdinal.java index 48985a78d6252..77d5827f0acf9 100644 --- a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/EnumOrdinal.java +++ b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/EnumOrdinal.java @@ -40,7 +40,7 @@ public StackValue generate( StackValue receiver, @NotNull GenerationState state ) { - receiver.put(JetTypeMapper.TYPE_OBJECT, v); + receiver.put(JetTypeMapper.OBJECT_TYPE, v); v.invokevirtual("java/lang/Enum", "ordinal", "()I"); StackValue.onStack(Type.INT_TYPE).put(expectedType, v); return StackValue.onStack(expectedType); diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/EnumValueOf.java b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/EnumValueOf.java index 94eef1995505a..8cc1eee6fa0ca 100644 --- a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/EnumValueOf.java +++ b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/EnumValueOf.java @@ -50,7 +50,7 @@ public StackValue generate( Type type = state.getInjector().getJetTypeMapper().mapType( returnType, MapTypeMode.VALUE); assert arguments != null; - codegen.gen(arguments.get(0), JetTypeMapper.JL_STRING_TYPE); + codegen.gen(arguments.get(0), JetTypeMapper.JAVA_STRING_TYPE); v.invokestatic(type.getInternalName(), "valueOf", "(Ljava/lang/String;)" + type.getDescriptor()); StackValue.onStack(type).put(expectedType, v); return StackValue.onStack(expectedType); diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/Equals.java b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/Equals.java index 54b5287e5cbe9..99f0eeaab6cfa 100644 --- a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/Equals.java +++ b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/Equals.java @@ -50,7 +50,7 @@ public StackValue generate( boolean leftNullable = true; JetExpression rightExpr; if (element instanceof JetCallExpression) { - receiver.put(JetTypeMapper.TYPE_OBJECT, v); + receiver.put(JetTypeMapper.OBJECT_TYPE, v); JetCallExpression jetCallExpression = (JetCallExpression) element; JetExpression calleeExpression = jetCallExpression.getCalleeExpression(); if (calleeExpression != null) { @@ -66,16 +66,16 @@ public StackValue generate( JetType leftType = codegen.getBindingContext().get(BindingContext.EXPRESSION_TYPE, leftExpr); assert leftType != null; leftNullable = leftType.isNullable(); - codegen.gen(leftExpr).put(JetTypeMapper.TYPE_OBJECT, v); + codegen.gen(leftExpr).put(JetTypeMapper.OBJECT_TYPE, v); rightExpr = arguments.get(1); } JetType rightType = codegen.getBindingContext().get(BindingContext.EXPRESSION_TYPE, rightExpr); - codegen.gen(rightExpr).put(JetTypeMapper.TYPE_OBJECT, v); + codegen.gen(rightExpr).put(JetTypeMapper.OBJECT_TYPE, v); assert rightType != null; return codegen - .generateEqualsForExpressionsOnStack(JetTokens.EQEQ, JetTypeMapper.TYPE_OBJECT, JetTypeMapper.TYPE_OBJECT, leftNullable, + .generateEqualsForExpressionsOnStack(JetTokens.EQEQ, JetTypeMapper.OBJECT_TYPE, JetTypeMapper.OBJECT_TYPE, leftNullable, rightType.isNullable()); } } 
diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/HashCode.java b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/HashCode.java index e4c97194e3fa7..d12a0c7eb092d 100644 --- a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/HashCode.java +++ b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/HashCode.java @@ -44,7 +44,7 @@ public StackValue generate( StackValue receiver, @NotNull GenerationState state ) { - receiver.put(JetTypeMapper.TYPE_OBJECT, v); + receiver.put(JetTypeMapper.OBJECT_TYPE, v); v.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/Object", "hashCode", "()I"); return StackValue.onStack(Type.INT_TYPE); } diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/IdentityEquals.java b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/IdentityEquals.java index 092c007c52a09..20c73524fcf07 100644 --- a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/IdentityEquals.java +++ b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/IdentityEquals.java @@ -46,15 +46,15 @@ public StackValue generate( @NotNull GenerationState state ) { if (element instanceof JetCallExpression) { - receiver.put(JetTypeMapper.TYPE_OBJECT, v); - codegen.gen(arguments.get(0)).put(JetTypeMapper.TYPE_OBJECT, v); + receiver.put(JetTypeMapper.OBJECT_TYPE, v); + codegen.gen(arguments.get(0)).put(JetTypeMapper.OBJECT_TYPE, v); } else { assert element instanceof JetBinaryExpression; JetBinaryExpression e = (JetBinaryExpression) element; - codegen.gen(e.getLeft()).put(JetTypeMapper.TYPE_OBJECT, v); - codegen.gen(e.getRight()).put(JetTypeMapper.TYPE_OBJECT, v); + codegen.gen(e.getLeft()).put(JetTypeMapper.OBJECT_TYPE, v); + codegen.gen(e.getRight()).put(JetTypeMapper.OBJECT_TYPE, v); } - return StackValue.cmp(JetTokens.EQEQEQ, JetTypeMapper.TYPE_OBJECT); + return StackValue.cmp(JetTokens.EQEQEQ, JetTypeMapper.OBJECT_TYPE); } } diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/IteratorNext.java b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/IteratorNext.java index 2d4e72aa8cc9b..5b0f99ccd9ed0 100644 --- a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/IteratorNext.java +++ b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/IteratorNext.java @@ -70,7 +70,7 @@ else if (expectedType == Type.DOUBLE_TYPE) { else { throw new UnsupportedOperationException(); } - receiver.put(JetTypeMapper.TYPE_OBJECT, v); + receiver.put(JetTypeMapper.OBJECT_TYPE, v); v.invokevirtual("jet/" + name + "Iterator", "next" + name, "()" + expectedType.getDescriptor()); return StackValue.onStack(expectedType); } diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/JavaClassFunction.java b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/JavaClassFunction.java index cadb011d4a5aa..93c361912d7eb 100644 --- a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/JavaClassFunction.java +++ b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/JavaClassFunction.java @@ -57,6 +57,6 @@ public StackValue generate( else { v.aconst(type); } - return StackValue.onStack(JetTypeMapper.JL_CLASS_TYPE); + return StackValue.onStack(JetTypeMapper.JAVA_CLASS_TYPE); } } diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/JavaClassProperty.java b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/JavaClassProperty.java index 419c65374c5c4..42da8af0975ac 100644 --- a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/JavaClassProperty.java +++ 
b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/JavaClassProperty.java @@ -53,6 +53,6 @@ public StackValue generate( else { v.invokevirtual("java/lang/Object", "getClass", "()Ljava/lang/Class;"); } - return StackValue.onStack(JetTypeMapper.JL_CLASS_TYPE); + return StackValue.onStack(JetTypeMapper.JAVA_CLASS_TYPE); } } diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/StringGetChar.java b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/StringGetChar.java index d0d3874bf4c3b..08b5db7e31440 100644 --- a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/StringGetChar.java +++ b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/StringGetChar.java @@ -43,7 +43,7 @@ public StackValue generate( @NotNull GenerationState state ) { if (receiver != null) { - receiver.put(JetTypeMapper.TYPE_OBJECT, v); + receiver.put(JetTypeMapper.OBJECT_TYPE, v); } if (arguments != null) { codegen.gen(arguments.get(0)).put(Type.INT_TYPE, v); diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/StringLength.java b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/StringLength.java index a7183c4ea3391..5504c0db82131 100644 --- a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/StringLength.java +++ b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/StringLength.java @@ -42,7 +42,7 @@ public StackValue generate( StackValue receiver, @NotNull GenerationState state ) { - receiver.put(JetTypeMapper.TYPE_OBJECT, v); + receiver.put(JetTypeMapper.OBJECT_TYPE, v); v.invokeinterface("java/lang/CharSequence", "length", "()I"); return StackValue.onStack(Type.INT_TYPE); } diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/StringPlus.java b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/StringPlus.java index c889a9305988b..114731920546d 100644 --- a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/StringPlus.java +++ b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/StringPlus.java @@ -43,14 +43,14 @@ public StackValue generate( @NotNull GenerationState state ) { if (receiver == null || receiver == StackValue.none()) { - codegen.gen(arguments.get(0)).put(JetTypeMapper.JL_STRING_TYPE, v); - codegen.gen(arguments.get(1)).put(JetTypeMapper.TYPE_OBJECT, v); + codegen.gen(arguments.get(0)).put(JetTypeMapper.JAVA_STRING_TYPE, v); + codegen.gen(arguments.get(1)).put(JetTypeMapper.OBJECT_TYPE, v); } else { - receiver.put(JetTypeMapper.JL_STRING_TYPE, v); - codegen.gen(arguments.get(0)).put(JetTypeMapper.TYPE_OBJECT, v); + receiver.put(JetTypeMapper.JAVA_STRING_TYPE, v); + codegen.gen(arguments.get(0)).put(JetTypeMapper.OBJECT_TYPE, v); } v.invokestatic("jet/runtime/Intrinsics", "stringPlus", "(Ljava/lang/String;Ljava/lang/Object;)Ljava/lang/String;"); - return StackValue.onStack(JetTypeMapper.JL_STRING_TYPE); + return StackValue.onStack(JetTypeMapper.JAVA_STRING_TYPE); } } diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/StupidSync.java b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/StupidSync.java index 618543e664563..d0b72fa352e6d 100644 --- a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/StupidSync.java +++ b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/StupidSync.java @@ -42,9 +42,9 @@ public StackValue generate( StackValue receiver, @NotNull GenerationState state ) { - codegen.pushMethodArguments((JetCallExpression) element, Arrays.asList(JetTypeMapper.TYPE_OBJECT, JetTypeMapper.TYPE_FUNCTION0)); + 
codegen.pushMethodArguments((JetCallExpression) element, Arrays.asList(JetTypeMapper.OBJECT_TYPE, JetTypeMapper.JET_FUNCTION0_TYPE)); v.invokestatic("jet/runtime/Intrinsics", "stupidSync", "(Ljava/lang/Object;Ljet/Function0;)Ljava/lang/Object;"); - StackValue.onStack(JetTypeMapper.TYPE_OBJECT).put(expectedType, v); + StackValue.onStack(JetTypeMapper.OBJECT_TYPE).put(expectedType, v); return StackValue.onStack(expectedType); } } diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/ToString.java b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/ToString.java index 34b1507b67486..601255291db28 100644 --- a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/ToString.java +++ b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/ToString.java @@ -42,8 +42,8 @@ public StackValue generate( StackValue receiver, @NotNull GenerationState state ) { - receiver.put(JetTypeMapper.TYPE_OBJECT, v); + receiver.put(JetTypeMapper.OBJECT_TYPE, v); v.invokestatic("java/lang/String", "valueOf", "(Ljava/lang/Object;)Ljava/lang/String;"); - return StackValue.onStack(JetTypeMapper.JL_STRING_TYPE); + return StackValue.onStack(JetTypeMapper.JAVA_STRING_TYPE); } } diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/signature/BothSignatureWriter.java b/compiler/backend/src/org/jetbrains/jet/codegen/signature/BothSignatureWriter.java index d08c6e2d61011..142a0c0bc0230 100644 --- a/compiler/backend/src/org/jetbrains/jet/codegen/signature/BothSignatureWriter.java +++ b/compiler/backend/src/org/jetbrains/jet/codegen/signature/BothSignatureWriter.java @@ -196,7 +196,7 @@ public void writeNothing(boolean nullable) { jetSignatureWriter.visitClassType("jet/Nothing", nullable, false); jetSignatureWriter.visitEnd(); if (nullable) { - writeAsmType0(JetTypeMapper.TYPE_OBJECT); + writeAsmType0(JetTypeMapper.OBJECT_TYPE); } else { writeAsmType0(Type.VOID_TYPE);
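A side note on the JetTypeMapper hunk above: besides the rename, the JDK constants switch from the string-based factory `Type.getObjectType("java/lang/...")` to the class-literal factory `Type.getType(X.class)`. Below is a minimal sketch showing the two factories agree for reference types; it uses the standard `org.objectweb.asm.Type`, which exposes the same API as the repackaged `org.jetbrains.asm4.Type` in this codebase (an assumption made so the snippet is self-contained):

```java
import org.objectweb.asm.Type;

public class TypeFactoryEquivalence {
    public static void main(String[] args) {
        // Class-literal factory (new OBJECT_TYPE style) and internal-name
        // factory (old TYPE_OBJECT style) build equal Type values.
        Type byClass = Type.getType(Object.class);
        Type byName = Type.getObjectType("java/lang/Object");
        System.out.println(byClass.getDescriptor());  // Ljava/lang/Object;
        System.out.println(byName.getInternalName()); // java/lang/Object
        System.out.println(byClass.equals(byName));   // true
    }
}
```

Class literals are typo-proof and refactor-friendly, which presumably motivates the switch; the jet/* runtime constants keep `Type.getObjectType(String)`, presumably because those classes are not available as class literals on the compiler's own classpath.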
961455cd9d5a09f99612eb6a0edb575b9c308b82
hbase
[jira] [HBASE-5074] Support checksums in HBase- block cache--Author: Dhruba--Summary:-HFile is enhanced to store a checksum for each block. HDFS checksum verification-is avoided while reading data into the block cache. On a checksum verification-failure, we retry the file system read request with hdfs checksums switched on-(thanks Todd).--I have a benchmark that shows that it reduces iops on the disk by about 40%. In-this experiment, the entire memory on the regionserver is allocated to the-regionserver's jvm and the OS buffer cache size is negligible. I also measured-negligible (<5%) additional cpu usage while using hbase-level checksums.--The salient points of this patch:--1. Each hfile's trailer used to have a 4 byte version number. I enhanced this so-that these 4 bytes can be interpreted as a (major version number, minor-version). Pre-existing hfiles have a minor version of 0. The new hfile format-has a minor version of 1 (thanks Mikhail). The hfile major version remains-unchanged at 2. The reason I did not introduce a new major version number is-because the code changes needed to store/read checksums do not differ much from-existing V2 writers/readers.--2. Introduced a HFileSystem object which encapsulates the FileSystem-objects needed to access data from hfiles and hlogs. HDFS FileSystem objects-already had the ability to switch off checksum verifications for reads.--3. The majority of the code changes are located in the hbase.io.hfile package. The-retry of a read on an initial checksum failure occurs inside the hbase.io.hfile-package itself. The code changes to the hbase.regionserver package are minor.--4. The format of a hfileblock is the header followed by the data followed by the-checksum(s). Each 16 K (configurable) size of data has a 4 byte checksum. The-hfileblock header has two additional fields: a 4 byte value to store the-bytesPerChecksum and a 4 byte value to store the size of the user data-(excluding the checksum data). This is well explained in the associated-javadocs.--5. I added a test to test backward compatibility. I will be writing more unit-tests that trigger checksum verification failures aggressively. I have left a-few redundant log messages in the code (just for easier debugging) and will-remove them in a later stage of this patch. I will also be adding metrics on-the number of checksum verification failures/successes in a later version of this-diff.--6. By default, hbase-level checksums are switched on and hdfs level checksums-are switched off for hfile-reads. No changes to the Hlog code path here.--Test Plan: The default setting is to switch on hbase checksums for hfile-reads,-thus all existing tests actually validate the new code pieces. I will be writing-more unit tests for triggering checksum verification failures.--Reviewers: mbautin--Reviewed By: mbautin--CC: JIRA, tedyu, mbautin, dhruba, todd, stack--Differential Revision: https://reviews.facebook.net/D1521--git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1298641 13f79535-47bb-0310-9956-ffa450edef68-
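Point 1 of the summary is concrete enough to sketch: per the FixedFileTrailer javadoc in the diff below, the low three bytes of the trailer's final serialized int carry the major version and the high byte carries the minor version. The helper names here mirror the materializeVersion/extractMajorVersion/extractMinorVersion methods the patch calls; the exact masks are inferred from that javadoc rather than copied from the patch:

```java
public class TrailerVersionSketch {
    // Pack a (major, minor) pair into the trailer's final four bytes:
    // minor in the high byte, major in the low three bytes.
    static int materializeVersion(int majorVersion, int minorVersion) {
        return (majorVersion & 0x00ffffff) | (minorVersion << 24);
    }

    static int extractMajorVersion(int serializedVersion) {
        return serializedVersion & 0x00ffffff;
    }

    static int extractMinorVersion(int serializedVersion) {
        return serializedVersion >>> 24;
    }

    public static void main(String[] args) {
        int packed = materializeVersion(2, 1);
        System.out.println(extractMajorVersion(packed)); // 2
        System.out.println(extractMinorVersion(packed)); // 1
        // A pre-existing v2 hfile wrote the plain int 2, which this scheme
        // reads back as major=2, minor=0 -- matching the summary's claim
        // that old files decode to minor version 0.
        System.out.println(extractMinorVersion(2)); // 0
    }
}
```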
a
https://github.com/apache/hbase
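Before the diff itself, a hedged usage sketch of the configuration knobs the patch adds to HConstants. The key strings come straight from the HConstants hunk; the "CRC32" algorithm name and the concrete values are illustrative assumptions, not part of the patch:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class ChecksumConfigSketch {
    public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // Turn hbase-level checksum verification on (the patch's default),
        // which lets HFileSystem switch hdfs-level verification off for reads.
        conf.setBoolean("hbase.regionserver.checksum.verify", true);
        // One 4-byte checksum per 16 K of data, the chunk size quoted above.
        conf.setInt("hbase.hstore.bytes.per.checksum", 16 * 1024);
        // Algorithm for newly written blocks; "CRC32" is an assumed value.
        conf.set("hbase.hstore.checksum.algorithm", "CRC32");
        System.out.println(conf.get("hbase.hstore.bytes.per.checksum"));
    }
}
```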
diff --git a/src/main/java/org/apache/hadoop/hbase/HConstants.java b/src/main/java/org/apache/hadoop/hbase/HConstants.java index 3d7a23bad9a2..92ead028c8d9 100644 --- a/src/main/java/org/apache/hadoop/hbase/HConstants.java +++ b/src/main/java/org/apache/hadoop/hbase/HConstants.java @@ -304,7 +304,7 @@ public enum OperationStatusCode { /** The regioninfo column qualifier */ public static final byte [] REGIONINFO_QUALIFIER = - Bytes.toBytes(REGIONINFO_QUALIFIER_STR); + Bytes.toBytes(REGIONINFO_QUALIFIER_STR); /** The server column qualifier */ public static final byte [] SERVER_QUALIFIER = Bytes.toBytes("server"); @@ -610,6 +610,35 @@ public static enum Modify { /** Host name of the local machine */ public static final String LOCALHOST = "localhost"; + /** + * If this parameter is set to true, then hbase will read + * data and then verify checksums. Checksum verification + * inside hdfs will be switched off. However, if the hbase-checksum + * verification fails, then it will switch back to using + * hdfs checksums for verifiying data that is being read from storage. + * + * If this parameter is set to false, then hbase will not + * verify any checksums, instead it will depend on checksum verification + * being done in the hdfs client. + */ + public static final String HBASE_CHECKSUM_VERIFICATION = + "hbase.regionserver.checksum.verify"; + + /** + * The name of the configuration parameter that specifies + * the number of bytes in a newly created checksum chunk. + */ + public static final String BYTES_PER_CHECKSUM = + "hbase.hstore.bytes.per.checksum"; + + /** + * The name of the configuration parameter that specifies + * the name of an algorithm that is used to compute checksums + * for newly created blocks. + */ + public static final String CHECKSUM_TYPE_NAME = + "hbase.hstore.checksum.algorithm"; + private HConstants() { // Can't be instantiated with this ctor. } diff --git a/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java b/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java new file mode 100644 index 000000000000..d6a47053d3c8 --- /dev/null +++ b/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java @@ -0,0 +1,177 @@ +/* + * Copyright The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hbase.fs; + +import java.io.IOException; +import java.net.URI; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.FilterFileSystem; +import org.apache.hadoop.fs.FSDataInputStream; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.util.ReflectionUtils; +import org.apache.hadoop.util.Progressable; + +/** + * An encapsulation for the FileSystem object that hbase uses to access + * data. This class allows the flexibility of using + * separate filesystem objects for reading and writing hfiles and hlogs. + * In future, if we want to make hlogs be in a different filesystem, + * this is the place to make it happen. + */ +public class HFileSystem extends FilterFileSystem { + + private final FileSystem noChecksumFs; // read hfile data from storage + private final boolean useHBaseChecksum; + + /** + * Create a FileSystem object for HBase regionservers. + * @param conf The configuration to be used for the filesystem + * @param useHBaseChecksums if true, then use + * checksum verfication in hbase, otherwise + * delegate checksum verification to the FileSystem. + */ + public HFileSystem(Configuration conf, boolean useHBaseChecksum) + throws IOException { + + // Create the default filesystem with checksum verification switched on. + // By default, any operation to this FilterFileSystem occurs on + // the underlying filesystem that has checksums switched on. + this.fs = FileSystem.get(conf); + this.useHBaseChecksum = useHBaseChecksum; + + fs.initialize(getDefaultUri(conf), conf); + + // If hbase checksum verification is switched on, then create a new + // filesystem object that has cksum verification turned off. + // We will avoid verifying checksums in the fs client, instead do it + // inside of hbase. + if (useHBaseChecksum) { + this.noChecksumFs = newInstanceFileSystem(conf); + this.noChecksumFs.setVerifyChecksum(false); + } else { + this.noChecksumFs = fs; + } + } + + /** + * Wrap a FileSystem object within a HFileSystem. The noChecksumFs and + * writefs are both set to be the same specified fs. + * Do not verify hbase-checksums while reading data from filesystem. + * @param fs Set the noChecksumFs and writeFs to this specified filesystem. + */ + public HFileSystem(FileSystem fs) { + this.fs = fs; + this.noChecksumFs = fs; + this.useHBaseChecksum = false; + } + + /** + * Returns the filesystem that is specially setup for + * doing reads from storage. This object avoids doing + * checksum verifications for reads. + * @return The FileSystem object that can be used to read data + * from files. + */ + public FileSystem getNoChecksumFs() { + return noChecksumFs; + } + + /** + * Returns the underlying filesystem + * @return The underlying FileSystem for this FilterFileSystem object. + */ + public FileSystem getBackingFs() throws IOException { + return fs; + } + + /** + * Are we verifying checksums in HBase? + * @return True, if hbase is configured to verify checksums, + * otherwise false. + */ + public boolean useHBaseChecksum() { + return useHBaseChecksum; + } + + /** + * Close this filesystem object + */ + @Override + public void close() throws IOException { + super.close(); + if (this.noChecksumFs != fs) { + this.noChecksumFs.close(); + } + } + + /** + * Returns a brand new instance of the FileSystem. It does not use + * the FileSystem.Cache. In newer versions of HDFS, we can directly + * invoke FileSystem.newInstance(Configuration). 
+ * + * @param conf Configuration + * @return A new instance of the filesystem + */ + private static FileSystem newInstanceFileSystem(Configuration conf) + throws IOException { + URI uri = FileSystem.getDefaultUri(conf); + Class<?> clazz = conf.getClass("fs." + uri.getScheme() + ".impl", null); + if (clazz == null) { + throw new IOException("No FileSystem for scheme: " + uri.getScheme()); + } + FileSystem fs = (FileSystem)ReflectionUtils.newInstance(clazz, conf); + fs.initialize(uri, conf); + return fs; + } + + /** + * Create a new HFileSystem object, similar to FileSystem.get(). + * This returns a filesystem object that avoids checksum + * verification in the filesystem for hfileblock-reads. + * For these blocks, checksum verification is done by HBase. + */ + static public FileSystem get(Configuration conf) throws IOException { + return new HFileSystem(conf, true); + } + + /** + * Wrap a LocalFileSystem within a HFileSystem. + */ + static public FileSystem getLocalFs(Configuration conf) throws IOException { + return new HFileSystem(FileSystem.getLocal(conf)); + } + + /** + * The org.apache.hadoop.fs.FilterFileSystem does not yet support + * createNonRecursive. This is a hadoop bug and when it is fixed in Hadoop, + * this definition will go away. + */ + public FSDataOutputStream createNonRecursive(Path f, + boolean overwrite, + int bufferSize, short replication, long blockSize, + Progressable progress) throws IOException { + return fs.createNonRecursive(f, overwrite, bufferSize, replication, + blockSize, progress); + } +} diff --git a/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java b/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java index 1f756b00c40e..20d7b49860fc 100644 --- a/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java +++ b/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java @@ -27,6 +27,7 @@ import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo; import org.apache.hadoop.hbase.regionserver.metrics.SchemaConfigured; @@ -42,9 +43,13 @@ public abstract class AbstractHFileReader extends SchemaConfigured /** Filesystem-level block reader for this HFile format version. */ protected HFileBlock.FSReader fsBlockReader; - /** Stream to read from. */ + /** Stream to read from. Does checksum verifications in file system */ protected FSDataInputStream istream; + /** The file system stream of the underlying {@link HFile} that + * does not do checksum verification in the file system */ + protected FSDataInputStream istreamNoFsChecksum; + /** * True if we should close the input stream when done. We don't close it if we * didn't open it. 
@@ -99,10 +104,21 @@ public abstract class AbstractHFileReader extends SchemaConfigured protected FileInfo fileInfo; + /** The filesystem used for accesing data */ + protected HFileSystem hfs; + protected AbstractHFileReader(Path path, FixedFileTrailer trailer, final FSDataInputStream fsdis, final long fileSize, final boolean closeIStream, final CacheConfig cacheConf) { + this(path, trailer, fsdis, fsdis, fileSize, closeIStream, cacheConf, null); + } + + protected AbstractHFileReader(Path path, FixedFileTrailer trailer, + final FSDataInputStream fsdis, final FSDataInputStream fsdisNoFsChecksum, + final long fileSize, + final boolean closeIStream, + final CacheConfig cacheConf, final HFileSystem hfs) { super(null, path); this.trailer = trailer; this.compressAlgo = trailer.getCompressionCodec(); @@ -112,6 +128,8 @@ protected AbstractHFileReader(Path path, FixedFileTrailer trailer, this.closeIStream = closeIStream; this.path = path; this.name = path.getName(); + this.hfs = hfs; + this.istreamNoFsChecksum = fsdisNoFsChecksum; } @SuppressWarnings("serial") @@ -343,5 +361,4 @@ public Path getPath() { public DataBlockEncoding getEncodingOnDisk() { return dataBlockEncoder.getEncodingOnDisk(); } - } diff --git a/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java b/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java new file mode 100644 index 000000000000..4ef1be71998b --- /dev/null +++ b/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java @@ -0,0 +1,233 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.io.hfile; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.zip.Checksum; + +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.io.DataOutputBuffer; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.ChecksumFactory; +import org.apache.hadoop.hbase.util.ChecksumType; + +/** + * Utility methods to compute and validate checksums. + */ +public class ChecksumUtil { + + /** This is used to reserve space in a byte buffer */ + private static byte[] DUMMY_VALUE = new byte[128 * HFileBlock.CHECKSUM_SIZE]; + + /** + * This is used by unit tests to make checksum failures throw an + * exception instead of returning null. Returning a null value from + * checksum validation will cause the higher layer to retry that + * read with hdfs-level checksums. Instead, we would like checksum + * failures to cause the entire unit test to fail. + */ + private static boolean generateExceptions = false; + + /** + * Generates a checksum for all the data in indata. The checksum is + * written to outdata. 
+ * @param indata input data stream + * @param startOffset starting offset in the indata stream from where to + * compute checkums from + * @param endOffset ending offset in the indata stream upto + * which checksums needs to be computed + * @param outData the output buffer where checksum values are written + * @param outOffset the starting offset in the outdata where the + * checksum values are written + * @param checksumType type of checksum + * @param bytesPerChecksum number of bytes per checksum value + */ + static void generateChecksums(byte[] indata, + int startOffset, int endOffset, + byte[] outdata, int outOffset, + ChecksumType checksumType, + int bytesPerChecksum) throws IOException { + + if (checksumType == ChecksumType.NULL) { + return; // No checkums for this block. + } + + Checksum checksum = checksumType.getChecksumObject(); + int bytesLeft = endOffset - startOffset; + int chunkNum = 0; + + while (bytesLeft > 0) { + // generate the checksum for one chunk + checksum.reset(); + int count = Math.min(bytesLeft, bytesPerChecksum); + checksum.update(indata, startOffset, count); + + // write the checksum value to the output buffer. + int cksumValue = (int)checksum.getValue(); + outOffset = Bytes.putInt(outdata, outOffset, cksumValue); + chunkNum++; + startOffset += count; + bytesLeft -= count; + } + } + + /** + * Validates that the data in the specified HFileBlock matches the + * checksum. Generates the checksum for the data and + * then validate that it matches the value stored in the header. + * If there is a checksum mismatch, then return false. Otherwise + * return true. + * The header is extracted from the specified HFileBlock while the + * data-to-be-verified is extracted from 'data'. + */ + static boolean validateBlockChecksum(Path path, HFileBlock block, + byte[] data, int hdrSize) throws IOException { + + // If this is an older version of the block that does not have + // checksums, then return false indicating that checksum verification + // did not succeed. Actually, this methiod should never be called + // when the minorVersion is 0, thus this is a defensive check for a + // cannot-happen case. Since this is a cannot-happen case, it is + // better to return false to indicate a checksum validation failure. + if (block.getMinorVersion() < HFileBlock.MINOR_VERSION_WITH_CHECKSUM) { + return false; + } + + // Get a checksum object based on the type of checksum that is + // set in the HFileBlock header. A ChecksumType.NULL indicates that + // the caller is not interested in validating checksums, so we + // always return true. + ChecksumType cktype = ChecksumType.codeToType(block.getChecksumType()); + if (cktype == ChecksumType.NULL) { + return true; // No checkums validations needed for this block. + } + Checksum checksumObject = cktype.getChecksumObject(); + checksumObject.reset(); + + // read in the stored value of the checksum size from the header. + int bytesPerChecksum = block.getBytesPerChecksum(); + + // bytesPerChecksum is always larger than the size of the header + if (bytesPerChecksum < hdrSize) { + String msg = "Unsupported value of bytesPerChecksum. " + + " Minimum is " + hdrSize + + " but the configured value is " + bytesPerChecksum; + HFile.LOG.warn(msg); + return false; // cannot happen case, unable to verify checksum + } + // Extract the header and compute checksum for the header. 
+ ByteBuffer hdr = block.getBufferWithHeader(); + checksumObject.update(hdr.array(), hdr.arrayOffset(), hdrSize); + + int off = hdrSize; + int consumed = hdrSize; + int bytesLeft = block.getOnDiskDataSizeWithHeader() - off; + int cksumOffset = block.getOnDiskDataSizeWithHeader(); + + // validate each chunk + while (bytesLeft > 0) { + int thisChunkSize = bytesPerChecksum - consumed; + int count = Math.min(bytesLeft, thisChunkSize); + checksumObject.update(data, off, count); + + int storedChecksum = Bytes.toInt(data, cksumOffset); + if (storedChecksum != (int)checksumObject.getValue()) { + String msg = "File " + path + + " Stored checksum value of " + storedChecksum + + " at offset " + cksumOffset + + " does not match computed checksum " + + checksumObject.getValue() + + ", total data size " + data.length + + " Checksum data range offset " + off + " len " + count + + HFileBlock.toStringHeader(block.getBufferReadOnly()); + HFile.LOG.warn(msg); + if (generateExceptions) { + throw new IOException(msg); // this is only for unit tests + } else { + return false; // checksum validation failure + } + } + cksumOffset += HFileBlock.CHECKSUM_SIZE; + bytesLeft -= count; + off += count; + consumed = 0; + checksumObject.reset(); + } + return true; // checksum is valid + } + + /** + * Returns the number of bytes needed to store the checksums for + * a specified data size + * @param datasize number of bytes of data + * @param bytesPerChecksum number of bytes in a checksum chunk + * @return The number of bytes needed to store the checksum values + */ + static long numBytes(long datasize, int bytesPerChecksum) { + return numChunks(datasize, bytesPerChecksum) * + HFileBlock.CHECKSUM_SIZE; + } + + /** + * Returns the number of checksum chunks needed to store the checksums for + * a specified data size + * @param datasize number of bytes of data + * @param bytesPerChecksum number of bytes in a checksum chunk + * @return The number of checksum chunks + */ + static long numChunks(long datasize, int bytesPerChecksum) { + long numChunks = datasize/bytesPerChecksum; + if (datasize % bytesPerChecksum != 0) { + numChunks++; + } + return numChunks; + } + + /** + * Write dummy checksums to the end of the specified bytes array + * to reserve space for writing checksums later + * @param baos OutputStream to write dummy checkum values + * @param numBytes Number of bytes of data for which dummy checksums + * need to be generated + * @param bytesPerChecksum Number of bytes per checksum value + */ + static void reserveSpaceForChecksums(ByteArrayOutputStream baos, + int numBytes, int bytesPerChecksum) throws IOException { + long numChunks = numChunks(numBytes, bytesPerChecksum); + long bytesLeft = numChunks * HFileBlock.CHECKSUM_SIZE; + while (bytesLeft > 0) { + long count = Math.min(bytesLeft, DUMMY_VALUE.length); + baos.write(DUMMY_VALUE, 0, (int)count); + bytesLeft -= count; + } + } + + /** + * Mechanism to throw an exception in case of hbase checksum + * failure. This is used by unit tests only. + * @param value Setting this to true will cause hbase checksum + * verification failures to generate exceptions. 
+   */ +  public static void generateExceptionForChecksumFailureForTest(boolean value) { +    generateExceptions = value; +  } +} + diff --git a/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java b/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java index efd145dc8b2e..086da3c33266 100644 --- a/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java +++ b/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java @@ -44,6 +44,13 @@ * variable parts of the file. Also includes basic metadata on this file. The * trailer size is fixed within a given {@link HFile} format version only, but * we always store the version number as the last four-byte integer of the file. + * The version number itself is split into two portions, a major + * version and a minor version. + * The last three bytes of the file are the major + * version and the single preceding byte is the minor version. The major version + * determines which readers/writers to use to read/write an hfile while the minor + * version indicates smaller changes in the hfile format that do not need a new + * reader/writer type. */ @InterfaceAudience.Private public class FixedFileTrailer { @@ -108,12 +115,16 @@ public class FixedFileTrailer { /** Raw key comparator class name in version 2 */ private String comparatorClassName = RawComparator.class.getName(); - /** The {@link HFile} format version. */ - private final int version; + /** The {@link HFile} format major version. */ + private final int majorVersion; - FixedFileTrailer(int version) { - this.version = version; - HFile.checkFormatVersion(version); + /** The {@link HFile} format minor version. */ + private final int minorVersion; + + FixedFileTrailer(int majorVersion, int minorVersion) { + this.majorVersion = majorVersion; + this.minorVersion = minorVersion; + HFile.checkFormatVersion(majorVersion); } private static int[] computeTrailerSizeByVersion() { @@ -121,7 +132,8 @@ private static int[] computeTrailerSizeByVersion() { for (int version = MIN_FORMAT_VERSION; version <= MAX_FORMAT_VERSION; ++version) { - FixedFileTrailer fft = new FixedFileTrailer(version); + FixedFileTrailer fft = new FixedFileTrailer(version, + HFileBlock.MINOR_VERSION_NO_CHECKSUM); DataOutputStream dos = new DataOutputStream(new NullOutputStream()); try { fft.serialize(dos); @@ -151,7 +163,7 @@ static int getTrailerSize(int version) { } public int getTrailerSize() { - return getTrailerSize(version); + return getTrailerSize(majorVersion); } /** @@ -163,7 +175,7 @@ public int getTrailerSize() { * @throws IOException */ void serialize(DataOutputStream outputStream) throws IOException { - HFile.checkFormatVersion(version); + HFile.checkFormatVersion(majorVersion); ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutput baosDos = new DataOutputStream(baos); @@ -173,7 +185,7 @@ void serialize(DataOutputStream outputStream) throws IOException { baosDos.writeLong(loadOnOpenDataOffset); baosDos.writeInt(dataIndexCount); - if (version == 1) { + if (majorVersion == 1) { // This used to be metaIndexOffset, but it was not used in version 1. baosDos.writeLong(0); } else { @@ -182,7 +194,7 @@ void serialize(DataOutputStream outputStream) throws IOException { baosDos.writeInt(metaIndexCount); baosDos.writeLong(totalUncompressedBytes); - if (version == 1) { + if (majorVersion == 1) { baosDos.writeInt((int) Math.min(Integer.MAX_VALUE, entryCount)); } else { // This field is long from version 2 onwards.
@@ -190,14 +202,16 @@ void serialize(DataOutputStream outputStream) throws IOException { } baosDos.writeInt(compressionCodec.ordinal()); - if (version > 1) { + if (majorVersion > 1) { baosDos.writeInt(numDataIndexLevels); baosDos.writeLong(firstDataBlockOffset); baosDos.writeLong(lastDataBlockOffset); Bytes.writeStringFixedSize(baosDos, comparatorClassName, MAX_COMPARATOR_NAME_LENGTH); } - baosDos.writeInt(version); + + // serialize the major and minor versions + baosDos.writeInt(materializeVersion(majorVersion, minorVersion)); outputStream.write(baos.toByteArray()); } @@ -212,7 +226,7 @@ void serialize(DataOutputStream outputStream) throws IOException { * @throws IOException */ void deserialize(DataInputStream inputStream) throws IOException { - HFile.checkFormatVersion(version); + HFile.checkFormatVersion(majorVersion); BlockType.TRAILER.readAndCheck(inputStream); @@ -220,7 +234,7 @@ void deserialize(DataInputStream inputStream) throws IOException { loadOnOpenDataOffset = inputStream.readLong(); dataIndexCount = inputStream.readInt(); - if (version == 1) { + if (majorVersion == 1) { inputStream.readLong(); // Read and skip metaIndexOffset. } else { uncompressedDataIndexSize = inputStream.readLong(); @@ -228,9 +242,9 @@ void deserialize(DataInputStream inputStream) throws IOException { metaIndexCount = inputStream.readInt(); totalUncompressedBytes = inputStream.readLong(); - entryCount = version == 1 ? inputStream.readInt() : inputStream.readLong(); + entryCount = majorVersion == 1 ? inputStream.readInt() : inputStream.readLong(); compressionCodec = Compression.Algorithm.values()[inputStream.readInt()]; - if (version > 1) { + if (majorVersion > 1) { numDataIndexLevels = inputStream.readInt(); firstDataBlockOffset = inputStream.readLong(); lastDataBlockOffset = inputStream.readLong(); @@ -238,7 +252,9 @@ void deserialize(DataInputStream inputStream) throws IOException { Bytes.readStringFixedSize(inputStream, MAX_COMPARATOR_NAME_LENGTH); } - expectVersion(inputStream.readInt()); + int version = inputStream.readInt(); + expectMajorVersion(extractMajorVersion(version)); + expectMinorVersion(extractMinorVersion(version)); } private void append(StringBuilder sb, String s) { @@ -257,14 +273,15 @@ public String toString() { append(sb, "totalUncomressedBytes=" + totalUncompressedBytes); append(sb, "entryCount=" + entryCount); append(sb, "compressionCodec=" + compressionCodec); - if (version == 2) { + if (majorVersion == 2) { append(sb, "uncompressedDataIndexSize=" + uncompressedDataIndexSize); append(sb, "numDataIndexLevels=" + numDataIndexLevels); append(sb, "firstDataBlockOffset=" + firstDataBlockOffset); append(sb, "lastDataBlockOffset=" + lastDataBlockOffset); append(sb, "comparatorClassName=" + comparatorClassName); } - append(sb, "version=" + version); + append(sb, "majorVersion=" + majorVersion); + append(sb, "minorVersion=" + minorVersion); return sb.toString(); } @@ -301,31 +318,44 @@ public static FixedFileTrailer readFromStream(FSDataInputStream istream, buf.position(buf.limit() - Bytes.SIZEOF_INT); int version = buf.getInt(); + // Extract the major and minor versions. + int majorVersion = extractMajorVersion(version); + int minorVersion = extractMinorVersion(version); + try { - HFile.checkFormatVersion(version); + HFile.checkFormatVersion(majorVersion); } catch (IllegalArgumentException iae) { // In this context, an invalid version might indicate a corrupt HFile. 
throw new IOException(iae); } - int trailerSize = getTrailerSize(version); + int trailerSize = getTrailerSize(majorVersion); - FixedFileTrailer fft = new FixedFileTrailer(version); + FixedFileTrailer fft = new FixedFileTrailer(majorVersion, minorVersion); fft.deserialize(new DataInputStream(new ByteArrayInputStream(buf.array(), buf.arrayOffset() + bufferSize - trailerSize, trailerSize))); return fft; } - public void expectVersion(int expected) { - if (version != expected) { - throw new IllegalArgumentException("Invalid HFile version: " + version + public void expectMajorVersion(int expected) { + if (majorVersion != expected) { + throw new IllegalArgumentException("Invalid HFile major version: " + + majorVersion + " (expected: " + expected + ")"); } } - public void expectAtLeastVersion(int lowerBound) { - if (version < lowerBound) { - throw new IllegalArgumentException("Invalid HFile version: " + version + public void expectMinorVersion(int expected) { + if (minorVersion != expected) { + throw new IllegalArgumentException("Invalid HFile minor version: " + + minorVersion + " (expected: " + expected + ")"); + } + } + + public void expectAtLeastMajorVersion(int lowerBound) { + if (majorVersion < lowerBound) { + throw new IllegalArgumentException("Invalid HFile major version: " + + majorVersion + " (expected: " + lowerBound + " or higher)."); } } @@ -375,11 +405,11 @@ public long getEntryCount() { } public void setEntryCount(long newEntryCount) { - if (version == 1) { + if (majorVersion == 1) { int intEntryCount = (int) Math.min(Integer.MAX_VALUE, newEntryCount); if (intEntryCount != newEntryCount) { LOG.info("Warning: entry count is " + newEntryCount + " but writing " - + intEntryCount + " into the version " + version + " trailer"); + + intEntryCount + " into the version " + majorVersion + " trailer"); } entryCount = intEntryCount; return; @@ -396,42 +426,52 @@ public void setCompressionCodec(Compression.Algorithm compressionCodec) { } public int getNumDataIndexLevels() { - expectAtLeastVersion(2); + expectAtLeastMajorVersion(2); return numDataIndexLevels; } public void setNumDataIndexLevels(int numDataIndexLevels) { - expectAtLeastVersion(2); + expectAtLeastMajorVersion(2); this.numDataIndexLevels = numDataIndexLevels; } public long getLastDataBlockOffset() { - expectAtLeastVersion(2); + expectAtLeastMajorVersion(2); return lastDataBlockOffset; } public void setLastDataBlockOffset(long lastDataBlockOffset) { - expectAtLeastVersion(2); + expectAtLeastMajorVersion(2); this.lastDataBlockOffset = lastDataBlockOffset; } public long getFirstDataBlockOffset() { - expectAtLeastVersion(2); + expectAtLeastMajorVersion(2); return firstDataBlockOffset; } public void setFirstDataBlockOffset(long firstDataBlockOffset) { - expectAtLeastVersion(2); + expectAtLeastMajorVersion(2); this.firstDataBlockOffset = firstDataBlockOffset; } - public int getVersion() { - return version; + /** + * Returns the major version of this HFile format + */ + public int getMajorVersion() { + return majorVersion; + } + + /** + * Returns the minor version of this HFile format + */ + int getMinorVersion() { + return minorVersion; } @SuppressWarnings("rawtypes") public void setComparatorClass(Class<? 
extends RawComparator> klass) { - expectAtLeastVersion(2); + expectAtLeastMajorVersion(2); comparatorClassName = klass.getName(); } @@ -458,20 +498,43 @@ public static RawComparator<byte[]> createComparator( } RawComparator<byte[]> createComparator() throws IOException { - expectAtLeastVersion(2); + expectAtLeastMajorVersion(2); return createComparator(comparatorClassName); } public long getUncompressedDataIndexSize() { - if (version == 1) + if (majorVersion == 1) return 0; return uncompressedDataIndexSize; } public void setUncompressedDataIndexSize( long uncompressedDataIndexSize) { - expectAtLeastVersion(2); + expectAtLeastMajorVersion(2); this.uncompressedDataIndexSize = uncompressedDataIndexSize; } + /** +   * Extracts the major version from the 4-byte serialized version data. +   * The major version occupies the three least significant bytes +   */ +  private static int extractMajorVersion(int serializedVersion) { +    return (serializedVersion & 0x00ffffff); +  } + +  /** +   * Extracts the minor version from the 4-byte serialized version data. +   * The minor version is the single most significant byte +   */ +  private static int extractMinorVersion(int serializedVersion) { +    return (serializedVersion >>> 24); +  } + +  /** +   * Create a 4 byte serialized version number by combining the +   * minor and major version numbers (e.g. major 2, minor 1 +   * serializes to 0x01000002). +   */ +  private static int materializeVersion(int majorVersion, int minorVersion) { +    return ((majorVersion & 0x00ffffff) | (minorVersion << 24)); +  } } diff --git a/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java b/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java index 07cf830d5262..1a4f7f1d3afb 100644 --- a/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java +++ b/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java @@ -43,10 +43,12 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KeyComparator; +import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.HbaseMapWritable; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics; import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics.SchemaAware; +import org.apache.hadoop.hbase.util.ChecksumType; import org.apache.hadoop.hbase.util.BloomFilterWriter; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.FSUtils; @@ -156,6 +158,12 @@ public class HFile { */ public final static int MIN_NUM_HFILE_PATH_LEVELS = 5; + /** +   * The default number of bytes per checksum chunk. +   */ +  public static final int DEFAULT_BYTES_PER_CHECKSUM = 16 * 1024; +  public static final ChecksumType DEFAULT_CHECKSUM_TYPE = ChecksumType.CRC32; + // For measuring latency of "sequential" reads and writes static final AtomicInteger readOps = new AtomicInteger(); static final AtomicLong readTimeNano = new AtomicLong(); @@ -166,6 +174,9 @@ public class HFile { static final AtomicInteger preadOps = new AtomicInteger(); static final AtomicLong preadTimeNano = new AtomicLong(); + // For measuring number of checksum failures + static final AtomicLong checksumFailures = new AtomicLong(); + // for test purpose public static volatile AtomicLong dataBlockReadCnt = new AtomicLong(0); @@ -195,6 +206,14 @@ public static final long getWriteTimeMs() { return writeTimeNano.getAndSet(0) / 1000000; } + /** +   * Number of checksum verification failures. It also +   * clears the counter.
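+   * The read and the reset happen atomically via checksumFailures.getAndSet(0).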
+ */ + public static final long getChecksumFailuresCount() { + return checksumFailures.getAndSet(0); + } + /** API required to write an {@link HFile} */ public interface Writer extends Closeable { @@ -247,6 +266,8 @@ public static abstract class WriterFactory { HFile.DEFAULT_COMPRESSION_ALGORITHM; protected HFileDataBlockEncoder encoder = NoOpDataBlockEncoder.INSTANCE; protected KeyComparator comparator; + protected ChecksumType checksumType = HFile.DEFAULT_CHECKSUM_TYPE; + protected int bytesPerChecksum = DEFAULT_BYTES_PER_CHECKSUM; WriterFactory(Configuration conf, CacheConfig cacheConf) { this.conf = conf; @@ -296,6 +317,17 @@ public WriterFactory withComparator(KeyComparator comparator) { return this; } + public WriterFactory withChecksumType(ChecksumType checksumType) { + Preconditions.checkNotNull(checksumType); + this.checksumType = checksumType; + return this; + } + + public WriterFactory withBytesPerChecksum(int bytesPerChecksum) { + this.bytesPerChecksum = bytesPerChecksum; + return this; + } + public Writer create() throws IOException { if ((path != null ? 1 : 0) + (ostream != null ? 1 : 0) != 1) { throw new AssertionError("Please specify exactly one of " + @@ -305,14 +337,15 @@ public Writer create() throws IOException { ostream = AbstractHFileWriter.createOutputStream(conf, fs, path); } return createWriter(fs, path, ostream, blockSize, - compression, encoder, comparator); + compression, encoder, comparator, checksumType, bytesPerChecksum); } protected abstract Writer createWriter(FileSystem fs, Path path, FSDataOutputStream ostream, int blockSize, Compression.Algorithm compress, HFileDataBlockEncoder dataBlockEncoder, - KeyComparator comparator) throws IOException; + KeyComparator comparator, ChecksumType checksumType, + int bytesPerChecksum) throws IOException; } /** The configuration key for HFile version to use for new files */ @@ -431,20 +464,22 @@ ByteBuffer getMetaBlock(String metaBlockName, } private static Reader pickReaderVersion(Path path, FSDataInputStream fsdis, + FSDataInputStream fsdisNoFsChecksum, long size, boolean closeIStream, CacheConfig cacheConf, - DataBlockEncoding preferredEncodingInCache) + DataBlockEncoding preferredEncodingInCache, HFileSystem hfs) throws IOException { FixedFileTrailer trailer = FixedFileTrailer.readFromStream(fsdis, size); - switch (trailer.getVersion()) { + switch (trailer.getMajorVersion()) { case 1: return new HFileReaderV1(path, trailer, fsdis, size, closeIStream, cacheConf); case 2: - return new HFileReaderV2(path, trailer, fsdis, size, closeIStream, - cacheConf, preferredEncodingInCache); + return new HFileReaderV2(path, trailer, fsdis, fsdisNoFsChecksum, + size, closeIStream, + cacheConf, preferredEncodingInCache, hfs); default: throw new IOException("Cannot instantiate reader for HFile version " + - trailer.getVersion()); + trailer.getMajorVersion()); } } @@ -452,9 +487,26 @@ public static Reader createReaderWithEncoding( FileSystem fs, Path path, CacheConfig cacheConf, DataBlockEncoding preferredEncodingInCache) throws IOException { final boolean closeIStream = true; - return pickReaderVersion(path, fs.open(path), + HFileSystem hfs = null; + FSDataInputStream fsdis = fs.open(path); + FSDataInputStream fsdisNoFsChecksum = fsdis; + // If the fs is not an instance of HFileSystem, then create an + // instance of HFileSystem that wraps over the specified fs. + // In this case, we will not be able to avoid checksumming inside + // the filesystem. 
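+    // When HBase-level checksums can be verified, reads go through the no-checksum stream so that the filesystem does not repeat the verification; on an hbase checksum mismatch the reader falls back to the checksummed stream (see FSReaderV2.readBlockData).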
+ if (!(fs instanceof HFileSystem)) { + hfs = new HFileSystem(fs); + } else { + hfs = (HFileSystem)fs; + // open a stream to read data without checksum verification in + // the filesystem + if (hfs != null) { + fsdisNoFsChecksum = hfs.getNoChecksumFs().open(path); + } + } + return pickReaderVersion(path, fsdis, fsdisNoFsChecksum, fs.getFileStatus(path).getLen(), closeIStream, cacheConf, - preferredEncodingInCache); + preferredEncodingInCache, hfs); } public static Reader createReader( @@ -463,12 +515,15 @@ public static Reader createReader( DataBlockEncoding.NONE); } - public static Reader createReaderFromStream(Path path, + /** + * This factory method is used only by unit tests + */ + static Reader createReaderFromStream(Path path, FSDataInputStream fsdis, long size, CacheConfig cacheConf) throws IOException { final boolean closeIStream = false; - return pickReaderVersion(path, fsdis, size, closeIStream, cacheConf, - DataBlockEncoding.NONE); + return pickReaderVersion(path, fsdis, fsdis, size, closeIStream, cacheConf, + DataBlockEncoding.NONE, null); } /* diff --git a/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java b/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java index 093af408537a..8a31b3708435 100644 --- a/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java +++ b/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java @@ -29,17 +29,23 @@ import java.io.IOException; import java.io.InputStream; import java.nio.ByteBuffer; +import java.util.zip.Checksum; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm; import org.apache.hadoop.hbase.regionserver.MemStore; import org.apache.hadoop.hbase.regionserver.metrics.SchemaConfigured; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.ChecksumType; import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.hbase.util.CompoundBloomFilter; +import org.apache.hadoop.hbase.util.ChecksumFactory; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.Writables; import org.apache.hadoop.io.IOUtils; @@ -66,6 +72,12 @@ * <li>Uncompressed block size, header not included (4 bytes) * <li>The offset of the previous block of the same type (8 bytes). This is * used to be able to navigate to the previous block without going to the block + * <li>For minorVersions >=1, there is an additional 4 byte field + * bytesPerChecksum that records the number of bytes in a checksum chunk. + * <li>For minorVersions >=1, there is a 4 byte value to store the size of + * data on disk (excluding the checksums) + * <li>For minorVersions >=1, a series of 4 byte checksums, one each for + * the number of bytes specified by bytesPerChecksum. * index. * <li>Compressed data (or uncompressed data if compression is disabled). 
The * compression algorithm is the same for all the blocks in the {@link HFile}, @@ -78,12 +90,32 @@ @InterfaceAudience.Private public class HFileBlock extends SchemaConfigured implements Cacheable { + /** Minor versions starting with this number have hbase checksums */ +  static final int MINOR_VERSION_WITH_CHECKSUM = 1; + +  /** minor version that does not support checksums */ +  static final int MINOR_VERSION_NO_CHECKSUM = 0; + +  /** +   * On a checksum failure on a Reader, this many succeeding read +   * requests switch back to using hdfs checksums before auto-re-enabling +   * hbase checksum verification. +   */ +  static final int CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD = 3; + +  /** The size of the block header for minor version 0 (no checksums) */ +  static final int HEADER_SIZE_NO_CHECKSUM = MAGIC_LENGTH + 2 * Bytes.SIZEOF_INT +      + Bytes.SIZEOF_LONG; + public static final boolean FILL_HEADER = true; public static final boolean DONT_FILL_HEADER = false; - /** The size of a version 2 {@link HFile} block header */ - public static final int HEADER_SIZE = MAGIC_LENGTH + 2 * Bytes.SIZEOF_INT - + Bytes.SIZEOF_LONG; + /** The size of a version 2 {@link HFile} block header, minor version 1. +   * There is a 1 byte checksum type, followed by a 4 byte bytesPerChecksum +   * followed by another 4 byte value to store sizeofDataOnDisk. +   */ +  static final int HEADER_SIZE = HEADER_SIZE_NO_CHECKSUM + Bytes.SIZEOF_BYTE + +                                 2 * Bytes.SIZEOF_INT; /** * The size of block header when blockType is {@link BlockType#ENCODED_DATA}. @@ -93,7 +125,9 @@ public class HFileBlock extends SchemaConfigured implements Cacheable { + DataBlockEncoding.ID_SIZE; /** Just an array of bytes of the right size. */ - public static final byte[] DUMMY_HEADER = new byte[HEADER_SIZE]; + static final byte[] DUMMY_HEADER = new byte[HEADER_SIZE]; + static final byte[] DUMMY_HEADER_NO_CHECKSUM = +     new byte[HEADER_SIZE_NO_CHECKSUM]; public static final int BYTE_BUFFER_HEAP_SIZE = (int) ClassSize.estimateBase( ByteBuffer.wrap(new byte[0], 0, 0).getClass(), false); @@ -101,6 +135,11 @@ public class HFileBlock extends SchemaConfigured implements Cacheable { static final int EXTRA_SERIALIZATION_SPACE = Bytes.SIZEOF_LONG + Bytes.SIZEOF_INT; + /** +   * Each checksum value is an integer that can be stored in 4 bytes. +   */ +  static final int CHECKSUM_SIZE = Bytes.SIZEOF_INT; + private static final CacheableDeserializer<Cacheable> blockDeserializer = new CacheableDeserializer<Cacheable>() { public HFileBlock deserialize(ByteBuffer buf) throws IOException{ @@ -109,7 +148,8 @@ public HFileBlock deserialize(ByteBuffer buf) throws IOException{ buf.limit(buf.limit() - HFileBlock.EXTRA_SERIALIZATION_SPACE).rewind(); newByteBuffer.put(buf); - HFileBlock ourBuffer = new HFileBlock(newByteBuffer); + HFileBlock ourBuffer = new HFileBlock(newByteBuffer, +          MINOR_VERSION_NO_CHECKSUM); buf.position(buf.limit()); buf.limit(buf.limit() + HFileBlock.EXTRA_SERIALIZATION_SPACE); @@ -120,10 +160,32 @@ public HFileBlock deserialize(ByteBuffer buf) throws IOException{ }; private BlockType blockType; + + /** Size on disk without the header. It includes checksum data too. */ private int onDiskSizeWithoutHeader; + + /** Size of pure data.
Does not include header or checksums */ private final int uncompressedSizeWithoutHeader; + + /** The offset of the previous block on disk */ private final long prevBlockOffset; + + /** The type of checksum; stored as a byte rather than an object */ + private final byte checksumType; + + /** The number of bytes for which a checksum is computed */ + private final int bytesPerChecksum; + + /** Size on disk of header and data. Does not include checksum data */ + private final int onDiskDataSizeWithHeader; + + /** The minor version of the hfile. */ + private final int minorVersion; + + /** The in-memory representation of the hfile block */ private ByteBuffer buf; + + /** Whether there is a memstore timestamp after every key/value */ private boolean includesMemstoreTS; /** @@ -142,7 +204,7 @@ public HFileBlock deserialize(ByteBuffer buf) throws IOException{ /** * Creates a new {@link HFile} block from the given fields. This constructor * is mostly used when the block data has already been read and uncompressed, - * and is sitting in a byte buffer. + * and is sitting in a byte buffer. * * @param blockType the type of this block, see {@link BlockType} * @param onDiskSizeWithoutHeader compressed size of the block if compression @@ -157,10 +219,17 @@ public HFileBlock deserialize(ByteBuffer buf) throws IOException{ * @param fillHeader true to fill in the first {@link #HEADER_SIZE} bytes of * the buffer based on the header fields provided * @param offset the file offset the block was read from + * @param minorVersion the minor version of this block + * @param bytesPerChecksum the number of bytes per checksum chunk + * @param checksumType the checksum algorithm to use + * @param onDiskDataSizeWithHeader size of header and data on disk not +   *        including checksum data */ - public HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader, + HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader, long prevBlockOffset, ByteBuffer buf, - boolean fillHeader, long offset, boolean includesMemstoreTS) { + boolean fillHeader, long offset, boolean includesMemstoreTS, +      int minorVersion, int bytesPerChecksum, byte checksumType, +      int onDiskDataSizeWithHeader) { this.blockType = blockType; this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader; this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader; @@ -170,20 +239,37 @@ public HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader, overwriteHeader(); this.offset = offset; this.includesMemstoreTS = includesMemstoreTS; + this.minorVersion = minorVersion; + this.bytesPerChecksum = bytesPerChecksum; + this.checksumType = checksumType; + this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader; } /** * Creates a block from an existing buffer starting with a header. Rewinds * and takes ownership of the buffer. By definition of rewind, ignores the * buffer position, but if you slice the buffer beforehand, it will rewind - * to that point. + * to that point. The reason this takes a minor version and not a major +   * version is that major versions indicate the format of an HFile whereas +   * minor versions indicate the format inside an HFileBlock.
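+   * For a buffer read from a file whose minorVersion is below MINOR_VERSION_WITH_CHECKSUM, the header carries no checksum fields, so checksumType defaults to NULL and bytesPerChecksum to 0.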
*/ - private HFileBlock(ByteBuffer b) throws IOException { + HFileBlock(ByteBuffer b, int minorVersion) throws IOException { b.rewind(); blockType = BlockType.read(b); onDiskSizeWithoutHeader = b.getInt(); uncompressedSizeWithoutHeader = b.getInt(); prevBlockOffset = b.getLong(); + this.minorVersion = minorVersion; + if (minorVersion >= MINOR_VERSION_WITH_CHECKSUM) { +      this.checksumType = b.get(); +      this.bytesPerChecksum = b.getInt(); +      this.onDiskDataSizeWithHeader = b.getInt(); +    } else { +      this.checksumType = ChecksumType.NULL.getCode(); +      this.bytesPerChecksum = 0; +      this.onDiskDataSizeWithHeader = onDiskSizeWithoutHeader + +                                      HEADER_SIZE_NO_CHECKSUM; +    } buf = b; buf.rewind(); } @@ -198,25 +284,26 @@ public short getDataBlockEncodingId() { throw new IllegalArgumentException("Querying encoder ID of a block " + "of type other than " + BlockType.ENCODED_DATA + ": " + blockType); } - return buf.getShort(HEADER_SIZE); + return buf.getShort(headerSize()); } /** - * @return the on-disk size of the block with header size included + * @return the on-disk size of the block with header size included. This +   * includes the header, the data and the checksum data. */ - public int getOnDiskSizeWithHeader() { - return onDiskSizeWithoutHeader + HEADER_SIZE; + int getOnDiskSizeWithHeader() { +    return onDiskSizeWithoutHeader + headerSize(); } /** * Returns the size of the compressed part of the block in case compression * is used, or the uncompressed size of the data part otherwise. Header size - * is not included. + * and checksum data size are not included. * - * @return the on-disk size of the data part of the block, header not - *         included + * @return the on-disk size of the data part of the block, header and +   *         checksum not included. */ - public int getOnDiskSizeWithoutHeader() { + int getOnDiskSizeWithoutHeader() { return onDiskSizeWithoutHeader; } @@ -224,7 +311,7 @@ public int getOnDiskSizeWithoutHeader() { * @return the uncompressed size of the data part of the block, header not * included */ - public int getUncompressedSizeWithoutHeader() { + public int getUncompressedSizeWithoutHeader() { return uncompressedSizeWithoutHeader; } @@ -251,25 +338,27 @@ private void overwriteHeader() { /** * Returns a buffer that does not include the header. The array offset points * to the start of the block data right after the header. The underlying data - * array is not copied. + * array is not copied. Checksum data is not included in the returned buffer. * * @return the buffer with header skipped */ - public ByteBuffer getBufferWithoutHeader() { - return ByteBuffer.wrap(buf.array(), buf.arrayOffset() + HEADER_SIZE, - buf.limit() - HEADER_SIZE).slice(); + ByteBuffer getBufferWithoutHeader() { +    return ByteBuffer.wrap(buf.array(), buf.arrayOffset() + headerSize(), +        buf.limit() - headerSize() - totalChecksumBytes()).slice(); } /** * Returns the buffer this block stores internally. The clients must not * modify the buffer object. This method has to be public because it is * used in {@link CompoundBloomFilter} to avoid object creation on every - * Bloom filter lookup, but has to be used with caution. + * Bloom filter lookup, but has to be used with caution. Checksum data +   * is not included in the returned buffer.
* * @return the buffer of this block for read-only operations */ public ByteBuffer getBufferReadOnly() { - return buf; + return ByteBuffer.wrap(buf.array(), buf.arrayOffset(), + buf.limit() - totalChecksumBytes()).slice(); } /** @@ -278,7 +367,7 @@ public ByteBuffer getBufferReadOnly() { * * @return the byte buffer with header included */ - public ByteBuffer getBufferWithHeader() { + ByteBuffer getBufferWithHeader() { ByteBuffer dupBuf = buf.duplicate(); dupBuf.rewind(); return dupBuf; @@ -288,11 +377,11 @@ public ByteBuffer getBufferWithHeader() { * Deserializes fields of the given writable using the data portion of this * block. Does not check that all the block data has been read. */ - public void readInto(Writable w) throws IOException { + void readInto(Writable w) throws IOException { Preconditions.checkNotNull(w); - if (Writables.getWritable(buf.array(), buf.arrayOffset() + HEADER_SIZE, - buf.limit() - HEADER_SIZE, w) == null) { + if (Writables.getWritable(buf.array(), buf.arrayOffset() + headerSize(), + buf.limit() - headerSize(), w) == null) { throw new IOException("Failed to deserialize block " + this + " into a " + w.getClass().getSimpleName()); } @@ -330,8 +419,17 @@ void sanityCheck() throws IOException { "uncompressedSizeWithoutHeader"); sanityCheckAssertion(buf.getLong(), prevBlockOffset, "prevBlocKOffset"); + if (minorVersion >= MINOR_VERSION_WITH_CHECKSUM) { + sanityCheckAssertion(buf.get(), checksumType, "checksumType"); + sanityCheckAssertion(buf.getInt(), bytesPerChecksum, "bytesPerChecksum"); + sanityCheckAssertion(buf.getInt(), onDiskDataSizeWithHeader, + "onDiskDataSizeWithHeader"); + } - int expectedBufLimit = uncompressedSizeWithoutHeader + HEADER_SIZE; + int cksumBytes = totalChecksumBytes(); + int hdrSize = headerSize(); + int expectedBufLimit = uncompressedSizeWithoutHeader + headerSize() + + cksumBytes; if (buf.limit() != expectedBufLimit) { throw new AssertionError("Expected buffer limit " + expectedBufLimit + ", got " + buf.limit()); @@ -339,11 +437,11 @@ void sanityCheck() throws IOException { // We might optionally allocate HEADER_SIZE more bytes to read the next // block's, header, so there are two sensible values for buffer capacity. - if (buf.capacity() != uncompressedSizeWithoutHeader + HEADER_SIZE && - buf.capacity() != uncompressedSizeWithoutHeader + 2 * HEADER_SIZE) { + int size = uncompressedSizeWithoutHeader + hdrSize + cksumBytes; + if (buf.capacity() != size && + buf.capacity() != size + hdrSize) { throw new AssertionError("Invalid buffer capacity: " + buf.capacity() + - ", expected " + (uncompressedSizeWithoutHeader + HEADER_SIZE) + - " or " + (uncompressedSizeWithoutHeader + 2 * HEADER_SIZE)); + ", expected " + size + " or " + (size + hdrSize)); } } @@ -358,8 +456,8 @@ public String toString() { + ", prevBlockOffset=" + prevBlockOffset + ", dataBeginsWith=" - + Bytes.toStringBinary(buf.array(), buf.arrayOffset() + HEADER_SIZE, - Math.min(32, buf.limit() - buf.arrayOffset() - HEADER_SIZE)) + + Bytes.toStringBinary(buf.array(), buf.arrayOffset() + headerSize(), + Math.min(32, buf.limit() - buf.arrayOffset() - headerSize())) + ", fileOffset=" + offset; } @@ -379,31 +477,36 @@ private void validateOnDiskSizeWithoutHeader( /** * Always allocates a new buffer of the correct size. Copies header bytes - * from the existing buffer. Does not change header fields. + * from the existing buffer. Does not change header fields. + * Reserve room to keep checksum bytes too. 
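+   * The capacity needed is header + uncompressed data + checksum bytes, plus room for one more header when peeking at the next block.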
* * @param extraBytes whether to reserve room in the buffer to read the next * block's header */ private void allocateBuffer(boolean extraBytes) { - int capacityNeeded = HEADER_SIZE + uncompressedSizeWithoutHeader + - (extraBytes ? HEADER_SIZE : 0); + int cksumBytes = totalChecksumBytes(); +    int capacityNeeded = headerSize() + uncompressedSizeWithoutHeader + +        cksumBytes + +        (extraBytes ? headerSize() : 0); ByteBuffer newBuf = ByteBuffer.allocate(capacityNeeded); // Copy header bytes. System.arraycopy(buf.array(), buf.arrayOffset(), newBuf.array(), - newBuf.arrayOffset(), HEADER_SIZE); + newBuf.arrayOffset(), headerSize()); buf = newBuf; - buf.limit(HEADER_SIZE + uncompressedSizeWithoutHeader); + buf.limit(headerSize() + uncompressedSizeWithoutHeader + cksumBytes); } /** An additional sanity-check in case no compression is being used. */ public void assumeUncompressed() throws IOException { - if (onDiskSizeWithoutHeader != uncompressedSizeWithoutHeader) { + if (onDiskSizeWithoutHeader != uncompressedSizeWithoutHeader + +        totalChecksumBytes()) { throw new IOException("Using no compression but " + "onDiskSizeWithoutHeader=" + onDiskSizeWithoutHeader + ", " - + "uncompressedSizeWithoutHeader=" + uncompressedSizeWithoutHeader); + + "uncompressedSizeWithoutHeader=" + uncompressedSizeWithoutHeader + +          ", numChecksumBytes=" + totalChecksumBytes()); } } @@ -432,7 +535,7 @@ public long getOffset() { */ public DataInputStream getByteStream() { return new DataInputStream(new ByteArrayInputStream(buf.array(), - buf.arrayOffset() + HEADER_SIZE, buf.limit() - HEADER_SIZE)); + buf.arrayOffset() + headerSize(), buf.limit() - headerSize())); } @Override @@ -443,7 +546,10 @@ public long heapSize() { // Block type and byte buffer references 2 * ClassSize.REFERENCE + // On-disk size, uncompressed size, and next block's on-disk size - 3 * Bytes.SIZEOF_INT + + // bytesPerChecksum, onDiskDataSize and minorVersion +        6 * Bytes.SIZEOF_INT + +        // Checksum type +        1 * Bytes.SIZEOF_BYTE + // This and previous block offset 2 * Bytes.SIZEOF_LONG + // "Include memstore timestamp" flag @@ -570,14 +676,30 @@ private enum State { /** * Bytes to be written to the file system, including the header. Compressed - * if compression is turned on. + * if compression is turned on. It also includes the checksum data that +     * immediately follows the block data. (header + data + checksums) */ private byte[] onDiskBytesWithHeader; + /** +     * The size of the data on disk that does not include the checksums. +     * (header + data) +     */ +    private int onDiskDataSizeWithHeader; + +    /** +     * The checksum data on disk. It is used only if data is +     * not compressed. If data is compressed, then the checksums are already +     * part of onDiskBytesWithHeader. If data is uncompressed, then this +     * variable stores the checksum data for this block. +     */ +    private byte[] onDiskChecksum; + /** * Valid in the READY state. Contains the header and the uncompressed (but * potentially encoded, if this is a data block) bytes, so the length is * {@link #uncompressedSizeWithoutHeader} + {@link HFileBlock#HEADER_SIZE}. +     * Does not store checksums.
*/ private byte[] uncompressedBytesWithHeader; @@ -599,12 +721,19 @@ private enum State { /** Whether we are including memstore timestamp after every key/value */ private boolean includesMemstoreTS; + /** Checksum settings */ +    private ChecksumType checksumType; +    private int bytesPerChecksum; + /** * @param compressionAlgorithm compression algorithm to use * @param dataBlockEncoderAlgo data block encoding algorithm to use + * @param checksumType type of checksum +     * @param bytesPerChecksum bytes per checksum */ public Writer(Compression.Algorithm compressionAlgorithm, - HFileDataBlockEncoder dataBlockEncoder, boolean includesMemstoreTS) { + HFileDataBlockEncoder dataBlockEncoder, boolean includesMemstoreTS, +        ChecksumType checksumType, int bytesPerChecksum) { compressAlgo = compressionAlgorithm == null ? NONE : compressionAlgorithm; this.dataBlockEncoder = dataBlockEncoder != null ? dataBlockEncoder : NoOpDataBlockEncoder.INSTANCE; @@ -622,12 +751,19 @@ public Writer(Compression.Algorithm compressionAlgorithm, "for algorithm " + compressionAlgorithm, e); } } + if (bytesPerChecksum < HEADER_SIZE) { +        throw new RuntimeException("Unsupported value of bytesPerChecksum. " + +            " Minimum is " + HEADER_SIZE + " but the configured value is " + +            bytesPerChecksum); +      } prevOffsetByType = new long[BlockType.values().length]; for (int i = 0; i < prevOffsetByType.length; ++i) prevOffsetByType[i] = -1; this.includesMemstoreTS = includesMemstoreTS; + this.checksumType = checksumType; +      this.bytesPerChecksum = bytesPerChecksum; } /** @@ -703,16 +839,18 @@ private void finishBlock() throws IOException { state = State.BLOCK_READY; encodeDataBlockForDisk(); - doCompression(); - putHeader(uncompressedBytesWithHeader, 0, onDiskBytesWithHeader.length, - uncompressedBytesWithHeader.length); + doCompressionAndChecksumming(); } /** * Do compression if it is enabled, or re-use the uncompressed buffer if * it is not. Fills in the compressed block's header if doing compression. + * Also computes the checksums. When compression is disabled, the +     * checksums are written to their own separate data structure, +     * onDiskChecksum. When compression is enabled, the checksums are +     * appended to the compressed output byte stream. */ - private void doCompression() throws IOException { + private void doCompressionAndChecksumming() throws IOException { // do the compression if (compressAlgo != NONE) { compressedByteStream.reset(); @@ -726,11 +864,53 @@ private void doCompression() throws IOException { compressionStream.flush(); compressionStream.finish(); + // generate checksums +        onDiskDataSizeWithHeader = compressedByteStream.size(); // data size + +        // reserve space for checksums in the output byte stream +        ChecksumUtil.reserveSpaceForChecksums(compressedByteStream, +          onDiskDataSizeWithHeader, bytesPerChecksum); + + onDiskBytesWithHeader = compressedByteStream.toByteArray(); putHeader(onDiskBytesWithHeader, 0, onDiskBytesWithHeader.length, - uncompressedBytesWithHeader.length); + uncompressedBytesWithHeader.length, onDiskDataSizeWithHeader); + +        // generate checksums for header and data. The checksums are +        // part of onDiskBytesWithHeader itself.
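+        // Resulting layout of onDiskBytesWithHeader: [ header | compressed data | checksums ], with the checksum region starting at offset onDiskDataSizeWithHeader.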
+        ChecksumUtil.generateChecksums( +          onDiskBytesWithHeader, 0, onDiskDataSizeWithHeader, +          onDiskBytesWithHeader, onDiskDataSizeWithHeader, +          checksumType, bytesPerChecksum); + +        // Checksums are already part of onDiskBytesWithHeader +        onDiskChecksum = HConstants.EMPTY_BYTE_ARRAY; + +        // set the header for the uncompressed bytes (for cache-on-write) +        putHeader(uncompressedBytesWithHeader, 0, +          onDiskBytesWithHeader.length + onDiskChecksum.length, +          uncompressedBytesWithHeader.length, onDiskDataSizeWithHeader); + } else { + // If we are not using any compression, then the +        // checksums are written to their own array, onDiskChecksum. onDiskBytesWithHeader = uncompressedBytesWithHeader; + +        onDiskDataSizeWithHeader = onDiskBytesWithHeader.length; +        int numBytes = (int)ChecksumUtil.numBytes( +                          uncompressedBytesWithHeader.length, +                          bytesPerChecksum); +        onDiskChecksum = new byte[numBytes]; + +        // set the header for the uncompressed bytes +        putHeader(uncompressedBytesWithHeader, 0, +          onDiskBytesWithHeader.length + onDiskChecksum.length, +          uncompressedBytesWithHeader.length, onDiskDataSizeWithHeader); + +        ChecksumUtil.generateChecksums( +          uncompressedBytesWithHeader, 0, uncompressedBytesWithHeader.length, +          onDiskChecksum, 0, +          checksumType, bytesPerChecksum); } } @@ -749,7 +929,7 @@ private void encodeDataBlockForDisk() throws IOException { HEADER_SIZE).slice(); Pair<ByteBuffer, BlockType> encodingResult = dataBlockEncoder.beforeWriteToDisk(rawKeyValues, - includesMemstoreTS); + includesMemstoreTS, DUMMY_HEADER); BlockType encodedBlockType = encodingResult.getSecond(); if (encodedBlockType == BlockType.ENCODED_DATA) { @@ -772,16 +952,21 @@ /** * Put the header into the given byte array at the given offset. - * @param onDiskSize size of the block on disk + * @param onDiskSize size of the block on disk: header + data + checksums * @param uncompressedSize size of the block after decompression (but - * before optional data block decoding) + * before optional data block decoding) including header +     * @param onDiskDataSize size of the block on disk with header +     *        and data but not including the checksums */ private void putHeader(byte[] dest, int offset, int onDiskSize, - int uncompressedSize) { + int uncompressedSize, int onDiskDataSize) { offset = blockType.put(dest, offset); offset = Bytes.putInt(dest, offset, onDiskSize - HEADER_SIZE); offset = Bytes.putInt(dest, offset, uncompressedSize - HEADER_SIZE); - Bytes.putLong(dest, offset, prevOffset); + offset = Bytes.putLong(dest, offset, prevOffset); +      offset = Bytes.putByte(dest, offset, checksumType.getCode()); +      offset = Bytes.putInt(dest, offset, bytesPerChecksum); +      offset = Bytes.putInt(dest, offset, onDiskDataSizeWithHeader); } /** @@ -816,19 +1001,45 @@ public void writeHeaderAndData(FSDataOutputStream out) throws IOException { private void writeHeaderAndData(DataOutputStream out) throws IOException { ensureBlockReady(); out.write(onDiskBytesWithHeader); + if (compressAlgo == NONE) { +        if (onDiskChecksum == HConstants.EMPTY_BYTE_ARRAY) { +          throw new IOException("A " + blockType + +              " without compression should have checksums" + +              " stored separately."); +        } +        out.write(onDiskChecksum); +      } } /** * Returns the header or the compressed data (or uncompressed data when not * using compression) as a byte array. Can be called in the "writing" state * or in the "block ready" state. If called in the "writing" state, - * transitions the writer to the "block ready" state.
+     * transitions the writer to the "block ready" state. This returns +     * the header + data + checksums stored on disk. * * @return header and data as they would be stored on disk in a byte array * @throws IOException */ - public byte[] getHeaderAndData() throws IOException { + byte[] getHeaderAndDataForTest() throws IOException { ensureBlockReady(); + if (compressAlgo == NONE) { +        if (onDiskChecksum == HConstants.EMPTY_BYTE_ARRAY) { +          throw new IOException("A " + blockType + +              " without compression should have checksums" + +              " stored separately."); +        } +        // This is suboptimal because it does an extra copy, but this +        // method is used only by unit tests. +        byte[] output = new byte[onDiskBytesWithHeader.length + +                                 onDiskChecksum.length]; +        System.arraycopy(onDiskBytesWithHeader, 0, +          output, 0, onDiskBytesWithHeader.length); +        System.arraycopy(onDiskChecksum, 0, +          output, onDiskBytesWithHeader.length, +          onDiskChecksum.length); +        return output; +      } return onDiskBytesWithHeader; } @@ -851,9 +1062,9 @@ public void releaseCompressor() { * * @return the on-disk size of the block, not including the header.
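+     * Checksum data, however, is included in the returned size.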
- * - * @return uncompressed block for caching on write in the form of a buffer - */ - public ByteBuffer getUncompressedBufferWithHeader() { - byte[] b = getUncompressedDataWithHeader(); - return ByteBuffer.wrap(b, 0, b.length); - } - /** * Takes the given {@link BlockWritable} instance, creates a new block of * its appropriate type, writes the writable into this block, and flushes @@ -949,13 +1149,21 @@ public void writeBlock(BlockWritable bw, FSDataOutputStream out) writeHeaderAndData(out); } + /** + * Creates a new HFileBlock. Checksums have already been validated, so + * the byte buffer passed into the constructor of this newly created + * block does not have checksum data even though the header minor + * version is MINOR_VERSION_WITH_CHECKSUM. This is indicated by setting a + * 0 value in bytesPerChecksum. + */ public HFileBlock getBlockForCaching() { return new HFileBlock(blockType, getOnDiskSizeWithoutHeader(), getUncompressedSizeWithoutHeader(), prevOffset, getUncompressedBufferWithHeader(), DONT_FILL_HEADER, startOffset, - includesMemstoreTS); + includesMemstoreTS, MINOR_VERSION_WITH_CHECKSUM, + 0, ChecksumType.NULL.getCode(), // no checksums in cached data + onDiskBytesWithHeader.length + onDiskChecksum.length); } - } /** Something that can be written into a block. */ @@ -1024,10 +1232,15 @@ HFileBlock readBlockData(long offset, long onDiskSize, * A common implementation of some methods of {@link FSReader} and some * tools for implementing HFile format version-specific block readers. */ - public abstract static class AbstractFSReader implements FSReader { + private abstract static class AbstractFSReader implements FSReader { + + /** The file system stream of the underlying {@link HFile} that + * does checksum validations in the filesystem */ + protected final FSDataInputStream istream; - /** The file system stream of the underlying {@link HFile} */ - protected FSDataInputStream istream; + /** The file system stream of the underlying {@link HFile} that + * does not do checksum verification in the file system */ + protected final FSDataInputStream istreamNoFsChecksum; /** Compression algorithm used by the {@link HFile} */ protected Compression.Algorithm compressAlgo; @@ -1035,14 +1248,34 @@ public abstract static class AbstractFSReader implements FSReader { /** The size of the file we are reading from, or -1 if unknown. 
*/ protected long fileSize; + /** The minor version of this reader */ + private int minorVersion; + + /** The size of the header */ + protected int hdrSize; + + /** The filesystem used to access data */ + protected HFileSystem hfs; + + /** The path (if any) where this data is coming from */ + protected Path path; + /** The default buffer size for our buffered streams */ public static final int DEFAULT_BUFFER_SIZE = 1 << 20; - public AbstractFSReader(FSDataInputStream istream, Algorithm compressAlgo, - long fileSize) { + public AbstractFSReader(FSDataInputStream istream, + FSDataInputStream istreamNoFsChecksum, + Algorithm compressAlgo, + long fileSize, int minorVersion, HFileSystem hfs, Path path) + throws IOException { this.istream = istream; this.compressAlgo = compressAlgo; this.fileSize = fileSize; + this.minorVersion = minorVersion; + this.hfs = hfs; + this.path = path; + this.hdrSize = headerSize(minorVersion); + this.istreamNoFsChecksum = istreamNoFsChecksum; } @Override @@ -1083,25 +1316,27 @@ public DataInputStream nextBlockAsStream(BlockType blockType) * @param peekIntoNextBlock whether to read the next block's on-disk size * @param fileOffset position in the stream to read at * @param pread whether we should do a positional read + * @param istream The input source of data * @return the on-disk size of the next block with header size included, or * -1 if it could not be determined * @throws IOException */ - protected int readAtOffset(byte[] dest, int destOffset, int size, + protected int readAtOffset(FSDataInputStream istream, + byte[] dest, int destOffset, int size, boolean peekIntoNextBlock, long fileOffset, boolean pread) throws IOException { if (peekIntoNextBlock && - destOffset + size + HEADER_SIZE > dest.length) { + destOffset + size + hdrSize > dest.length) { // We are asked to read the next block's header as well, but there is // not enough room in the array. throw new IOException("Attempted to read " + size + " bytes and " + - HEADER_SIZE + " bytes of next header into a " + dest.length + + hdrSize + " bytes of next header into a " + dest.length + "-byte array at offset " + destOffset); } if (pread) { // Positional read. Better for random reads. - int extraSize = peekIntoNextBlock ? HEADER_SIZE : 0; + int extraSize = peekIntoNextBlock ? hdrSize : 0; int ret = istream.read(fileOffset, dest, destOffset, size + extraSize); if (ret < size) { @@ -1131,14 +1366,14 @@ protected int readAtOffset(byte[] dest, int destOffset, int size, } // Try to read the next block header. 
- if (!readWithExtra(istream, dest, destOffset, size, HEADER_SIZE)) + if (!readWithExtra(istream, dest, destOffset, size, hdrSize)) return -1; } } assert peekIntoNextBlock; return Bytes.toInt(dest, destOffset + size + BlockType.MAGIC_LENGTH) + - HEADER_SIZE; + hdrSize; } /** @@ -1149,14 +1384,12 @@ protected int readAtOffset(byte[] dest, int destOffset, int size, * @param bufferedBoundedStream * a stream to read compressed data from, bounded to the exact * amount of compressed data - * @param compressedSize - * compressed data size, header not included * @param uncompressedSize * uncompressed data size, header not included * @throws IOException */ protected void decompress(byte[] dest, int destOffset, - InputStream bufferedBoundedStream, int compressedSize, + InputStream bufferedBoundedStream, int uncompressedSize) throws IOException { Decompressor decompressor = null; try { @@ -1189,6 +1422,12 @@ protected InputStream createBufferedBoundedStream(long offset, offset, size, pread), Math.min(DEFAULT_BUFFER_SIZE, size)); } + /** + * @return The minorVersion of this HFile + */ + protected int getMinorVersion() { + return minorVersion; + } } /** @@ -1198,14 +1437,15 @@ protected InputStream createBufferedBoundedStream(long offset, * reader returns blocks represented in the uniform version 2 format in * memory. */ - public static class FSReaderV1 extends AbstractFSReader { + static class FSReaderV1 extends AbstractFSReader { /** Header size difference between version 1 and 2 */ - private static final int HEADER_DELTA = HEADER_SIZE - MAGIC_LENGTH; + private static final int HEADER_DELTA = HEADER_SIZE_NO_CHECKSUM - + MAGIC_LENGTH; public FSReaderV1(FSDataInputStream istream, Algorithm compressAlgo, - long fileSize) { - super(istream, compressAlgo, fileSize); + long fileSize) throws IOException { + super(istream, istream, compressAlgo, fileSize, 0, null, null); } /** @@ -1264,7 +1504,7 @@ public HFileBlock readBlockData(long offset, long onDiskSizeWithMagic, // The first MAGIC_LENGTH bytes of what this will read will be // overwritten. - readAtOffset(buf.array(), buf.arrayOffset() + HEADER_DELTA, + readAtOffset(istream, buf.array(), buf.arrayOffset() + HEADER_DELTA, onDiskSize, false, offset, pread); onDiskSizeWithoutHeader = uncompressedSizeWithMagic - MAGIC_LENGTH; @@ -1272,7 +1512,7 @@ public HFileBlock readBlockData(long offset, long onDiskSizeWithMagic, InputStream bufferedBoundedStream = createBufferedBoundedStream( offset, onDiskSize, pread); decompress(buf.array(), buf.arrayOffset() + HEADER_DELTA, - bufferedBoundedStream, onDiskSize, uncompressedSizeWithMagic); + bufferedBoundedStream, uncompressedSizeWithMagic); // We don't really have a good way to exclude the "magic record" size // from the compressed block's size, since it is compressed as well. @@ -1287,7 +1527,8 @@ public HFileBlock readBlockData(long offset, long onDiskSizeWithMagic, // since the magic record gets moved to the header. HFileBlock b = new HFileBlock(newBlockType, onDiskSizeWithoutHeader, uncompressedSizeWithMagic - MAGIC_LENGTH, -1L, buf, FILL_HEADER, - offset, MemStore.NO_PERSISTENT_TS); + offset, MemStore.NO_PERSISTENT_TS, 0, 0, ChecksumType.NULL.getCode(), + onDiskSizeWithoutHeader + HEADER_SIZE_NO_CHECKSUM); return b; } } @@ -1303,7 +1544,20 @@ private static class PrefetchedHeader { } /** Reads version 2 blocks from the filesystem. 
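+   * Handles both minor version 0 blocks (no hbase checksums) and minor version 1 blocks (with hbase-level checksums).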
*/ - public static class FSReaderV2 extends AbstractFSReader { + static class FSReaderV2 extends AbstractFSReader { + +    // The configuration states that we should validate hbase checksums +    private final boolean useHBaseChecksumConfigured; + +    // Record the current state of this reader with respect to +    // validating checksums in HBase. This is originally set to the same +    // value as useHBaseChecksumConfigured, but can change state as and when +    // we encounter checksum verification failures. +    private volatile boolean useHBaseChecksum; + +    // In the case of a checksum failure, do this many succeeding +    // reads without hbase checksum verification. +    private volatile int checksumOffCount = -1; /** Whether we include memstore timestamp in data blocks */ protected boolean includesMemstoreTS; @@ -1320,9 +1574,40 @@ public PrefetchedHeader initialValue() { } }; - public FSReaderV2(FSDataInputStream istream, Algorithm compressAlgo, - long fileSize) { - super(istream, compressAlgo, fileSize); + public FSReaderV2(FSDataInputStream istream, +        FSDataInputStream istreamNoFsChecksum, Algorithm compressAlgo, +        long fileSize, int minorVersion, HFileSystem hfs, Path path) +      throws IOException { +      super(istream, istreamNoFsChecksum, compressAlgo, fileSize, +            minorVersion, hfs, path); + +      if (hfs != null) { +        // Check the configuration to determine whether hbase-level +        // checksum verification is needed or not. +        useHBaseChecksum = hfs.useHBaseChecksum(); +      } else { +        // The configuration does not specify anything about hbase checksum +        // validations. Set it to true here assuming that we will verify +        // hbase checksums for all reads. For older files that do not have +        // stored checksums, this flag will be reset later. +        useHBaseChecksum = true; +      } + +      // for older versions, hbase did not store checksums. +      if (getMinorVersion() < MINOR_VERSION_WITH_CHECKSUM) { +        useHBaseChecksum = false; +      } +      this.useHBaseChecksumConfigured = useHBaseChecksum; +    } + +    /** +     * A constructor that reads files with the latest minor version. +     * This is used by unit tests only. +     */ +    FSReaderV2(FSDataInputStream istream, Algorithm compressAlgo, +      long fileSize) throws IOException { +      this(istream, istream, compressAlgo, fileSize, +           HFileReaderV2.MAX_MINOR_VERSION, null, null); } /** @@ -1339,6 +1624,101 @@ public FSReaderV2(FSDataInputStream istream, Algorithm compressAlgo, @Override public HFileBlock readBlockData(long offset, long onDiskSizeWithHeaderL, int uncompressedSize, boolean pread) throws IOException { + +      // It is ok to get a reference to the stream here without any +      // locks because it is marked final. +      FSDataInputStream is = this.istreamNoFsChecksum; + +      // get a copy of the current state of whether to validate +      // hbase checksums or not for this read call. This is not +      // thread-safe but the one constraint is that if we decide +      // to skip hbase checksum verification then we are +      // guaranteed to use hdfs checksum verification. +      boolean doVerificationThruHBaseChecksum = this.useHBaseChecksum; +      if (!doVerificationThruHBaseChecksum) { +        is = this.istream; +      } + +      HFileBlock blk = readBlockDataInternal(is, offset, +                         onDiskSizeWithHeaderL, +                         uncompressedSize, pread, +                         doVerificationThruHBaseChecksum); +      if (blk == null) { +        HFile.LOG.warn("HBase checksum verification failed for file " + +                       path + " at offset " + +                       offset + " filesize " + fileSize + +                       ".
+ Retrying read with HDFS checksums turned on..."); + + if (!doVerificationThruHBaseChecksum) { + String msg = "HBase checksum verification failed for file " + + path + " at offset " + + offset + " filesize " + fileSize + + " but this cannot happen because doVerify is " + + doVerificationThruHBaseChecksum; + HFile.LOG.warn(msg); + throw new IOException(msg); // cannot happen case here + } + HFile.checksumFailures.incrementAndGet(); // update metrics + + // If we have a checksum failure, we fall back into a mode where + // the next few reads use HDFS level checksums. We aim to make the + // next CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD reads avoid + // hbase checksum verification, but since this value is set without + // holding any locks, it can so happen that we might actually do + // a few more than precisely this number. + this.checksumOffCount = CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD; + this.useHBaseChecksum = false; + doVerificationThruHBaseChecksum = false; + is = this.istream; + blk = readBlockDataInternal(is, offset, onDiskSizeWithHeaderL, + uncompressedSize, pread, + doVerificationThruHBaseChecksum); + if (blk != null) { + HFile.LOG.warn("HDFS checksum verification succeeded for file " + + path + " at offset " + + offset + " filesize " + fileSize); + } + } + if (blk == null && !doVerificationThruHBaseChecksum) { + String msg = "readBlockData failed, possibly due to " + + "a checksum verification failure for file " + path + + " at offset " + offset + " filesize " + fileSize; + HFile.LOG.warn(msg); + throw new IOException(msg); + } + + // If there was a checksum mismatch earlier, then retry with + // HBase checksums switched off and use HDFS checksum verification. + // This triggers HDFS to detect and fix corrupt replicas. The + // next checksumOffCount read requests will use HDFS checksums. + // The decrementing of this.checksumOffCount is not thread-safe, + // but it is harmless because eventually checksumOffCount will be + // a negative number. + if (!this.useHBaseChecksum && this.useHBaseChecksumConfigured) { + if (this.checksumOffCount-- < 0) { + this.useHBaseChecksum = true; // auto re-enable hbase checksums + } + } + return blk; + } + + /** + * Reads a version 2 block. + * + * @param offset the offset in the stream to read at + * @param onDiskSizeWithHeaderL the on-disk size of the block, including + * the header, or -1 if unknown + * @param uncompressedSize the uncompressed size of the block. Always + * expected to be -1. This parameter is only used in version 1. + * @param pread whether to use a positional read + * @param verifyChecksum Whether to use HBase checksums. + * If HBase checksum is switched off, then use HDFS checksum. 
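// [Editor's sketch] The fallback logic above is a small lock-free state
// machine: verify HBase checksums by default (unless the file predates
// checksums), switch to HDFS verification for roughly
// CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD reads after a mismatch, then
// re-enable. This condensed, hypothetical version omits the dual streams
// and the useHBaseChecksumConfigured guard for brevity.
import java.io.IOException;
import java.util.concurrent.atomic.AtomicLong;

class ChecksumFallbackSketch {
  interface BlockSource {
    /** @return block bytes, or null on an HBase-level checksum mismatch */
    byte[] read(boolean verifyHBaseChecksum) throws IOException;
  }

  static final int THRESHOLD = 100;            // reads to skip after a failure
  static final AtomicLong failures = new AtomicLong();
  private volatile boolean useHBaseChecksum = true;
  private volatile int checksumOffCount = -1;

  byte[] read(BlockSource src) throws IOException {
    byte[] blk = src.read(useHBaseChecksum);
    if (blk == null) {                         // HBase checksum mismatch
      failures.incrementAndGet();              // update metrics
      checksumOffCount = THRESHOLD;            // racy by design; close enough
      useHBaseChecksum = false;
      blk = src.read(false);                   // retry via HDFS checksums
      if (blk == null) throw new IOException("read failed with HDFS checksums");
    } else if (!useHBaseChecksum && checksumOffCount-- < 0) {
      useHBaseChecksum = true;                 // auto re-enable HBase checksums
    }
    return blk;
  }
}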
+ * @return the HFileBlock or null if there is a HBase checksum mismatch + */ + private HFileBlock readBlockDataInternal(FSDataInputStream is, long offset, + long onDiskSizeWithHeaderL, + int uncompressedSize, boolean pread, boolean verifyChecksum) + throws IOException { if (offset < 0) { throw new IOException("Invalid offset=" + offset + " trying to read " + "block (onDiskSize=" + onDiskSizeWithHeaderL @@ -1349,10 +1729,10 @@ public HFileBlock readBlockData(long offset, long onDiskSizeWithHeaderL, "the uncompressed size parameter"); } - if ((onDiskSizeWithHeaderL < HEADER_SIZE && onDiskSizeWithHeaderL != -1) + if ((onDiskSizeWithHeaderL < hdrSize && onDiskSizeWithHeaderL != -1) || onDiskSizeWithHeaderL >= Integer.MAX_VALUE) { throw new IOException("Invalid onDisksize=" + onDiskSizeWithHeaderL - + ": expected to be at least " + HEADER_SIZE + + ": expected to be at least " + hdrSize + " and at most " + Integer.MAX_VALUE + ", or -1 (offset=" + offset + ", uncompressedSize=" + uncompressedSize + ")"); } @@ -1369,7 +1749,7 @@ public HFileBlock readBlockData(long offset, long onDiskSizeWithHeaderL, // block's header (e.g. this block's header) when reading the previous // block. This is the faster and more preferable case. - int onDiskSizeWithoutHeader = onDiskSizeWithHeader - HEADER_SIZE; + int onDiskSizeWithoutHeader = onDiskSizeWithHeader - hdrSize; assert onDiskSizeWithoutHeader >= 0; // See if we can avoid reading the header. This is desirable, because @@ -1380,39 +1760,42 @@ public HFileBlock readBlockData(long offset, long onDiskSizeWithHeaderL, ? prefetchedHeader.header : null; // Size that we have to skip in case we have already read the header. - int preReadHeaderSize = header == null ? 0 : HEADER_SIZE; + int preReadHeaderSize = header == null ? 0 : hdrSize; if (compressAlgo == Compression.Algorithm.NONE) { // Just read the whole thing. Allocate enough space to read the // next block's header too. ByteBuffer headerAndData = ByteBuffer.allocate(onDiskSizeWithHeader - + HEADER_SIZE); + + hdrSize); headerAndData.limit(onDiskSizeWithHeader); if (header != null) { System.arraycopy(header, 0, headerAndData.array(), 0, - HEADER_SIZE); + hdrSize); } - int nextBlockOnDiskSizeWithHeader = readAtOffset( + int nextBlockOnDiskSizeWithHeader = readAtOffset(is, headerAndData.array(), headerAndData.arrayOffset() + preReadHeaderSize, onDiskSizeWithHeader - preReadHeaderSize, true, offset + preReadHeaderSize, pread); - b = new HFileBlock(headerAndData); + b = new HFileBlock(headerAndData, getMinorVersion()); b.assumeUncompressed(); b.validateOnDiskSizeWithoutHeader(onDiskSizeWithoutHeader); b.nextBlockOnDiskSizeWithHeader = nextBlockOnDiskSizeWithHeader; - + if (verifyChecksum && + !validateBlockChecksum(b, headerAndData.array(), hdrSize)) { + return null; // checksum mismatch + } if (b.nextBlockOnDiskSizeWithHeader > 0) setNextBlockHeader(offset, b); } else { // Allocate enough space to fit the next block's header too. 
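// [Editor's sketch] The uncompressed fast path above allocates one extra
// header's worth of space so a single positional read can pull in both the
// requested block and the header of the block after it. The buffer math in
// isolation (names are illustrative):
class UncompressedReadLayoutSketch {
  // | cached or freshly read header | block payload (+ checksums) | next header |
  static int bufferSize(int onDiskSizeWithHeader, int hdrSize) {
    return onDiskSizeWithHeader + hdrSize;   // room to capture the next header
  }
  // Bytes to skip on the wire when the header was already prefetched.
  static int preReadHeaderSize(boolean headerCached, int hdrSize) {
    return headerCached ? hdrSize : 0;
  }
}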
- byte[] onDiskBlock = new byte[onDiskSizeWithHeader + HEADER_SIZE]; + byte[] onDiskBlock = new byte[onDiskSizeWithHeader + hdrSize]; - int nextBlockOnDiskSize = readAtOffset(onDiskBlock, + int nextBlockOnDiskSize = readAtOffset(is, onDiskBlock, preReadHeaderSize, onDiskSizeWithHeader - preReadHeaderSize, true, offset + preReadHeaderSize, pread); @@ -1420,32 +1803,38 @@ public HFileBlock readBlockData(long offset, long onDiskSizeWithHeaderL, header = onDiskBlock; try { - b = new HFileBlock(ByteBuffer.wrap(header, 0, HEADER_SIZE)); + b = new HFileBlock(ByteBuffer.wrap(header, 0, hdrSize), + getMinorVersion()); } catch (IOException ex) { // Seen in load testing. Provide comprehensive debug info. throw new IOException("Failed to read compressed block at " + offset + ", onDiskSizeWithoutHeader=" + onDiskSizeWithHeader + ", preReadHeaderSize=" + preReadHeaderSize + ", header.length=" + header.length + ", header bytes: " - + Bytes.toStringBinary(header, 0, HEADER_SIZE), ex); + + Bytes.toStringBinary(header, 0, hdrSize), ex); } b.validateOnDiskSizeWithoutHeader(onDiskSizeWithoutHeader); b.nextBlockOnDiskSizeWithHeader = nextBlockOnDiskSize; + if (verifyChecksum && + !validateBlockChecksum(b, onDiskBlock, hdrSize)) { + return null; // checksum mismatch + } DataInputStream dis = new DataInputStream(new ByteArrayInputStream( - onDiskBlock, HEADER_SIZE, onDiskSizeWithoutHeader)); + onDiskBlock, hdrSize, onDiskSizeWithoutHeader)); // This will allocate a new buffer but keep header bytes. b.allocateBuffer(b.nextBlockOnDiskSizeWithHeader > 0); - decompress(b.buf.array(), b.buf.arrayOffset() + HEADER_SIZE, dis, - onDiskSizeWithoutHeader, b.uncompressedSizeWithoutHeader); + decompress(b.buf.array(), b.buf.arrayOffset() + hdrSize, dis, + b.uncompressedSizeWithoutHeader); // Copy next block's header bytes into the new block if we have them. if (nextBlockOnDiskSize > 0) { System.arraycopy(onDiskBlock, onDiskSizeWithHeader, b.buf.array(), - b.buf.arrayOffset() + HEADER_SIZE - + b.uncompressedSizeWithoutHeader, HEADER_SIZE); + b.buf.arrayOffset() + hdrSize + + b.uncompressedSizeWithoutHeader + b.totalChecksumBytes(), + hdrSize); setNextBlockHeader(offset, b); } @@ -1467,12 +1856,12 @@ public HFileBlock readBlockData(long offset, long onDiskSizeWithHeaderL, if (headerBuf == null) { // Unfortunately, we still have to do a separate read operation to // read the header. - headerBuf = ByteBuffer.allocate(HEADER_SIZE);; - readAtOffset(headerBuf.array(), headerBuf.arrayOffset(), HEADER_SIZE, + headerBuf = ByteBuffer.allocate(hdrSize); + readAtOffset(is, headerBuf.array(), headerBuf.arrayOffset(), hdrSize, false, offset, pread); } - b = new HFileBlock(headerBuf); + b = new HFileBlock(headerBuf, getMinorVersion()); // This will also allocate enough room for the next block's header. b.allocateBuffer(true); @@ -1482,10 +1871,15 @@ public HFileBlock readBlockData(long offset, long onDiskSizeWithHeaderL, // Avoid creating bounded streams and using a "codec" that does // nothing. 
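// [Editor's sketch] Once per-chunk checksums are part of the on-disk block,
// a decompressed block buffer is laid out as header | data | checksum values
// | (optional) next block's header, so every "next header" copy above must
// also skip totalChecksumBytes(). The offset arithmetic, isolated:
class NextHeaderOffsetSketch {
  static int nextHeaderOffset(int arrayOffset, int hdrSize,
      int uncompressedSizeWithoutHeader, int totalChecksumBytes) {
    return arrayOffset + hdrSize
        + uncompressedSizeWithoutHeader + totalChecksumBytes;
  }
}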
b.assumeUncompressed(); - b.nextBlockOnDiskSizeWithHeader = readAtOffset(b.buf.array(), - b.buf.arrayOffset() + HEADER_SIZE, - b.uncompressedSizeWithoutHeader, true, offset + HEADER_SIZE, + b.nextBlockOnDiskSizeWithHeader = readAtOffset(is, b.buf.array(), + b.buf.arrayOffset() + hdrSize, + b.uncompressedSizeWithoutHeader + b.totalChecksumBytes(), + true, offset + hdrSize, pread); + if (verifyChecksum && + !validateBlockChecksum(b, b.buf.array(), hdrSize)) { + return null; // checksum mismatch + } if (b.nextBlockOnDiskSizeWithHeader > 0) { setNextBlockHeader(offset, b); @@ -1493,26 +1887,30 @@ public HFileBlock readBlockData(long offset, long onDiskSizeWithHeaderL, } else { // Allocate enough space for the block's header and compressed data. byte[] compressedBytes = new byte[b.getOnDiskSizeWithHeader() - + HEADER_SIZE]; - - b.nextBlockOnDiskSizeWithHeader = readAtOffset(compressedBytes, - HEADER_SIZE, b.onDiskSizeWithoutHeader, true, offset - + HEADER_SIZE, pread); + + hdrSize]; + + b.nextBlockOnDiskSizeWithHeader = readAtOffset(is, compressedBytes, + hdrSize, b.onDiskSizeWithoutHeader, true, offset + + hdrSize, pread); + if (verifyChecksum && + !validateBlockChecksum(b, compressedBytes, hdrSize)) { + return null; // checksum mismatch + } DataInputStream dis = new DataInputStream(new ByteArrayInputStream( - compressedBytes, HEADER_SIZE, b.onDiskSizeWithoutHeader)); + compressedBytes, hdrSize, b.onDiskSizeWithoutHeader)); - decompress(b.buf.array(), b.buf.arrayOffset() + HEADER_SIZE, dis, - b.onDiskSizeWithoutHeader, b.uncompressedSizeWithoutHeader); + decompress(b.buf.array(), b.buf.arrayOffset() + hdrSize, dis, + b.uncompressedSizeWithoutHeader); if (b.nextBlockOnDiskSizeWithHeader > 0) { // Copy the next block's header into the new block. - int nextHeaderOffset = b.buf.arrayOffset() + HEADER_SIZE - + b.uncompressedSizeWithoutHeader; + int nextHeaderOffset = b.buf.arrayOffset() + hdrSize + + b.uncompressedSizeWithoutHeader + b.totalChecksumBytes(); System.arraycopy(compressedBytes, - compressedBytes.length - HEADER_SIZE, + compressedBytes.length - hdrSize, b.buf.array(), nextHeaderOffset, - HEADER_SIZE); + hdrSize); setNextBlockHeader(offset, b); } @@ -1527,10 +1925,10 @@ public HFileBlock readBlockData(long offset, long onDiskSizeWithHeaderL, private void setNextBlockHeader(long offset, HFileBlock b) { PrefetchedHeader prefetchedHeader = prefetchedHeaderForThread.get(); prefetchedHeader.offset = offset + b.getOnDiskSizeWithHeader(); - int nextHeaderOffset = b.buf.arrayOffset() + HEADER_SIZE - + b.uncompressedSizeWithoutHeader; + int nextHeaderOffset = b.buf.arrayOffset() + hdrSize + + b.uncompressedSizeWithoutHeader + b.totalChecksumBytes(); System.arraycopy(b.buf.array(), nextHeaderOffset, - prefetchedHeader.header, 0, HEADER_SIZE); + prefetchedHeader.header, 0, hdrSize); } void setIncludesMemstoreTS(boolean enabled) { @@ -1540,6 +1938,18 @@ void setIncludesMemstoreTS(boolean enabled) { void setDataBlockEncoder(HFileDataBlockEncoder encoder) { this.dataBlockEncoder = encoder; } + + /** + * Generates the checksum for the header as well as the data and + * then validates that it matches the value stored in the header. + * If there is a checksum mismatch, then return false. Otherwise + * return true. 
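// [Editor's sketch] Both read paths finish by calling setNextBlockHeader,
// which stashes the trailing header bytes in a per-thread PrefetchedHeader so
// the next sequential read can skip a separate header I/O. A stripped-down
// version of that cache (33-byte header size assumed, per the v2.1 layout):
class PrefetchedHeaderSketch {
  static final class Prefetched {
    long offset = -1;
    final byte[] header = new byte[33];
  }
  private final ThreadLocal<Prefetched> cache =
      ThreadLocal.withInitial(Prefetched::new);

  /** After a read: remember the header that followed the block just read. */
  void save(long nextBlockOffset, byte[] buf, int nextHeaderOffset, int hdrSize) {
    Prefetched p = cache.get();
    p.offset = nextBlockOffset;
    System.arraycopy(buf, nextHeaderOffset, p.header, 0, hdrSize);
  }

  /** Before a read: the cached header bytes, or null if we don't have them. */
  byte[] lookup(long offset) {
    Prefetched p = cache.get();
    return p.offset == offset ? p.header : null;
  }
}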
+ */ + protected boolean validateBlockChecksum(HFileBlock block, + byte[] data, int hdrSize) throws IOException { + return ChecksumUtil.validateBlockChecksum(path, block, + data, hdrSize); + } } @Override @@ -1618,5 +2028,87 @@ public DataBlockEncoding getDataBlockEncoding() { return DataBlockEncoding.NONE; } + byte getChecksumType() { + return this.checksumType; + } + + int getBytesPerChecksum() { + return this.bytesPerChecksum; + } + + int getOnDiskDataSizeWithHeader() { + return this.onDiskDataSizeWithHeader; + } + + int getMinorVersion() { + return this.minorVersion; + } + + /** + * Calculate the number of bytes required to store all the checksums + * for this block. Each checksum value is a 4 byte integer. + */ + int totalChecksumBytes() { + // If the hfile block has minorVersion 0, then there are no checksum + // data to validate. Similarly, a zero value in this.bytesPerChecksum + // indicates that cached blocks do not have checksum data because + // checksums were already validated when the block was read from disk. + if (minorVersion < MINOR_VERSION_WITH_CHECKSUM || this.bytesPerChecksum == 0) { + return 0; + } + return (int)ChecksumUtil.numBytes(onDiskDataSizeWithHeader, bytesPerChecksum); + } + + /** + * Returns the size of this block header. + */ + public int headerSize() { + return headerSize(this.minorVersion); + } + + /** + * Maps a minor version to the size of the header. + */ + static private int headerSize(int minorVersion) { + if (minorVersion < MINOR_VERSION_WITH_CHECKSUM) { + return HEADER_SIZE_NO_CHECKSUM; + } + return HEADER_SIZE; + } + + /** + * Convert the contents of the block header into a human-readable string. + * This is mostly helpful for debugging. This assumes that the block + * has minor version > 0. + */ + static String toStringHeader(ByteBuffer buf) throws IOException { + int offset = buf.arrayOffset(); + byte[] b = buf.array(); + long magic = Bytes.toLong(b, offset); + BlockType bt = BlockType.read(buf); + offset += Bytes.SIZEOF_LONG; + int compressedBlockSizeNoHeader = Bytes.toInt(b, offset); + offset += Bytes.SIZEOF_INT; + int uncompressedBlockSizeNoHeader = Bytes.toInt(b, offset); + offset += Bytes.SIZEOF_INT; + long prevBlockOffset = Bytes.toLong(b, offset); + offset += Bytes.SIZEOF_LONG; + byte cksumtype = b[offset]; + offset += Bytes.SIZEOF_BYTE; + long bytesPerChecksum = Bytes.toInt(b, offset); + offset += Bytes.SIZEOF_INT; + long onDiskDataSizeWithHeader = Bytes.toInt(b, offset); + offset += Bytes.SIZEOF_INT; + return " Header dump: magic: " + magic + + " blockType " + bt + + " compressedBlockSizeNoHeader " + + compressedBlockSizeNoHeader + + " uncompressedBlockSizeNoHeader " + + uncompressedBlockSizeNoHeader + + " prevBlockOffset " + prevBlockOffset + + " checksumType " + ChecksumType.codeToType(cksumtype) + + " bytesPerChecksum " + bytesPerChecksum + + " onDiskDataSizeWithHeader " + onDiskDataSizeWithHeader; + } } diff --git a/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java b/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java index 3376e78095a4..55d958d61691 100644 --- a/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java +++ b/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java @@ -51,11 +51,12 @@ public HFileBlock diskToCacheFormat(HFileBlock block, * Should be called before an encoded or unencoded data block is written to * disk. 
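// [Editor's sketch] ChecksumUtil.numBytes is not part of this hunk; given the
// invariant stated above (one 4-byte checksum value per bytesPerChecksum
// chunk, covering header plus data), it plausibly reduces to a ceiling
// division. A hedged reconstruction:
class ChecksumSizeSketch {
  static long numChecksumBytes(long onDiskDataSizeWithHeader, int bytesPerChecksum) {
    long chunks = (onDiskDataSizeWithHeader + bytesPerChecksum - 1)
        / bytesPerChecksum;                  // ceil(dataSize / chunkSize)
    return chunks * 4;                       // one 4-byte CRC per chunk
  }
}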
* @param in KeyValues next to each other + * @param dummyHeader A dummy header to be written as a placeholder * @return a non-null on-heap buffer containing the contents of the * HFileBlock with unfilled header and block type */ public Pair<ByteBuffer, BlockType> beforeWriteToDisk( - ByteBuffer in, boolean includesMemstoreTS); + ByteBuffer in, boolean includesMemstoreTS, byte[] dummyHeader); /** * Decides whether we should use a scanner over encoded blocks. diff --git a/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java b/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java index bd4f61273d06..8371bf291356 100644 --- a/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java +++ b/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java @@ -154,14 +154,14 @@ public HFileBlock diskToCacheFormat(HFileBlock block, boolean isCompaction) { */ @Override public Pair<ByteBuffer, BlockType> beforeWriteToDisk(ByteBuffer in, - boolean includesMemstoreTS) { + boolean includesMemstoreTS, byte[] dummyHeader) { if (onDisk == DataBlockEncoding.NONE) { // there is no need to encode the block before writing it to disk return new Pair<ByteBuffer, BlockType>(in, BlockType.DATA); } ByteBuffer encodedBuffer = encodeBufferToHFileBlockBuffer(in, - onDisk, includesMemstoreTS); + onDisk, includesMemstoreTS, dummyHeader); return new Pair<ByteBuffer, BlockType>(encodedBuffer, BlockType.ENCODED_DATA); } @@ -175,12 +175,13 @@ public boolean useEncodedScanner(boolean isCompaction) { } private ByteBuffer encodeBufferToHFileBlockBuffer(ByteBuffer in, - DataBlockEncoding algo, boolean includesMemstoreTS) { + DataBlockEncoding algo, boolean includesMemstoreTS, + byte[] dummyHeader) { ByteArrayOutputStream encodedStream = new ByteArrayOutputStream(); DataOutputStream dataOut = new DataOutputStream(encodedStream); DataBlockEncoder encoder = algo.getEncoder(); try { - encodedStream.write(HFileBlock.DUMMY_HEADER); + encodedStream.write(dummyHeader); algo.writeIdInBytes(dataOut); encoder.compressKeyValues(dataOut, in, includesMemstoreTS); @@ -194,13 +195,16 @@ private ByteBuffer encodeBufferToHFileBlockBuffer(ByteBuffer in, private HFileBlock encodeDataBlock(HFileBlock block, DataBlockEncoding algo, boolean includesMemstoreTS) { ByteBuffer compressedBuffer = encodeBufferToHFileBlockBuffer( - block.getBufferWithoutHeader(), algo, includesMemstoreTS); - int sizeWithoutHeader = compressedBuffer.limit() - HFileBlock.HEADER_SIZE; + block.getBufferWithoutHeader(), algo, includesMemstoreTS, + HFileBlock.DUMMY_HEADER); + int sizeWithoutHeader = compressedBuffer.limit() - block.headerSize(); HFileBlock encodedBlock = new HFileBlock(BlockType.ENCODED_DATA, block.getOnDiskSizeWithoutHeader(), sizeWithoutHeader, block.getPrevBlockOffset(), compressedBuffer, HFileBlock.FILL_HEADER, block.getOffset(), - includesMemstoreTS); + includesMemstoreTS, block.getMinorVersion(), + block.getBytesPerChecksum(), block.getChecksumType(), + block.getOnDiskDataSizeWithHeader()); block.passSchemaMetricsTo(encodedBlock); return encodedBlock; } diff --git a/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV1.java b/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV1.java index 75cec7ef1e59..efa4603236f3 100644 --- a/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV1.java +++ b/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV1.java @@ -65,10 +65,10 @@ public class HFileReaderV1 extends AbstractHFileReader { public 
HFileReaderV1(Path path, FixedFileTrailer trailer, final FSDataInputStream fsdis, final long size, final boolean closeIStream, - final CacheConfig cacheConf) { + final CacheConfig cacheConf) throws IOException { super(path, trailer, fsdis, size, closeIStream, cacheConf); - trailer.expectVersion(1); + trailer.expectMajorVersion(1); fsBlockReader = new HFileBlock.FSReaderV1(fsdis, compressAlgo, fileSize); } diff --git a/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java b/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java index 4dcae08546e2..8b7809623377 100644 --- a/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java +++ b/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java @@ -31,6 +31,7 @@ import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory; @@ -73,6 +74,12 @@ private boolean shouldIncludeMemstoreTS() { */ private List<HFileBlock> loadOnOpenBlocks = new ArrayList<HFileBlock>(); + /** Minimum minor version supported by this HFile format */ + static final int MIN_MINOR_VERSION = 0; + + /** Maximum minor version supported by this HFile format */ + static final int MAX_MINOR_VERSION = 1; + /** * Opens a HFile. You must load the index before you can use it by calling * {@link #loadFileInfo()}. @@ -89,14 +96,18 @@ private boolean shouldIncludeMemstoreTS() { * still use its on-disk encoding in cache. */ public HFileReaderV2(Path path, FixedFileTrailer trailer, - final FSDataInputStream fsdis, final long size, + final FSDataInputStream fsdis, final FSDataInputStream fsdisNoFsChecksum, + final long size, final boolean closeIStream, final CacheConfig cacheConf, - DataBlockEncoding preferredEncodingInCache) + DataBlockEncoding preferredEncodingInCache, final HFileSystem hfs) throws IOException { - super(path, trailer, fsdis, size, closeIStream, cacheConf); - trailer.expectVersion(2); + super(path, trailer, fsdis, fsdisNoFsChecksum, size, + closeIStream, cacheConf, hfs); + trailer.expectMajorVersion(2); + validateMinorVersion(path, trailer.getMinorVersion()); HFileBlock.FSReaderV2 fsBlockReaderV2 = new HFileBlock.FSReaderV2(fsdis, - compressAlgo, fileSize); + fsdisNoFsChecksum, + compressAlgo, fileSize, trailer.getMinorVersion(), hfs, path); this.fsBlockReader = fsBlockReaderV2; // upcast // Comparator class name is stored in the trailer in version 2. 
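// [Editor's sketch] The beforeWriteToDisk change a little above threads a
// caller-chosen dummyHeader through the encoders: the encoded stream is laid
// out with a placeholder of the right length first, and the real header is
// back-filled once the final sizes are known. The pattern in isolation
// (method and parameter names are illustrative):
import java.io.ByteArrayOutputStream;
import java.io.IOException;

class DummyHeaderSketch {
  static byte[] encodeWithPlaceholder(byte[] dummyHeader, byte[] encodedKvs,
      byte[] finishedHeader) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    out.write(dummyHeader);                  // placeholder: correct length only
    out.write(encodedKvs);                   // encoded key/values
    byte[] block = out.toByteArray();
    // Back-fill the real header (same length as the dummy by construction).
    System.arraycopy(finishedHeader, 0, block, 0, finishedHeader.length);
    return block;
  }
}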
@@ -411,9 +422,15 @@ public void close(boolean evictOnClose) throws IOException { + " block(s)"); } } - if (closeIStream && istream != null) { - istream.close(); - istream = null; + if (closeIStream) { + if (istream != istreamNoFsChecksum && istreamNoFsChecksum != null) { + istreamNoFsChecksum.close(); + istreamNoFsChecksum = null; + } + if (istream != null) { + istream.close(); + istream = null; + } } } @@ -915,9 +932,9 @@ private void updateCurrentBlock(HFileBlock newBlock) { private ByteBuffer getEncodedBuffer(HFileBlock newBlock) { ByteBuffer origBlock = newBlock.getBufferReadOnly(); ByteBuffer encodedBlock = ByteBuffer.wrap(origBlock.array(), - origBlock.arrayOffset() + HFileBlock.HEADER_SIZE + + origBlock.arrayOffset() + newBlock.headerSize() + DataBlockEncoding.ID_SIZE, - origBlock.limit() - HFileBlock.HEADER_SIZE - + newBlock.getUncompressedSizeWithoutHeader() - DataBlockEncoding.ID_SIZE).slice(); return encodedBlock; } @@ -1053,4 +1070,19 @@ public boolean isFileInfoLoaded() { return true; // We load file info in constructor in version 2. } + /** + * Validates that the minor version is within acceptable limits. + * Otherwise throws an Runtime exception + */ + private void validateMinorVersion(Path path, int minorVersion) { + if (minorVersion < MIN_MINOR_VERSION || + minorVersion > MAX_MINOR_VERSION) { + String msg = "Minor version for path " + path + + " is expected to be between " + + MIN_MINOR_VERSION + " and " + MAX_MINOR_VERSION + + " but is found to be " + minorVersion; + LOG.error(msg); + throw new RuntimeException(msg); + } + } } diff --git a/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV1.java b/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV1.java index 0b5225541a19..6d251d8337de 100644 --- a/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV1.java +++ b/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV1.java @@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.io.hfile.HFile.Writer; import org.apache.hadoop.hbase.regionserver.MemStore; import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics; +import org.apache.hadoop.hbase.util.ChecksumType; import org.apache.hadoop.hbase.util.BloomFilterWriter; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.Writable; @@ -92,8 +93,9 @@ static class WriterFactoryV1 extends HFile.WriterFactory { public Writer createWriter(FileSystem fs, Path path, FSDataOutputStream ostream, int blockSize, Algorithm compressAlgo, HFileDataBlockEncoder dataBlockEncoder, - KeyComparator comparator) - throws IOException { + KeyComparator comparator, final ChecksumType checksumType, + final int bytesPerChecksum) throws IOException { + // version 1 does not implement checksums return new HFileWriterV1(conf, cacheConf, fs, path, ostream, blockSize, compressAlgo, dataBlockEncoder, comparator); } @@ -149,7 +151,13 @@ private void finishBlock() throws IOException { HFileBlock block = new HFileBlock(BlockType.DATA, (int) (outputStream.getPos() - blockBegin), bytes.length, -1, ByteBuffer.wrap(bytes, 0, bytes.length), HFileBlock.FILL_HEADER, - blockBegin, MemStore.NO_PERSISTENT_TS); + blockBegin, MemStore.NO_PERSISTENT_TS, + HFileBlock.MINOR_VERSION_NO_CHECKSUM, // minor version + 0, // bytesPerChecksum + ChecksumType.NULL.getCode(), // checksum type + (int) (outputStream.getPos() - blockBegin) + + HFileBlock.HEADER_SIZE_NO_CHECKSUM); // onDiskDataSizeWithHeader + block = blockEncoder.diskToCacheFormat(block, false); passSchemaMetricsTo(block); cacheConf.getBlockCache().cacheBlock( @@ -174,7 
+182,7 @@ private void newBlock() throws IOException { if (cacheConf.shouldCacheDataOnWrite()) { this.baos = new ByteArrayOutputStream(); this.baosDos = new DataOutputStream(baos); - baosDos.write(HFileBlock.DUMMY_HEADER); + baosDos.write(HFileBlock.DUMMY_HEADER_NO_CHECKSUM); } } @@ -332,7 +340,8 @@ public void close() throws IOException { finishBlock(); - FixedFileTrailer trailer = new FixedFileTrailer(1); + FixedFileTrailer trailer = new FixedFileTrailer(1, + HFileBlock.MINOR_VERSION_NO_CHECKSUM); // Write out the metadata blocks if any. ArrayList<Long> metaOffsets = null; diff --git a/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java b/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java index 2fe36ff7e9b5..3392400d9d77 100644 --- a/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java +++ b/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java @@ -38,6 +38,7 @@ import org.apache.hadoop.hbase.io.hfile.HFile.Writer; import org.apache.hadoop.hbase.io.hfile.HFileBlock.BlockWritable; import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics; +import org.apache.hadoop.hbase.util.ChecksumType; import org.apache.hadoop.hbase.util.BloomFilterWriter; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.Writable; @@ -81,6 +82,10 @@ public class HFileWriterV2 extends AbstractHFileWriter { private List<BlockWritable> additionalLoadOnOpenData = new ArrayList<BlockWritable>(); + /** Checksum related settings */ + private ChecksumType checksumType = HFile.DEFAULT_CHECKSUM_TYPE; + private int bytesPerChecksum = HFile.DEFAULT_BYTES_PER_CHECKSUM; + private final boolean includeMemstoreTS = true; private long maxMemstoreTS = 0; @@ -93,9 +98,10 @@ static class WriterFactoryV2 extends HFile.WriterFactory { public Writer createWriter(FileSystem fs, Path path, FSDataOutputStream ostream, int blockSize, Compression.Algorithm compress, HFileDataBlockEncoder blockEncoder, - final KeyComparator comparator) throws IOException { + final KeyComparator comparator, final ChecksumType checksumType, + final int bytesPerChecksum) throws IOException { return new HFileWriterV2(conf, cacheConf, fs, path, ostream, blockSize, - compress, blockEncoder, comparator); + compress, blockEncoder, comparator, checksumType, bytesPerChecksum); } } @@ -103,11 +109,14 @@ public Writer createWriter(FileSystem fs, Path path, public HFileWriterV2(Configuration conf, CacheConfig cacheConf, FileSystem fs, Path path, FSDataOutputStream ostream, int blockSize, Compression.Algorithm compressAlgo, HFileDataBlockEncoder blockEncoder, - final KeyComparator comparator) throws IOException { + final KeyComparator comparator, final ChecksumType checksumType, + final int bytesPerChecksum) throws IOException { super(cacheConf, ostream == null ? 
createOutputStream(conf, fs, path) : ostream, path, blockSize, compressAlgo, blockEncoder, comparator); SchemaMetrics.configureGlobally(conf); + this.checksumType = checksumType; + this.bytesPerChecksum = bytesPerChecksum; finishInit(conf); } @@ -118,7 +127,7 @@ private void finishInit(final Configuration conf) { // HFile filesystem-level (non-caching) block writer fsBlockWriter = new HFileBlock.Writer(compressAlgo, blockEncoder, - includeMemstoreTS); + includeMemstoreTS, checksumType, bytesPerChecksum); // Data block index writer boolean cacheIndexesOnWrite = cacheConf.shouldCacheIndexesOnWrite(); @@ -356,7 +365,8 @@ public void close() throws IOException { finishBlock(); writeInlineBlocks(true); - FixedFileTrailer trailer = new FixedFileTrailer(2); + FixedFileTrailer trailer = new FixedFileTrailer(2, + HFileReaderV2.MAX_MINOR_VERSION); // Write out the metadata blocks if any. if (!metaNames.isEmpty()) { diff --git a/src/main/java/org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.java b/src/main/java/org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.java index 82541df5610e..8a6351fd1b00 100644 --- a/src/main/java/org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.java +++ b/src/main/java/org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.java @@ -46,7 +46,7 @@ public HFileBlock diskToCacheFormat(HFileBlock block, boolean isCompaction) { @Override public Pair<ByteBuffer, BlockType> beforeWriteToDisk( - ByteBuffer in, boolean includesMemstoreTS) { + ByteBuffer in, boolean includesMemstoreTS, byte[] dummyHeader) { return new Pair<ByteBuffer, BlockType>(in, BlockType.DATA); } diff --git a/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java b/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java index e5bb7ea22e09..19ae18c044bb 100644 --- a/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java +++ b/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java @@ -72,6 +72,7 @@ import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl; import org.apache.hadoop.hbase.io.hfile.HFile; import org.apache.hadoop.hbase.io.hfile.HFileScanner; +import org.apache.hadoop.hbase.regionserver.Store; import org.apache.hadoop.hbase.regionserver.StoreFile; import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType; import org.apache.hadoop.hbase.util.Bytes; @@ -553,6 +554,8 @@ private static void copyHFileHalf( .withCompression(compression) .withDataBlockEncoder(dataBlockEncoder) .withBloomType(bloomFilterType) + .withChecksumType(Store.getChecksumType(conf)) + .withBytesPerChecksum(Store.getBytesPerChecksum(conf)) .build(); HFileScanner scanner = halfReader.getScanner(false, false, false); scanner.seekTo(); diff --git a/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index d422cf25abc3..555ffdcb6a0b 100644 --- a/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -3624,7 +3624,14 @@ public static HRegion openHRegion(final HRegionInfo info, } Path dir = HTableDescriptor.getTableDir(FSUtils.getRootDir(conf), info.getTableName()); - HRegion r = HRegion.newHRegion(dir, wal, FileSystem.get(conf), conf, info, + FileSystem fs = null; + if (rsServices != null) { + fs = rsServices.getFileSystem(); + } + if (fs == null) { + fs = FileSystem.get(conf); + } + HRegion r = HRegion.newHRegion(dir, wal, fs, conf, info, htd, rsServices); return 
r.openHRegion(reporter); } diff --git a/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index 35205a520f11..03d6b9c8cd24 100644 --- a/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ b/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -106,6 +106,7 @@ import org.apache.hadoop.hbase.filter.BinaryComparator; import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.filter.WritableByteArrayComparable; +import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.hfile.BlockCache; import org.apache.hadoop.hbase.io.hfile.BlockCacheColumnFamilySummary; import org.apache.hadoop.hbase.io.hfile.CacheConfig; @@ -195,7 +196,8 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, protected final Configuration conf; protected final AtomicBoolean haveRootRegion = new AtomicBoolean(false); - private FileSystem fs; + private HFileSystem fs; + private boolean useHBaseChecksum; // verify hbase checksums? private Path rootDir; private final Random rand = new Random(); @@ -368,6 +370,11 @@ public HRegionServer(Configuration conf) this.isOnline = false; checkCodecs(this.conf); + // do we use checksum verfication in the hbase? If hbase checksum verification + // is enabled, then we automatically switch off hdfs checksum verification. + this.useHBaseChecksum = conf.getBoolean( + HConstants.HBASE_CHECKSUM_VERIFICATION, true); + // Config'ed params this.numRetries = conf.getInt("hbase.client.retries.number", 10); this.threadWakeFrequency = conf.getInt(HConstants.THREAD_WAKE_FREQUENCY, @@ -978,7 +985,7 @@ protected void handleReportForDutyResponse(final MapWritable c) // to defaults). this.conf.set("fs.defaultFS", this.conf.get("hbase.rootdir")); // Get fs instance used by this RS - this.fs = FileSystem.get(this.conf); + this.fs = new HFileSystem(this.conf, this.useHBaseChecksum); this.rootDir = new Path(this.conf.get(HConstants.HBASE_DIR)); this.tableDescriptors = new FSTableDescriptors(this.fs, this.rootDir, true); this.hlog = setupWALAndReplication(); @@ -1278,7 +1285,7 @@ private HLog setupWALAndReplication() throws IOException { * @throws IOException */ protected HLog instantiateHLog(Path logdir, Path oldLogDir) throws IOException { - return new HLog(this.fs, logdir, oldLogDir, this.conf, + return new HLog(this.fs.getBackingFs(), logdir, oldLogDir, this.conf, getWALActionListeners(), this.serverNameFromMasterPOV.toString()); } @@ -3165,7 +3172,7 @@ protected Path getRootDir() { /** * @return Return the fs. 
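// [Editor's sketch] HFileSystem (used above and in the tests below via
// getBackingFs()/getNoChecksumFs()) wraps two views of the same filesystem:
// one with HDFS checksum verification on and one with it off, selected by
// the HBASE_CHECKSUM_VERIFICATION setting. A condensed, hypothetical shape;
// the literal key string is an assumption standing in for that constant.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

class DualChecksumFsSketch {
  private final FileSystem backingFs;        // HDFS checksums enabled
  private final FileSystem noChecksumFs;     // HDFS verification turned off
  private final boolean useHBaseChecksum;

  DualChecksumFsSketch(FileSystem backing, FileSystem noChecksum,
      Configuration conf) {
    this.backingFs = backing;
    this.noChecksumFs = noChecksum;
    this.useHBaseChecksum =
        conf.getBoolean("hbase.regionserver.checksum.verify", true);
  }

  FileSystem getBackingFs() { return backingFs; }

  /** Stream source for HFile data reads: skip HDFS verification only when
   *  HBase-level checksums will be verified instead. */
  FileSystem getNoChecksumFs() {
    return useHBaseChecksum ? noChecksumFs : backingFs;
  }
}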
*/ - protected FileSystem getFileSystem() { + public FileSystem getFileSystem() { return fs; } diff --git a/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java b/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java index 9b7e88b34e1d..6884d531f8be 100644 --- a/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java +++ b/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java @@ -23,6 +23,7 @@ import java.util.Map; import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hbase.catalog.CatalogTracker; import org.apache.hadoop.hbase.ipc.RpcServer; import org.apache.hadoop.hbase.regionserver.wal.HLog; @@ -80,5 +81,9 @@ public void postOpenDeployTasks(final HRegion r, final CatalogTracker ct, * @return map of regions in transition in this RS */ public Map<byte[], Boolean> getRegionsInTransitionInRS(); - + + /** + * @return Return the FileSystem object used by the regionserver + */ + public FileSystem getFileSystem(); } diff --git a/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java b/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java index d884598fa0aa..e4ccac0172d3 100644 --- a/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java +++ b/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java @@ -70,6 +70,7 @@ import org.apache.hadoop.hbase.regionserver.metrics.SchemaConfigured; import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.ChecksumType; import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.hbase.util.CollectionBackedScanner; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; @@ -157,6 +158,10 @@ public class Store extends SchemaConfigured implements HeapSize { private final Compression.Algorithm compactionCompression; private HFileDataBlockEncoder dataBlockEncoder; + /** Checksum configuration */ + private ChecksumType checksumType; + private int bytesPerChecksum; + // Comparing KeyValues final KeyValue.KVComparator comparator; @@ -246,6 +251,35 @@ protected Store(Path basedir, HRegion region, HColumnDescriptor family, "hbase.hstore.close.check.interval", 10*1000*1000 /* 10 MB */); } this.storefiles = sortAndClone(loadStoreFiles()); + + // Initialize checksum type from name. The names are CRC32, CRC32C, etc. + this.checksumType = getChecksumType(conf); + // initilize bytes per checksum + this.bytesPerChecksum = getBytesPerChecksum(conf); + } + + /** + * Returns the configured bytesPerChecksum value. + * @param conf The configuration + * @return The bytesPerChecksum that is set in the configuration + */ + public static int getBytesPerChecksum(Configuration conf) { + return conf.getInt(HConstants.BYTES_PER_CHECKSUM, + HFile.DEFAULT_BYTES_PER_CHECKSUM); + } + + /** + * Returns the configured checksum algorithm. 
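// [Editor's sketch] Usage of the two static helpers defined here, reading the
// per-store checksum settings from the configuration. The literal keys and
// defaults are assumptions standing in for HConstants.BYTES_PER_CHECKSUM,
// HConstants.CHECKSUM_TYPE_NAME, and the HFile defaults.
import org.apache.hadoop.conf.Configuration;

class ChecksumConfigSketch {
  static void printChecksumSettings(Configuration conf) {
    int bytesPerChecksum =
        conf.getInt("hbase.hstore.bytes.per.checksum", 16 * 1024);
    String name = conf.get("hbase.hstore.checksum.algorithm");
    String type = (name == null) ? "CRC32" : name;   // CRC32 assumed default
    System.out.println(type + " every " + bytesPerChecksum + " bytes");
  }
}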
+ * @param conf The configuration + * @return The checksum algorithm that is set in the configuration + */ + public static ChecksumType getChecksumType(Configuration conf) { + String checksumName = conf.get(HConstants.CHECKSUM_TYPE_NAME); + if (checksumName == null) { + return HFile.DEFAULT_CHECKSUM_TYPE; + } else { + return ChecksumType.nameToType(checksumName); + } } public HColumnDescriptor getFamily() { @@ -799,6 +833,8 @@ private StoreFile.Writer createWriterInTmp(int maxKeyCount, .withComparator(comparator) .withBloomType(family.getBloomFilterType()) .withMaxKeyCount(maxKeyCount) + .withChecksumType(checksumType) + .withBytesPerChecksum(bytesPerChecksum) .build(); // The store file writer's path does not include the CF name, so we need // to configure the HFile writer directly. @@ -2192,8 +2228,8 @@ public CacheConfig getCacheConfig() { public static final long FIXED_OVERHEAD = ClassSize.align(SchemaConfigured.SCHEMA_CONFIGURED_UNALIGNED_HEAP_SIZE + - + (19 * ClassSize.REFERENCE) + (6 * Bytes.SIZEOF_LONG) - + (5 * Bytes.SIZEOF_INT) + Bytes.SIZEOF_BOOLEAN); + + (20 * ClassSize.REFERENCE) + (6 * Bytes.SIZEOF_LONG) + + (6 * Bytes.SIZEOF_INT) + Bytes.SIZEOF_BOOLEAN); public static final long DEEP_OVERHEAD = ClassSize.align(FIXED_OVERHEAD + ClassSize.OBJECT + ClassSize.REENTRANT_LOCK diff --git a/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java b/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java index 12ebc0a554cd..2e98b393f472 100644 --- a/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java +++ b/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java @@ -60,6 +60,7 @@ import org.apache.hadoop.hbase.regionserver.metrics.SchemaConfigured; import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder; import org.apache.hadoop.hbase.io.hfile.NoOpDataBlockEncoder; +import org.apache.hadoop.hbase.util.ChecksumType; import org.apache.hadoop.hbase.util.BloomFilter; import org.apache.hadoop.hbase.util.BloomFilterFactory; import org.apache.hadoop.hbase.util.BloomFilterWriter; @@ -699,6 +700,8 @@ public static class WriterBuilder { private long maxKeyCount = 0; private Path dir; private Path filePath; + private ChecksumType checksumType = HFile.DEFAULT_CHECKSUM_TYPE; + private int bytesPerChecksum = HFile.DEFAULT_BYTES_PER_CHECKSUM; public WriterBuilder(Configuration conf, CacheConfig cacheConf, FileSystem fs, int blockSize) { @@ -765,6 +768,24 @@ public WriterBuilder withMaxKeyCount(long maxKeyCount) { return this; } + /** + * @param checksumType the type of checksum + * @return this (for chained invocation) + */ + public WriterBuilder withChecksumType(ChecksumType checksumType) { + this.checksumType = checksumType; + return this; + } + + /** + * @param bytesPerChecksum the number of bytes per checksum chunk + * @return this (for chained invocation) + */ + public WriterBuilder withBytesPerChecksum(int bytesPerChecksum) { + this.bytesPerChecksum = bytesPerChecksum; + return this; + } + /** * Create a store file writer. Client is responsible for closing file when * done. 
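// [Editor's sketch] The new withChecksumType/withBytesPerChecksum knobs slot
// into the existing fluent chain, as createWriterInTmp above shows. A usage
// sketch; withOutputDir and the literal values are illustrative assumptions.
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.util.ChecksumType;

class WriterBuilderUsageSketch {
  static StoreFile.Writer newWriter(Configuration conf, CacheConfig cacheConf,
      FileSystem fs, Path dir) throws IOException {
    return new StoreFile.WriterBuilder(conf, cacheConf, fs, 64 * 1024)
        .withOutputDir(dir)
        .withChecksumType(ChecksumType.CRC32)  // one CRC32 value...
        .withBytesPerChecksum(512)             // ...per 512-byte chunk
        .build();
  }
}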
If metadata, add BEFORE closing using @@ -798,7 +819,8 @@ public Writer build() throws IOException { comparator = KeyValue.COMPARATOR; } return new Writer(fs, filePath, blockSize, compressAlgo, dataBlockEncoder, - conf, cacheConf, comparator, bloomType, maxKeyCount); + conf, cacheConf, comparator, bloomType, maxKeyCount, checksumType, + bytesPerChecksum); } } @@ -896,6 +918,12 @@ public static class Writer { protected HFileDataBlockEncoder dataBlockEncoder; + /** Checksum type */ + protected ChecksumType checksumType; + + /** Bytes per Checksum */ + protected int bytesPerChecksum; + TimeRangeTracker timeRangeTracker = new TimeRangeTracker(); /* isTimeRangeTrackerSet keeps track if the timeRange has already been set * When flushing a memstore, we set TimeRange and use this variable to @@ -918,13 +946,16 @@ public static class Writer { * @param bloomType bloom filter setting * @param maxKeys the expected maximum number of keys to be added. Was used * for Bloom filter size in {@link HFile} format version 1. + * @param checksumType the checksum type + * @param bytesPerChecksum the number of bytes per checksum value * @throws IOException problem writing to FS */ private Writer(FileSystem fs, Path path, int blocksize, Compression.Algorithm compress, HFileDataBlockEncoder dataBlockEncoder, final Configuration conf, CacheConfig cacheConf, - final KVComparator comparator, BloomType bloomType, long maxKeys) + final KVComparator comparator, BloomType bloomType, long maxKeys, + final ChecksumType checksumType, final int bytesPerChecksum) throws IOException { this.dataBlockEncoder = dataBlockEncoder != null ? dataBlockEncoder : NoOpDataBlockEncoder.INSTANCE; @@ -934,6 +965,8 @@ private Writer(FileSystem fs, Path path, int blocksize, .withCompression(compress) .withDataBlockEncoder(dataBlockEncoder) .withComparator(comparator.getRawComparator()) + .withChecksumType(checksumType) + .withBytesPerChecksum(bytesPerChecksum) .create(); this.kvComparator = comparator; @@ -964,6 +997,8 @@ private Writer(FileSystem fs, Path path, int blocksize, LOG.info("Delete Family Bloom filter type for " + path + ": " + deleteFamilyBloomFilterWriter.getClass().getSimpleName()); } + this.checksumType = checksumType; + this.bytesPerChecksum = bytesPerChecksum; } /** @@ -1660,7 +1695,7 @@ public long getTotalBloomSize() { } public int getHFileVersion() { - return reader.getTrailer().getVersion(); + return reader.getTrailer().getMajorVersion(); } HFile.Reader getHFileReader() { diff --git a/src/main/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java b/src/main/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java index ad9345998df1..e5e85b02bbd7 100644 --- a/src/main/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java +++ b/src/main/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java @@ -248,6 +248,12 @@ public class RegionServerMetrics implements Updater { public final MetricsTimeVaryingLong regionSplitFailureCount = new MetricsTimeVaryingLong("regionSplitFailureCount", registry); + /** + * Number of times checksum verification failed. 
+ */ + public final MetricsLongValue checksumFailuresCount = + new MetricsLongValue("checksumFailuresCount", registry); + public RegionServerMetrics() { MetricsContext context = MetricsUtil.getContext("hbase"); metricsRecord = MetricsUtil.createRecord(context, "regionserver"); @@ -346,6 +352,8 @@ public void doUpdates(MetricsContext caller) { // HFile metrics, positional reads ops = HFile.getPreadOps(); if (ops != 0) this.fsPreadLatency.inc(ops, HFile.getPreadTimeMs()); + this.checksumFailuresCount.set(HFile.getChecksumFailuresCount()); + /* NOTE: removed HFile write latency. 2 reasons: * 1) Mixing HLog latencies are far higher priority since they're * on-demand and HFile is used in background (compact/flush) @@ -366,6 +374,7 @@ public void doUpdates(MetricsContext caller) { this.slowHLogAppendCount.pushMetric(this.metricsRecord); this.regionSplitSuccessCount.pushMetric(this.metricsRecord); this.regionSplitFailureCount.pushMetric(this.metricsRecord); + this.checksumFailuresCount.pushMetric(this.metricsRecord); } this.metricsRecord.update(); } diff --git a/src/main/java/org/apache/hadoop/hbase/util/ChecksumFactory.java b/src/main/java/org/apache/hadoop/hbase/util/ChecksumFactory.java new file mode 100644 index 000000000000..d61238bd02e3 --- /dev/null +++ b/src/main/java/org/apache/hadoop/hbase/util/ChecksumFactory.java @@ -0,0 +1,99 @@ +/** + * Copyright The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.util; + +import java.io.IOException; +import java.lang.ClassNotFoundException; +import java.util.zip.Checksum; +import java.lang.reflect.Constructor; + +/** + * Utility class that is used to generate a Checksum object. + * The Checksum implementation is pluggable and an application + * can specify their own class that implements their own + * Checksum algorithm. + */ +public class ChecksumFactory { + + static private final Class<?>[] EMPTY_ARRAY = new Class[]{}; + + /** + * Create a new instance of a Checksum object. + * @return The newly created Checksum object + */ + static public Checksum newInstance(String className) throws IOException { + try { + Class<?> clazz = getClassByName(className); + return (Checksum)newInstance(clazz); + } catch (ClassNotFoundException e) { + throw new IOException(e); + } + } + + /** + * Returns a Constructor that can be used to create a Checksum object. + * @return The Constructor that can be used to create a + * new Checksum object. 
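// [Editor's sketch] The metrics thread samples a process-wide counter that
// the read path increments (HFile.checksumFailures above). The tests later
// in this patch expect each call to HFile.getChecksumFailuresCount() to
// report only failures since the previous call, which suggests a
// read-and-reset getter; that semantic is an inference, sketched here.
import java.util.concurrent.atomic.AtomicLong;

class ChecksumFailureCounterSketch {
  static final AtomicLong checksumFailures = new AtomicLong();

  /** Failures observed since the last call (inferred read-and-reset). */
  static long getChecksumFailuresCount() {
    return checksumFailures.getAndSet(0);
  }
}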
+ * @param theClass classname for which an constructor is created + * @return a new Constructor object + */ + static public Constructor<?> newConstructor(String className) + throws IOException { + try { + Class<?> clazz = getClassByName(className); + Constructor<?> ctor = clazz.getDeclaredConstructor(EMPTY_ARRAY); + ctor.setAccessible(true); + return ctor; + } catch (ClassNotFoundException e) { + throw new IOException(e); + } catch (java.lang.NoSuchMethodException e) { + throw new IOException(e); + } + } + + /** Create an object for the given class and initialize it from conf + * + * @param theClass class of which an object is created + * @return a new object + */ + static private <T> T newInstance(Class<T> theClass) { + T result; + try { + Constructor<T> ctor = theClass.getDeclaredConstructor(EMPTY_ARRAY); + ctor.setAccessible(true); + result = ctor.newInstance(); + } catch (Exception e) { + throw new RuntimeException(e); + } + return result; + } + + /** + * Load a class by name. + * @param name the class name. + * @return the class object. + * @throws ClassNotFoundException if the class is not found. + */ + static private Class<?> getClassByName(String name) + throws ClassNotFoundException { + ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); + return Class.forName(name, true, classLoader); + } +} diff --git a/src/main/java/org/apache/hadoop/hbase/util/ChecksumType.java b/src/main/java/org/apache/hadoop/hbase/util/ChecksumType.java new file mode 100644 index 000000000000..d2329e10c588 --- /dev/null +++ b/src/main/java/org/apache/hadoop/hbase/util/ChecksumType.java @@ -0,0 +1,183 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.util; + +import java.io.IOException; +import java.lang.reflect.Constructor; +import java.util.zip.Checksum; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.util.ChecksumFactory; + +/** + * Checksum types. The Checksum type is a one byte number + * that stores a representation of the checksum algorithm + * used to encode a hfile. The ordinal of these cannot + * change or else you risk breaking all existing HFiles out there. 
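// [Editor's sketch] ChecksumFactory resolves implementations reflectively via
// the context classloader, which is what lets the CRC32 type below prefer
// Hadoop's PureJavaCrc32 and fall back to the JDK class. The resolution
// pattern, self-contained:
import java.lang.reflect.Constructor;
import java.util.zip.CRC32;
import java.util.zip.Checksum;

class ChecksumResolveSketch {
  static Checksum newCrc32() {
    try {
      Class<?> clazz = Class.forName("org.apache.hadoop.util.PureJavaCrc32",
          true, Thread.currentThread().getContextClassLoader());
      Constructor<?> ctor = clazz.getDeclaredConstructor();
      ctor.setAccessible(true);
      return (Checksum) ctor.newInstance();
    } catch (ReflectiveOperationException e) {
      return new CRC32();                    // always available in the JDK
    }
  }
}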
+ */ +public enum ChecksumType { + + NULL((byte)0) { + @Override + public String getName() { + return "NULL"; + } + @Override + public void initialize() { + // do nothing + } + @Override + public Checksum getChecksumObject() throws IOException { + return null; // checksums not used + } + }, + + CRC32((byte)1) { + private volatile Constructor<?> ctor; + + @Override + public String getName() { + return "CRC32"; + } + + @Override + public void initialize() { + final String PURECRC32 = "org.apache.hadoop.util.PureJavaCrc32"; + final String JDKCRC = "java.util.zip.CRC32"; + LOG = LogFactory.getLog(ChecksumType.class); + + // check if hadoop library is available + try { + ctor = ChecksumFactory.newConstructor(PURECRC32); + LOG.info("Checksum using " + PURECRC32); + } catch (Exception e) { + LOG.info(PURECRC32 + " not available."); + } + try { + // The default checksum class name is java.util.zip.CRC32. + // This is available on all JVMs. + if (ctor == null) { + ctor = ChecksumFactory.newConstructor(JDKCRC); + LOG.info("Checksum can use " + JDKCRC); + } + } catch (Exception e) { + LOG.warn(JDKCRC + " not available. ", e); + } + } + + @Override + public Checksum getChecksumObject() throws IOException { + if (ctor == null) { + throw new IOException("Bad constructor for " + getName()); + } + try { + return (Checksum)ctor.newInstance(); + } catch (Exception e) { + throw new IOException(e); + } + } + }, + + CRC32C((byte)2) { + private transient Constructor<?> ctor; + + @Override + public String getName() { + return "CRC32C"; + } + + @Override + public void initialize() { + final String PURECRC32C = "org.apache.hadoop.util.PureJavaCrc32C"; + LOG = LogFactory.getLog(ChecksumType.class); + try { + ctor = ChecksumFactory.newConstructor(PURECRC32C); + LOG.info("Checksum can use " + PURECRC32C); + } catch (Exception e) { + LOG.info(PURECRC32C + " not available. "); + } + } + + @Override + public Checksum getChecksumObject() throws IOException { + if (ctor == null) { + throw new IOException("Bad constructor for " + getName()); + } + try { + return (Checksum)ctor.newInstance(); + } catch (Exception e) { + throw new IOException(e); + } + } + }; + + private final byte code; + protected Log LOG; + + /** initializes the relevant checksum class object */ + abstract void initialize(); + + /** returns the name of this checksum type */ + public abstract String getName(); + + private ChecksumType(final byte c) { + this.code = c; + initialize(); + } + + /** returns a object that can be used to generate/validate checksums */ + public abstract Checksum getChecksumObject() throws IOException; + + public byte getCode() { + return this.code; + } + + /** + * Cannot rely on enum ordinals . They change if item is removed or moved. + * Do our own codes. + * @param b + * @return Type associated with passed code. + */ + public static ChecksumType codeToType(final byte b) { + for (ChecksumType t : ChecksumType.values()) { + if (t.getCode() == b) { + return t; + } + } + throw new RuntimeException("Unknown checksum type code " + b); + } + + /** + * Map a checksum name to a specific type. + * Do our own names. + * @param b + * @return Type associated with passed code. 
+ */ + public static ChecksumType nameToType(final String name) { + for (ChecksumType t : ChecksumType.values()) { + if (t.getName().equals(name)) { + return t; + } + } + throw new RuntimeException("Unknown checksum type name " + name); + } +} diff --git a/src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilter.java b/src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilter.java index 04cf70df82c1..5ee0c8febc6b 100644 --- a/src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilter.java +++ b/src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilter.java @@ -109,7 +109,7 @@ public boolean contains(byte[] key, int keyOffset, int keyLength, ByteBuffer bloomBuf = bloomBlock.getBufferReadOnly(); result = ByteBloomFilter.contains(key, keyOffset, keyLength, - bloomBuf.array(), bloomBuf.arrayOffset() + HFileBlock.HEADER_SIZE, + bloomBuf.array(), bloomBuf.arrayOffset() + bloomBlock.headerSize(), bloomBlock.getUncompressedSizeWithoutHeader(), hash, hashCount); } diff --git a/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java index 38eb6a845996..5305607614c2 100644 --- a/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java +++ b/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java @@ -58,7 +58,9 @@ import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; +import org.apache.hadoop.hbase.io.hfile.ChecksumUtil; import org.apache.hadoop.hbase.io.hfile.Compression; import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm; import org.apache.hadoop.hbase.master.HMaster; @@ -189,6 +191,9 @@ public HBaseTestingUtility() { public HBaseTestingUtility(Configuration conf) { this.conf = conf; + + // a hbase checksum verification failure will cause unit tests to fail + ChecksumUtil.generateExceptionForChecksumFailureForTest(true); } /** @@ -1437,7 +1442,7 @@ public void setDFSCluster(MiniDFSCluster cluster) throws IOException { } public FileSystem getTestFileSystem() throws IOException { - return FileSystem.get(conf); + return HFileSystem.get(conf); } /** diff --git a/src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java b/src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java index 61ce0775cab6..106056ff0b03 100644 --- a/src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java +++ b/src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java @@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.MultithreadedTestUtil; import org.apache.hadoop.hbase.MultithreadedTestUtil.TestThread; import org.apache.hadoop.hbase.io.HeapSize; +import org.apache.hadoop.hbase.util.ChecksumType; import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics; public class CacheTestUtils { @@ -323,7 +324,9 @@ private static HFileBlockPair[] generateHFileBlocks(int blockSize, HFileBlock generated = new HFileBlock(BlockType.DATA, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader, prevBlockOffset, cachedBuffer, HFileBlock.DONT_FILL_HEADER, - blockSize, includesMemstoreTS); + blockSize, includesMemstoreTS, HFileBlock.MINOR_VERSION_NO_CHECKSUM, + 0, ChecksumType.NULL.getCode(), + onDiskSizeWithoutHeader + HFileBlock.HEADER_SIZE); String strKey; /* No conflicting keys */ diff --git a/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java 
b/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java index 6007d5a1e46a..c8163f3aa755 100644 --- a/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java +++ b/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java @@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.StoreFile; @@ -49,6 +50,7 @@ import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics; import org.apache.hadoop.hbase.util.BloomFilterFactory; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.ChecksumType; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.junit.After; import org.junit.Before; @@ -88,6 +90,8 @@ public class TestCacheOnWrite { private static final int INDEX_BLOCK_SIZE = 512; private static final int BLOOM_BLOCK_SIZE = 4096; private static final BloomType BLOOM_TYPE = StoreFile.BloomType.ROWCOL; + private static final ChecksumType CKTYPE = ChecksumType.CRC32; + private static final int CKBYTES = 512; /** The number of valid key types possible in a store file */ private static final int NUM_VALID_KEY_TYPES = @@ -192,7 +196,7 @@ public void setUp() throws IOException { conf.setBoolean(CacheConfig.CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, cowType.shouldBeCached(BlockType.BLOOM_CHUNK)); cowType.modifyConf(conf); - fs = FileSystem.get(conf); + fs = HFileSystem.get(conf); cacheConf = new CacheConfig(conf); blockCache = cacheConf.getBlockCache(); } @@ -292,6 +296,8 @@ public void writeStoreFile() throws IOException { .withComparator(KeyValue.COMPARATOR) .withBloomType(BLOOM_TYPE) .withMaxKeyCount(NUM_KV) + .withChecksumType(CKTYPE) + .withBytesPerChecksum(CKBYTES) .build(); final int rowLen = 32; diff --git a/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java b/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java new file mode 100644 index 000000000000..c037d2255d84 --- /dev/null +++ b/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java @@ -0,0 +1,290 @@ +/* + * Copyright The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.io.hfile; + +import static org.junit.Assert.*; + +import java.io.ByteArrayOutputStream; +import java.io.ByteArrayInputStream; +import java.io.DataOutputStream; +import java.io.DataInputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.zip.Checksum; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.fs.FSDataInputStream; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.MediumTests; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.fs.HFileSystem; +import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.ChecksumType; +import org.apache.hadoop.io.WritableUtils; +import org.apache.hadoop.io.compress.Compressor; + +import static org.apache.hadoop.hbase.io.hfile.Compression.Algorithm.*; +import org.junit.Before; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category(MediumTests.class) +public class TestChecksum { + // change this value to activate more logs + private static final boolean detailedLogging = true; + private static final boolean[] BOOLEAN_VALUES = new boolean[] { false, true }; + + private static final Log LOG = LogFactory.getLog(TestHFileBlock.class); + + static final Compression.Algorithm[] COMPRESSION_ALGORITHMS = { + NONE, GZ }; + + static final int[] BYTES_PER_CHECKSUM = { + 50, 500, 688, 16*1024, (16*1024+980), 64 * 1024}; + + private static final HBaseTestingUtility TEST_UTIL = + new HBaseTestingUtility(); + private FileSystem fs; + private HFileSystem hfs; + + @Before + public void setUp() throws Exception { + fs = HFileSystem.get(TEST_UTIL.getConfiguration()); + hfs = (HFileSystem)fs; + } + + /** + * Introduce checksum failures and check that we can still read + * the data + */ + @Test + public void testChecksumCorruption() throws IOException { + for (Compression.Algorithm algo : COMPRESSION_ALGORITHMS) { + for (boolean pread : new boolean[] { false, true }) { + LOG.info("testChecksumCorruption: Compression algorithm: " + algo + + ", pread=" + pread); + Path path = new Path(TEST_UTIL.getDataTestDir(), "blocks_v2_" + + algo); + FSDataOutputStream os = fs.create(path); + HFileBlock.Writer hbw = new HFileBlock.Writer(algo, null, + true, HFile.DEFAULT_CHECKSUM_TYPE, + HFile.DEFAULT_BYTES_PER_CHECKSUM); + long totalSize = 0; + for (int blockId = 0; blockId < 2; ++blockId) { + DataOutputStream dos = hbw.startWriting(BlockType.DATA); + for (int i = 0; i < 1234; ++i) + dos.writeInt(i); + hbw.writeHeaderAndData(os); + totalSize += hbw.getOnDiskSizeWithHeader(); + } + os.close(); + + // Use hbase checksums. + assertEquals(true, hfs.useHBaseChecksum()); + assertEquals(true, hfs.getNoChecksumFs() != hfs.getBackingFs()); + + // Do a read that purposely introduces checksum verification failures. 
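+ // FSReaderV2Test, defined at the bottom of this class, overrides
+ // validateBlockChecksum() to always return false, so every hbase-level
+ // verification fails and the reader falls back to hdfs checksums.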
+ FSDataInputStream is = fs.open(path);
+ HFileBlock.FSReader hbr = new FSReaderV2Test(is, algo,
+ totalSize, HFile.MAX_FORMAT_VERSION, fs, path);
+ HFileBlock b = hbr.readBlockData(0, -1, -1, pread);
+ b.sanityCheck();
+ assertEquals(4936, b.getUncompressedSizeWithoutHeader());
+ assertEquals(algo == GZ ? 2173 : 4936,
+ b.getOnDiskSizeWithoutHeader() - b.totalChecksumBytes());
+ // read data back from the hfile, exclude header and checksum
+ ByteBuffer bb = b.getBufferWithoutHeader(); // read back data
+ DataInputStream in = new DataInputStream(
+ new ByteArrayInputStream(
+ bb.array(), bb.arrayOffset(), bb.limit()));
+
+ // assert that we encountered hbase checksum verification failures
+ // but still used hdfs checksums and read data successfully.
+ assertEquals(1, HFile.getChecksumFailuresCount());
+ validateData(in);
+
+ // A single instance of hbase checksum failure causes the reader to
+ // switch off hbase checksum verification for the next 100 read
+ // requests. Verify that this is correct.
+ for (int i = 0; i <
+ HFileBlock.CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD + 1; i++) {
+ b = hbr.readBlockData(0, -1, -1, pread);
+ assertEquals(0, HFile.getChecksumFailuresCount());
+ }
+ // The next read should have hbase checksum verification re-enabled;
+ // we verify this by asserting that there was a hbase-checksum failure.
+ b = hbr.readBlockData(0, -1, -1, pread);
+ assertEquals(1, HFile.getChecksumFailuresCount());
+
+ // Since the above encountered a checksum failure, we switch
+ // back to not checking hbase checksums.
+ b = hbr.readBlockData(0, -1, -1, pread);
+ assertEquals(0, HFile.getChecksumFailuresCount());
+ is.close();
+
+ // Now, use a completely new reader. Switch off hbase checksums in
+ // the configuration. In this case, we should not detect
+ // any retries within hbase.
+ HFileSystem newfs = new HFileSystem(TEST_UTIL.getConfiguration(), false);
+ assertEquals(false, newfs.useHBaseChecksum());
+ is = newfs.open(path);
+ hbr = new FSReaderV2Test(is, algo,
+ totalSize, HFile.MAX_FORMAT_VERSION, newfs, path);
+ b = hbr.readBlockData(0, -1, -1, pread);
+ is.close();
+ b.sanityCheck();
+ assertEquals(4936, b.getUncompressedSizeWithoutHeader());
+ assertEquals(algo == GZ ? 2173 : 4936,
+ b.getOnDiskSizeWithoutHeader() - b.totalChecksumBytes());
+ // read data back from the hfile, exclude header and checksum
+ bb = b.getBufferWithoutHeader(); // read back data
+ in = new DataInputStream(new ByteArrayInputStream(
+ bb.array(), bb.arrayOffset(), bb.limit()));
+
+ // assert that we did not encounter hbase checksum verification failures
+ // but still used hdfs checksums and read data successfully.
+ assertEquals(0, HFile.getChecksumFailuresCount());
+ validateData(in);
+ }
+ }
+ }
+
+ /**
+ * Test different values of bytesPerChecksum
+ */
+ @Test
+ public void testChecksumChunks() throws IOException {
+ Compression.Algorithm algo = NONE;
+ for (boolean pread : new boolean[] { false, true }) {
+ for (int bytesPerChecksum : BYTES_PER_CHECKSUM) {
+ Path path = new Path(TEST_UTIL.getDataTestDir(), "checksumChunk_" +
+ algo + bytesPerChecksum);
+ FSDataOutputStream os = fs.create(path);
+ HFileBlock.Writer hbw = new HFileBlock.Writer(algo, null,
+ true, HFile.DEFAULT_CHECKSUM_TYPE, bytesPerChecksum);
+
+ // write one block. The block has data
+ // that is at least 6 times more than the checksum chunk size
+ long dataSize = 0;
+ DataOutputStream dos = hbw.startWriting(BlockType.DATA);
+ for (; dataSize < 6 * bytesPerChecksum;) {
+ for (int i = 0; i < 1234; ++i) {
+ dos.writeInt(i);
+ dataSize += 4;
+ }
+ }
+ hbw.writeHeaderAndData(os);
+ long totalSize = hbw.getOnDiskSizeWithHeader();
+ os.close();
+
+ long expectedChunks = ChecksumUtil.numChunks(
+ dataSize + HFileBlock.HEADER_SIZE,
+ bytesPerChecksum);
+ LOG.info("testChecksumChunks: pread=" + pread +
+ ", bytesPerChecksum=" + bytesPerChecksum +
+ ", fileSize=" + totalSize +
+ ", dataSize=" + dataSize +
+ ", expectedChunks=" + expectedChunks);
+
+ // Verify hbase checksums.
+ assertEquals(true, hfs.useHBaseChecksum());
+ assertEquals(true, hfs.getNoChecksumFs() != hfs.getBackingFs());
+
+ // Read data back from file.
+ FSDataInputStream is = fs.open(path);
+ FSDataInputStream nochecksum = hfs.getNoChecksumFs().open(path);
+ HFileBlock.FSReader hbr = new HFileBlock.FSReaderV2(is, nochecksum,
+ algo, totalSize, HFile.MAX_FORMAT_VERSION, hfs, path);
+ HFileBlock b = hbr.readBlockData(0, -1, -1, pread);
+ is.close();
+ b.sanityCheck();
+ assertEquals(dataSize, b.getUncompressedSizeWithoutHeader());
+
+ // verify that we have the expected number of checksum chunks
+ assertEquals(totalSize, HFileBlock.HEADER_SIZE + dataSize +
+ expectedChunks * HFileBlock.CHECKSUM_SIZE);
+
+ // assert that we did not encounter hbase checksum verification failures
+ assertEquals(0, HFile.getChecksumFailuresCount());
+ }
+ }
+ }
+
+ /**
+ * Test to ensure that there is at least one valid checksum implementation
+ */
+ @Test
+ public void testChecksumAlgorithm() throws IOException {
+ ChecksumType type = ChecksumType.CRC32;
+ assertEquals(ChecksumType.nameToType(type.getName()), type);
+ assertEquals(ChecksumType.valueOf(type.toString()), type);
+ }
+
+ private void validateData(DataInputStream in) throws IOException {
+ // validate data
+ for (int i = 0; i < 1234; i++) {
+ int val = in.readInt();
+ if (val != i) {
+ String msg = "testChecksumCorruption: data mismatch at index " +
+ i + " expected " + i + " found " + val;
+ LOG.warn(msg);
+ assertEquals(i, val);
+ }
+ }
+ }
+
+ @org.junit.Rule
+ public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
+ new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
+
+ /**
+ * A class that introduces hbase-checksum failures while
+ * reading data from hfiles. This should trigger the hdfs level
+ * checksum validations.
+ */
+ static private class FSReaderV2Test extends HFileBlock.FSReaderV2 {
+
+ FSReaderV2Test(FSDataInputStream istream, Algorithm algo,
+ long fileSize, int minorVersion, FileSystem fs,
+ Path path) throws IOException {
+ super(istream, istream, algo, fileSize, minorVersion,
+ (HFileSystem)fs, path);
+ }
+
+ @Override
+ protected boolean validateBlockChecksum(HFileBlock block,
+ byte[] data, int hdrSize) throws IOException {
+ return false; // checksum validation failure
+ }
+ }
+}
+
diff --git a/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java b/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java
index 1d0f2cffd7c9..a9288ce979eb 100644
--- a/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java
+++ b/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java
@@ -52,7 +52,7 @@ public class TestFixedFileTrailer {
 private static final Log LOG = LogFactory.getLog(TestFixedFileTrailer.class);
 /** The number of used fields by version.
Indexed by version minus one. */ - private static final int[] NUM_FIELDS_BY_VERSION = new int[] { 8, 13 }; + private static final int[] NUM_FIELDS_BY_VERSION = new int[] { 9, 14 }; private HBaseTestingUtility util = new HBaseTestingUtility(); private FileSystem fs; @@ -83,7 +83,8 @@ public void setUp() throws IOException { @Test public void testTrailer() throws IOException { - FixedFileTrailer t = new FixedFileTrailer(version); + FixedFileTrailer t = new FixedFileTrailer(version, + HFileBlock.MINOR_VERSION_NO_CHECKSUM); t.setDataIndexCount(3); t.setEntryCount(((long) Integer.MAX_VALUE) + 1); @@ -121,7 +122,8 @@ public void testTrailer() throws IOException { // Finished writing, trying to read. { DataInputStream dis = new DataInputStream(bais); - FixedFileTrailer t2 = new FixedFileTrailer(version); + FixedFileTrailer t2 = new FixedFileTrailer(version, + HFileBlock.MINOR_VERSION_NO_CHECKSUM); t2.deserialize(dis); assertEquals(-1, bais.read()); // Ensure we have read everything. checkLoadedTrailer(version, t, t2); @@ -191,7 +193,7 @@ private void writeTrailer(Path trailerPath, FixedFileTrailer t, private void checkLoadedTrailer(int version, FixedFileTrailer expected, FixedFileTrailer loaded) throws IOException { - assertEquals(version, loaded.getVersion()); + assertEquals(version, loaded.getMajorVersion()); assertEquals(expected.getDataIndexCount(), loaded.getDataIndexCount()); assertEquals(Math.min(expected.getEntryCount(), diff --git a/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java b/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java index e8b7df07591c..6456ccbee3db 100644 --- a/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java +++ b/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java @@ -48,9 +48,11 @@ import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.DoubleOutputStream; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.ChecksumType; import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.io.WritableUtils; import org.apache.hadoop.io.compress.CompressionOutputStream; @@ -102,16 +104,16 @@ public static Collection<Object[]> parameters() { @Before public void setUp() throws IOException { - fs = FileSystem.get(TEST_UTIL.getConfiguration()); + fs = HFileSystem.get(TEST_UTIL.getConfiguration()); } - public void writeTestBlockContents(DataOutputStream dos) throws IOException { + static void writeTestBlockContents(DataOutputStream dos) throws IOException { // This compresses really well. 
for (int i = 0; i < 1000; ++i) dos.writeInt(i / 100); } - private int writeTestKeyValues(OutputStream dos, int seed) + static int writeTestKeyValues(OutputStream dos, int seed, boolean includesMemstoreTS) throws IOException { List<KeyValue> keyValues = new ArrayList<KeyValue>(); Random randomizer = new Random(42l + seed); // just any fixed number @@ -191,22 +193,24 @@ public byte[] createTestV1Block(Compression.Algorithm algo) return baos.toByteArray(); } - private byte[] createTestV2Block(Compression.Algorithm algo) - throws IOException { + static HFileBlock.Writer createTestV2Block(Compression.Algorithm algo, + boolean includesMemstoreTS) throws IOException { final BlockType blockType = BlockType.DATA; HFileBlock.Writer hbw = new HFileBlock.Writer(algo, null, - includesMemstoreTS); + includesMemstoreTS, HFile.DEFAULT_CHECKSUM_TYPE, + HFile.DEFAULT_BYTES_PER_CHECKSUM); DataOutputStream dos = hbw.startWriting(blockType); writeTestBlockContents(dos); - byte[] headerAndData = hbw.getHeaderAndData(); + byte[] headerAndData = hbw.getHeaderAndDataForTest(); assertEquals(1000 * 4, hbw.getUncompressedSizeWithoutHeader()); hbw.releaseCompressor(); - return headerAndData; + return hbw; } public String createTestBlockStr(Compression.Algorithm algo, int correctLength) throws IOException { - byte[] testV2Block = createTestV2Block(algo); + HFileBlock.Writer hbw = createTestV2Block(algo, includesMemstoreTS); + byte[] testV2Block = hbw.getHeaderAndDataForTest(); int osOffset = HFileBlock.HEADER_SIZE + 9; if (testV2Block.length == correctLength) { // Force-set the "OS" field of the gzip header to 3 (Unix) to avoid @@ -221,14 +225,16 @@ public String createTestBlockStr(Compression.Algorithm algo, @Test public void testNoCompression() throws IOException { - assertEquals(4000 + HFileBlock.HEADER_SIZE, createTestV2Block(NONE).length); + assertEquals(4000, createTestV2Block(NONE, includesMemstoreTS). 
+ getBlockForCaching().getUncompressedSizeWithoutHeader()); } @Test public void testGzipCompression() throws IOException { final String correctTestBlockStr = - "DATABLK*\\x00\\x00\\x00:\\x00\\x00\\x0F\\xA0\\xFF\\xFF\\xFF\\xFF" + "DATABLK*\\x00\\x00\\x00>\\x00\\x00\\x0F\\xA0\\xFF\\xFF\\xFF\\xFF" + "\\xFF\\xFF\\xFF\\xFF" + + "\\x01\\x00\\x00@\\x00\\x00\\x00\\x00[" // gzip-compressed block: http://www.gzip.org/zlib/rfc-gzip.html + "\\x1F\\x8B" // gzip magic signature + "\\x08" // Compression method: 8 = "deflate" @@ -240,8 +246,9 @@ public void testGzipCompression() throws IOException { + "\\x03" + "\\xED\\xC3\\xC1\\x11\\x00 \\x08\\xC00DD\\xDD\\x7Fa" + "\\xD6\\xE8\\xA3\\xB9K\\x84`\\x96Q\\xD3\\xA8\\xDB\\xA8e\\xD4c" - + "\\xD46\\xEA5\\xEA3\\xEA7\\xE7\\x00LI\\s\\xA0\\x0F\\x00\\x00"; - final int correctGzipBlockLength = 82; + + "\\xD46\\xEA5\\xEA3\\xEA7\\xE7\\x00LI\\s\\xA0\\x0F\\x00\\x00" + + "\\xAB\\x85g\\x91"; // 4 byte checksum + final int correctGzipBlockLength = 95; assertEquals(correctTestBlockStr, createTestBlockStr(GZ, correctGzipBlockLength)); } @@ -285,11 +292,14 @@ public void testReaderV1() throws IOException { public void testReaderV2() throws IOException { for (Compression.Algorithm algo : COMPRESSION_ALGORITHMS) { for (boolean pread : new boolean[] { false, true }) { + LOG.info("testReaderV2: Compression algorithm: " + algo + + ", pread=" + pread); Path path = new Path(TEST_UTIL.getDataTestDir(), "blocks_v2_" + algo); FSDataOutputStream os = fs.create(path); HFileBlock.Writer hbw = new HFileBlock.Writer(algo, null, - includesMemstoreTS); + includesMemstoreTS, HFile.DEFAULT_CHECKSUM_TYPE, + HFile.DEFAULT_BYTES_PER_CHECKSUM); long totalSize = 0; for (int blockId = 0; blockId < 2; ++blockId) { DataOutputStream dos = hbw.startWriting(BlockType.DATA); @@ -305,16 +315,19 @@ public void testReaderV2() throws IOException { totalSize); HFileBlock b = hbr.readBlockData(0, -1, -1, pread); is.close(); + assertEquals(0, HFile.getChecksumFailuresCount()); b.sanityCheck(); assertEquals(4936, b.getUncompressedSizeWithoutHeader()); - assertEquals(algo == GZ ? 2173 : 4936, b.getOnDiskSizeWithoutHeader()); + assertEquals(algo == GZ ? 
2173 : 4936, + b.getOnDiskSizeWithoutHeader() - b.totalChecksumBytes()); String blockStr = b.toString(); if (algo == GZ) { is = fs.open(path); hbr = new HFileBlock.FSReaderV2(is, algo, totalSize); - b = hbr.readBlockData(0, 2173 + HFileBlock.HEADER_SIZE, -1, pread); + b = hbr.readBlockData(0, 2173 + HFileBlock.HEADER_SIZE + + b.totalChecksumBytes(), -1, pread); assertEquals(blockStr, b.toString()); int wrongCompressedSize = 2172; try { @@ -351,13 +364,15 @@ public void testDataBlockEncoding() throws IOException { HFileDataBlockEncoder dataBlockEncoder = new HFileDataBlockEncoderImpl(encoding); HFileBlock.Writer hbw = new HFileBlock.Writer(algo, dataBlockEncoder, - includesMemstoreTS); + includesMemstoreTS, HFile.DEFAULT_CHECKSUM_TYPE, + HFile.DEFAULT_BYTES_PER_CHECKSUM); long totalSize = 0; final List<Integer> encodedSizes = new ArrayList<Integer>(); final List<ByteBuffer> encodedBlocks = new ArrayList<ByteBuffer>(); for (int blockId = 0; blockId < numBlocks; ++blockId) { - writeEncodedBlock(encoding, hbw, encodedSizes, encodedBlocks, - blockId); + DataOutputStream dos = hbw.startWriting(BlockType.DATA); + writeEncodedBlock(encoding, dos, encodedSizes, encodedBlocks, + blockId, includesMemstoreTS); hbw.writeHeaderAndData(os); totalSize += hbw.getOnDiskSizeWithHeader(); @@ -374,6 +389,7 @@ public void testDataBlockEncoding() throws IOException { int pos = 0; for (int blockId = 0; blockId < numBlocks; ++blockId) { b = hbr.readBlockData(pos, -1, -1, pread); + assertEquals(0, HFile.getChecksumFailuresCount()); b.sanityCheck(); pos += b.getOnDiskSizeWithHeader(); @@ -401,16 +417,16 @@ public void testDataBlockEncoding() throws IOException { } } - private void writeEncodedBlock(DataBlockEncoding encoding, - HFileBlock.Writer hbw, final List<Integer> encodedSizes, - final List<ByteBuffer> encodedBlocks, int blockId) throws IOException { - DataOutputStream dos = hbw.startWriting(BlockType.DATA); + static void writeEncodedBlock(DataBlockEncoding encoding, + DataOutputStream dos, final List<Integer> encodedSizes, + final List<ByteBuffer> encodedBlocks, int blockId, + boolean includesMemstoreTS) throws IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); DoubleOutputStream doubleOutputStream = new DoubleOutputStream(dos, baos); final int rawBlockSize = writeTestKeyValues(doubleOutputStream, - blockId); + blockId, includesMemstoreTS); ByteBuffer rawBuf = ByteBuffer.wrap(baos.toByteArray()); rawBuf.rewind(); @@ -434,7 +450,7 @@ private void writeEncodedBlock(DataBlockEncoding encoding, encodedBlocks.add(encodedBuf); } - private void assertBuffersEqual(ByteBuffer expectedBuffer, + static void assertBuffersEqual(ByteBuffer expectedBuffer, ByteBuffer actualBuffer, Compression.Algorithm compression, DataBlockEncoding encoding, boolean pread) { if (!actualBuffer.equals(expectedBuffer)) { @@ -471,7 +487,9 @@ public void testPreviousOffset() throws IOException { for (boolean pread : BOOLEAN_VALUES) { for (boolean cacheOnWrite : BOOLEAN_VALUES) { Random rand = defaultRandom(); - LOG.info("Compression algorithm: " + algo + ", pread=" + pread); + LOG.info("testPreviousOffset:Compression algorithm: " + algo + + ", pread=" + pread + + ", cacheOnWrite=" + cacheOnWrite); Path path = new Path(TEST_UTIL.getDataTestDir(), "prev_offset"); List<Long> expectedOffsets = new ArrayList<Long>(); List<Long> expectedPrevOffsets = new ArrayList<Long>(); @@ -522,17 +540,23 @@ public void testPreviousOffset() throws IOException { b2.getUncompressedSizeWithoutHeader()); assertEquals(b.getPrevBlockOffset(), 
b2.getPrevBlockOffset()); assertEquals(curOffset, b2.getOffset()); + assertEquals(b.getBytesPerChecksum(), b2.getBytesPerChecksum()); + assertEquals(b.getOnDiskDataSizeWithHeader(), + b2.getOnDiskDataSizeWithHeader()); + assertEquals(0, HFile.getChecksumFailuresCount()); curOffset += b.getOnDiskSizeWithHeader(); if (cacheOnWrite) { // In the cache-on-write mode we store uncompressed bytes so we // can compare them to what was read by the block reader. - + // b's buffer has header + data + checksum while + // expectedContents have header + data only ByteBuffer bufRead = b.getBufferWithHeader(); ByteBuffer bufExpected = expectedContents.get(i); boolean bytesAreCorrect = Bytes.compareTo(bufRead.array(), - bufRead.arrayOffset(), bufRead.limit(), + bufRead.arrayOffset(), + bufRead.limit() - b.totalChecksumBytes(), bufExpected.array(), bufExpected.arrayOffset(), bufExpected.limit()) == 0; String wrongBytesMsg = ""; @@ -541,15 +565,26 @@ public void testPreviousOffset() throws IOException { // Optimization: only construct an error message in case we // will need it. wrongBytesMsg = "Expected bytes in block #" + i + " (algo=" - + algo + ", pread=" + pread + "):\n"; + + algo + ", pread=" + pread + + ", cacheOnWrite=" + cacheOnWrite + "):\n"; wrongBytesMsg += Bytes.toStringBinary(bufExpected.array(), bufExpected.arrayOffset(), Math.min(32, bufExpected.limit())) + ", actual:\n" + Bytes.toStringBinary(bufRead.array(), bufRead.arrayOffset(), Math.min(32, bufRead.limit())); + if (detailedLogging) { + LOG.warn("expected header" + + HFileBlock.toStringHeader(bufExpected) + + "\nfound header" + + HFileBlock.toStringHeader(bufRead)); + LOG.warn("bufread offset " + bufRead.arrayOffset() + + " limit " + bufRead.limit() + + " expected offset " + bufExpected.arrayOffset() + + " limit " + bufExpected.limit()); + LOG.warn(wrongBytesMsg); + } } - assertTrue(wrongBytesMsg, bytesAreCorrect); } } @@ -672,10 +707,12 @@ private long writeBlocks(Random rand, Compression.Algorithm compressAlgo, boolean cacheOnWrite = expectedContents != null; FSDataOutputStream os = fs.create(path); HFileBlock.Writer hbw = new HFileBlock.Writer(compressAlgo, null, - includesMemstoreTS); + includesMemstoreTS, HFile.DEFAULT_CHECKSUM_TYPE, + HFile.DEFAULT_BYTES_PER_CHECKSUM); Map<BlockType, Long> prevOffsetByType = new HashMap<BlockType, Long>(); long totalSize = 0; for (int i = 0; i < NUM_TEST_BLOCKS; ++i) { + long pos = os.getPos(); int blockTypeOrdinal = rand.nextInt(BlockType.values().length); if (blockTypeOrdinal == BlockType.ENCODED_DATA.ordinal()) { blockTypeOrdinal = BlockType.DATA.ordinal(); @@ -706,9 +743,9 @@ private long writeBlocks(Random rand, Compression.Algorithm compressAlgo, expectedContents.add(hbw.getUncompressedBufferWithHeader()); if (detailedLogging) { - LOG.info("Writing block #" + i + " of type " + bt + LOG.info("Written block #" + i + " of type " + bt + ", uncompressed size " + hbw.getUncompressedSizeWithoutHeader() - + " at offset " + os.getPos()); + + " at offset " + pos); } } os.close(); @@ -730,7 +767,9 @@ public void testBlockHeapSize() { byte[] byteArr = new byte[HFileBlock.HEADER_SIZE + size]; ByteBuffer buf = ByteBuffer.wrap(byteArr, 0, size); HFileBlock block = new HFileBlock(BlockType.DATA, size, size, -1, buf, - HFileBlock.FILL_HEADER, -1, includesMemstoreTS); + HFileBlock.FILL_HEADER, -1, includesMemstoreTS, + HFileBlock.MINOR_VERSION_NO_CHECKSUM, 0, ChecksumType.NULL.getCode(), + 0); long byteBufferExpectedSize = ClassSize.align(ClassSize.estimateBase(buf.getClass(), true) + HFileBlock.HEADER_SIZE + 
size); diff --git a/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java b/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java new file mode 100644 index 000000000000..4d9b1580d4cd --- /dev/null +++ b/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java @@ -0,0 +1,806 @@ +/* + * Copyright 2011 The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.io.hfile; + +import static org.junit.Assert.*; + +import java.io.ByteArrayOutputStream; +import java.io.DataOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.concurrent.Callable; +import java.util.concurrent.Executor; +import java.util.concurrent.ExecutorCompletionService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.fs.FSDataInputStream; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.MediumTests; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.fs.HFileSystem; +import org.apache.hadoop.hbase.io.DoubleOutputStream; +import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.ClassSize; +import org.apache.hadoop.io.WritableUtils; +import org.apache.hadoop.io.compress.CompressionOutputStream; +import org.apache.hadoop.io.compress.Compressor; +import org.apache.hadoop.hbase.io.hfile.HFileBlock.BlockWritable; +import org.apache.hadoop.hbase.util.ChecksumType; +import org.apache.hadoop.hbase.util.Pair; +import com.google.common.base.Preconditions; + +import static org.apache.hadoop.hbase.io.hfile.Compression.Algorithm.*; +import org.junit.Before; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameters; + +/** + * This class has unit tests to prove that older versions of + * HFiles (without checksums) are compatible with current readers. 
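+ * The nested Writer below emits blocks with the pre-checksum header
+ * layout (HFileBlock.HEADER_SIZE_NO_CHECKSUM) and minor version 0, and
+ * the tests assert that HFileBlock.FSReaderV2 still reads them.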
+ */
+@Category(MediumTests.class)
+@RunWith(Parameterized.class)
+public class TestHFileBlockCompatibility {
+ // change this value to activate more logs
+ private static final boolean[] BOOLEAN_VALUES = new boolean[] { false, true };
+
+ private static final Log LOG = LogFactory.getLog(TestHFileBlockCompatibility.class);
+
+ private static final Compression.Algorithm[] COMPRESSION_ALGORITHMS = {
+ NONE, GZ };
+
+ // The minor version for pre-checksum files
+ private static int MINOR_VERSION = 0;
+
+ private static final HBaseTestingUtility TEST_UTIL =
+ new HBaseTestingUtility();
+ private HFileSystem fs;
+ private int uncompressedSizeV1;
+
+ private final boolean includesMemstoreTS;
+
+ public TestHFileBlockCompatibility(boolean includesMemstoreTS) {
+ this.includesMemstoreTS = includesMemstoreTS;
+ }
+
+ @Parameters
+ public static Collection<Object[]> parameters() {
+ return HBaseTestingUtility.BOOLEAN_PARAMETERIZED;
+ }
+
+ @Before
+ public void setUp() throws IOException {
+ fs = (HFileSystem)HFileSystem.get(TEST_UTIL.getConfiguration());
+ }
+
+ public byte[] createTestV1Block(Compression.Algorithm algo)
+ throws IOException {
+ Compressor compressor = algo.getCompressor();
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ OutputStream os = algo.createCompressionStream(baos, compressor, 0);
+ DataOutputStream dos = new DataOutputStream(os);
+ BlockType.META.write(dos); // Let's make this a meta block.
+ TestHFileBlock.writeTestBlockContents(dos);
+ uncompressedSizeV1 = dos.size();
+ dos.flush();
+ algo.returnCompressor(compressor);
+ return baos.toByteArray();
+ }
+
+ private Writer createTestV2Block(Compression.Algorithm algo)
+ throws IOException {
+ final BlockType blockType = BlockType.DATA;
+ Writer hbw = new Writer(algo, null,
+ includesMemstoreTS);
+ DataOutputStream dos = hbw.startWriting(blockType);
+ TestHFileBlock.writeTestBlockContents(dos);
+ byte[] headerAndData = hbw.getHeaderAndData();
+ assertEquals(1000 * 4, hbw.getUncompressedSizeWithoutHeader());
+ hbw.releaseCompressor();
+ return hbw;
+ }
+
+ private String createTestBlockStr(Compression.Algorithm algo,
+ int correctLength) throws IOException {
+ Writer hbw = createTestV2Block(algo);
+ byte[] testV2Block = hbw.getHeaderAndData();
+ int osOffset = HFileBlock.HEADER_SIZE_NO_CHECKSUM + 9;
+ if (testV2Block.length == correctLength) {
+ // Force-set the "OS" field of the gzip header to 3 (Unix) to avoid
+ // variations across operating systems.
+ // See http://www.gzip.org/zlib/rfc-gzip.html for gzip format.
+ testV2Block[osOffset] = 3;
+ }
+ return Bytes.toStringBinary(testV2Block);
+ }
+
+ @Test
+ public void testNoCompression() throws IOException {
+ assertEquals(4000, createTestV2Block(NONE).getBlockForCaching().
+ getUncompressedSizeWithoutHeader());
+ }
+
+ @Test
+ public void testGzipCompression() throws IOException {
+ final String correctTestBlockStr =
+ "DATABLK*\\x00\\x00\\x00:\\x00\\x00\\x0F\\xA0\\xFF\\xFF\\xFF\\xFF"
+ + "\\xFF\\xFF\\xFF\\xFF"
+ // gzip-compressed block: http://www.gzip.org/zlib/rfc-gzip.html
+ + "\\x1F\\x8B" // gzip magic signature
+ + "\\x08" // Compression method: 8 = "deflate"
+ + "\\x00" // Flags
+ + "\\x00\\x00\\x00\\x00" // mtime
+ + "\\x00" // XFL (extra flags)
+ // OS (0 = FAT filesystems, 3 = Unix). However, this field
+ // sometimes gets set to 0 on Linux and Mac, so we reset it to 3.
+ + "\\x03" + + "\\xED\\xC3\\xC1\\x11\\x00 \\x08\\xC00DD\\xDD\\x7Fa" + + "\\xD6\\xE8\\xA3\\xB9K\\x84`\\x96Q\\xD3\\xA8\\xDB\\xA8e\\xD4c" + + "\\xD46\\xEA5\\xEA3\\xEA7\\xE7\\x00LI\\s\\xA0\\x0F\\x00\\x00"; + final int correctGzipBlockLength = 82; + assertEquals(correctTestBlockStr, createTestBlockStr(GZ, + correctGzipBlockLength)); + } + + @Test + public void testReaderV1() throws IOException { + for (Compression.Algorithm algo : COMPRESSION_ALGORITHMS) { + for (boolean pread : new boolean[] { false, true }) { + byte[] block = createTestV1Block(algo); + Path path = new Path(TEST_UTIL.getDataTestDir(), + "blocks_v1_"+ algo); + LOG.info("Creating temporary file at " + path); + FSDataOutputStream os = fs.create(path); + int totalSize = 0; + int numBlocks = 50; + for (int i = 0; i < numBlocks; ++i) { + os.write(block); + totalSize += block.length; + } + os.close(); + + FSDataInputStream is = fs.open(path); + HFileBlock.FSReader hbr = new HFileBlock.FSReaderV1(is, algo, + totalSize); + HFileBlock b; + int numBlocksRead = 0; + long pos = 0; + while (pos < totalSize) { + b = hbr.readBlockData(pos, block.length, uncompressedSizeV1, pread); + b.sanityCheck(); + pos += block.length; + numBlocksRead++; + } + assertEquals(numBlocks, numBlocksRead); + is.close(); + } + } + } + + @Test + public void testReaderV2() throws IOException { + for (Compression.Algorithm algo : COMPRESSION_ALGORITHMS) { + for (boolean pread : new boolean[] { false, true }) { + LOG.info("testReaderV2: Compression algorithm: " + algo + + ", pread=" + pread); + Path path = new Path(TEST_UTIL.getDataTestDir(), "blocks_v2_" + + algo); + FSDataOutputStream os = fs.create(path); + Writer hbw = new Writer(algo, null, + includesMemstoreTS); + long totalSize = 0; + for (int blockId = 0; blockId < 2; ++blockId) { + DataOutputStream dos = hbw.startWriting(BlockType.DATA); + for (int i = 0; i < 1234; ++i) + dos.writeInt(i); + hbw.writeHeaderAndData(os); + totalSize += hbw.getOnDiskSizeWithHeader(); + } + os.close(); + + FSDataInputStream is = fs.open(path); + HFileBlock.FSReader hbr = new HFileBlock.FSReaderV2(is, is, algo, + totalSize, MINOR_VERSION, fs, path); + HFileBlock b = hbr.readBlockData(0, -1, -1, pread); + is.close(); + + b.sanityCheck(); + assertEquals(4936, b.getUncompressedSizeWithoutHeader()); + assertEquals(algo == GZ ? 2173 : 4936, + b.getOnDiskSizeWithoutHeader() - b.totalChecksumBytes()); + String blockStr = b.toString(); + + if (algo == GZ) { + is = fs.open(path); + hbr = new HFileBlock.FSReaderV2(is, is, algo, totalSize, MINOR_VERSION, + fs, path); + b = hbr.readBlockData(0, 2173 + HFileBlock.HEADER_SIZE_NO_CHECKSUM + + b.totalChecksumBytes(), -1, pread); + assertEquals(blockStr, b.toString()); + int wrongCompressedSize = 2172; + try { + b = hbr.readBlockData(0, wrongCompressedSize + + HFileBlock.HEADER_SIZE_NO_CHECKSUM, -1, pread); + fail("Exception expected"); + } catch (IOException ex) { + String expectedPrefix = "On-disk size without header provided is " + + wrongCompressedSize + ", but block header contains " + + b.getOnDiskSizeWithoutHeader() + "."; + assertTrue("Invalid exception message: '" + ex.getMessage() + + "'.\nMessage is expected to start with: '" + expectedPrefix + + "'", ex.getMessage().startsWith(expectedPrefix)); + } + is.close(); + } + } + } + } + + /** + * Test encoding/decoding data blocks. + * @throws IOException a bug or a problem with temporary files. 
+ */ + @Test + public void testDataBlockEncoding() throws IOException { + final int numBlocks = 5; + for (Compression.Algorithm algo : COMPRESSION_ALGORITHMS) { + for (boolean pread : new boolean[] { false, true }) { + for (DataBlockEncoding encoding : DataBlockEncoding.values()) { + LOG.info("testDataBlockEncoding algo " + algo + + " pread = " + pread + + " encoding " + encoding); + Path path = new Path(TEST_UTIL.getDataTestDir(), "blocks_v2_" + + algo + "_" + encoding.toString()); + FSDataOutputStream os = fs.create(path); + HFileDataBlockEncoder dataBlockEncoder = + new HFileDataBlockEncoderImpl(encoding); + Writer hbw = new Writer(algo, dataBlockEncoder, + includesMemstoreTS); + long totalSize = 0; + final List<Integer> encodedSizes = new ArrayList<Integer>(); + final List<ByteBuffer> encodedBlocks = new ArrayList<ByteBuffer>(); + for (int blockId = 0; blockId < numBlocks; ++blockId) { + DataOutputStream dos = hbw.startWriting(BlockType.DATA); + TestHFileBlock.writeEncodedBlock(encoding, dos, encodedSizes, encodedBlocks, + blockId, includesMemstoreTS); + + hbw.writeHeaderAndData(os); + totalSize += hbw.getOnDiskSizeWithHeader(); + } + os.close(); + + FSDataInputStream is = fs.open(path); + HFileBlock.FSReaderV2 hbr = new HFileBlock.FSReaderV2(is, is, algo, + totalSize, MINOR_VERSION, fs, path); + hbr.setDataBlockEncoder(dataBlockEncoder); + hbr.setIncludesMemstoreTS(includesMemstoreTS); + + HFileBlock b; + int pos = 0; + for (int blockId = 0; blockId < numBlocks; ++blockId) { + b = hbr.readBlockData(pos, -1, -1, pread); + b.sanityCheck(); + pos += b.getOnDiskSizeWithHeader(); + + assertEquals((int) encodedSizes.get(blockId), + b.getUncompressedSizeWithoutHeader()); + ByteBuffer actualBuffer = b.getBufferWithoutHeader(); + if (encoding != DataBlockEncoding.NONE) { + // We expect a two-byte big-endian encoding id. + assertEquals(0, actualBuffer.get(0)); + assertEquals(encoding.getId(), actualBuffer.get(1)); + actualBuffer.position(2); + actualBuffer = actualBuffer.slice(); + } + + ByteBuffer expectedBuffer = encodedBlocks.get(blockId); + expectedBuffer.rewind(); + + // test if content matches, produce nice message + TestHFileBlock.assertBuffersEqual(expectedBuffer, actualBuffer, algo, encoding, + pread); + } + is.close(); + } + } + } + } + + @org.junit.Rule + public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu = + new org.apache.hadoop.hbase.ResourceCheckerJUnitRule(); + + + /** + * This is the version of the HFileBlock.Writer that is used to + * create V2 blocks with minor version 0. These blocks do not + * have hbase-level checksums. The code is here to test + * backward compatibility. The reason we do not inherit from + * HFileBlock.Writer is because we never ever want to change the code + * in this class but the code in HFileBlock.Writer will continually + * evolve. + */ + public static final class Writer { + + // These constants are as they were in minorVersion 0. + private static final int HEADER_SIZE = HFileBlock.HEADER_SIZE_NO_CHECKSUM; + private static final boolean DONT_FILL_HEADER = HFileBlock.DONT_FILL_HEADER; + private static final byte[] DUMMY_HEADER = + HFileBlock.DUMMY_HEADER_NO_CHECKSUM; + + private enum State { + INIT, + WRITING, + BLOCK_READY + }; + + /** Writer state. Used to ensure the correct usage protocol. */ + private State state = State.INIT; + + /** Compression algorithm for all blocks this instance writes. 
*/ + private final Compression.Algorithm compressAlgo; + + /** Data block encoder used for data blocks */ + private final HFileDataBlockEncoder dataBlockEncoder; + + /** + * The stream we use to accumulate data in uncompressed format for each + * block. We reset this stream at the end of each block and reuse it. The + * header is written as the first {@link #HEADER_SIZE} bytes into this + * stream. + */ + private ByteArrayOutputStream baosInMemory; + + /** Compressor, which is also reused between consecutive blocks. */ + private Compressor compressor; + + /** Compression output stream */ + private CompressionOutputStream compressionStream; + + /** Underlying stream to write compressed bytes to */ + private ByteArrayOutputStream compressedByteStream; + + /** + * Current block type. Set in {@link #startWriting(BlockType)}. Could be + * changed in {@link #encodeDataBlockForDisk()} from {@link BlockType#DATA} + * to {@link BlockType#ENCODED_DATA}. + */ + private BlockType blockType; + + /** + * A stream that we write uncompressed bytes to, which compresses them and + * writes them to {@link #baosInMemory}. + */ + private DataOutputStream userDataStream; + + /** + * Bytes to be written to the file system, including the header. Compressed + * if compression is turned on. + */ + private byte[] onDiskBytesWithHeader; + + /** + * Valid in the READY state. Contains the header and the uncompressed (but + * potentially encoded, if this is a data block) bytes, so the length is + * {@link #uncompressedSizeWithoutHeader} + {@link HFileBlock#HEADER_SIZE}. + */ + private byte[] uncompressedBytesWithHeader; + + /** + * Current block's start offset in the {@link HFile}. Set in + * {@link #writeHeaderAndData(FSDataOutputStream)}. + */ + private long startOffset; + + /** + * Offset of previous block by block type. Updated when the next block is + * started. + */ + private long[] prevOffsetByType; + + /** The offset of the previous block of the same type */ + private long prevOffset; + + /** Whether we are including memstore timestamp after every key/value */ + private boolean includesMemstoreTS; + + /** + * @param compressionAlgorithm compression algorithm to use + * @param dataBlockEncoderAlgo data block encoding algorithm to use + */ + public Writer(Compression.Algorithm compressionAlgorithm, + HFileDataBlockEncoder dataBlockEncoder, boolean includesMemstoreTS) { + compressAlgo = compressionAlgorithm == null ? NONE : compressionAlgorithm; + this.dataBlockEncoder = dataBlockEncoder != null + ? dataBlockEncoder : NoOpDataBlockEncoder.INSTANCE; + + baosInMemory = new ByteArrayOutputStream(); + if (compressAlgo != NONE) { + compressor = compressionAlgorithm.getCompressor(); + compressedByteStream = new ByteArrayOutputStream(); + try { + compressionStream = + compressionAlgorithm.createPlainCompressionStream( + compressedByteStream, compressor); + } catch (IOException e) { + throw new RuntimeException("Could not create compression stream " + + "for algorithm " + compressionAlgorithm, e); + } + } + + prevOffsetByType = new long[BlockType.values().length]; + for (int i = 0; i < prevOffsetByType.length; ++i) + prevOffsetByType[i] = -1; + + this.includesMemstoreTS = includesMemstoreTS; + } + + /** + * Starts writing into the block. The previous block's data is discarded. 
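+ * A dummy header is written first and back-filled with the real values
+ * in finishBlock().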
+ * + * @return the stream the user can write their data into + * @throws IOException + */ + public DataOutputStream startWriting(BlockType newBlockType) + throws IOException { + if (state == State.BLOCK_READY && startOffset != -1) { + // We had a previous block that was written to a stream at a specific + // offset. Save that offset as the last offset of a block of that type. + prevOffsetByType[blockType.getId()] = startOffset; + } + + startOffset = -1; + blockType = newBlockType; + + baosInMemory.reset(); + baosInMemory.write(DUMMY_HEADER); + + state = State.WRITING; + + // We will compress it later in finishBlock() + userDataStream = new DataOutputStream(baosInMemory); + return userDataStream; + } + + /** + * Returns the stream for the user to write to. The block writer takes care + * of handling compression and buffering for caching on write. Can only be + * called in the "writing" state. + * + * @return the data output stream for the user to write to + */ + DataOutputStream getUserDataStream() { + expectState(State.WRITING); + return userDataStream; + } + + /** + * Transitions the block writer from the "writing" state to the "block + * ready" state. Does nothing if a block is already finished. + */ + private void ensureBlockReady() throws IOException { + Preconditions.checkState(state != State.INIT, + "Unexpected state: " + state); + + if (state == State.BLOCK_READY) + return; + + // This will set state to BLOCK_READY. + finishBlock(); + } + + /** + * An internal method that flushes the compressing stream (if using + * compression), serializes the header, and takes care of the separate + * uncompressed stream for caching on write, if applicable. Sets block + * write state to "block ready". + */ + private void finishBlock() throws IOException { + userDataStream.flush(); + + // This does an array copy, so it is safe to cache this byte array. + uncompressedBytesWithHeader = baosInMemory.toByteArray(); + LOG.warn("Writer.finishBlock user data size with header before compression " + + uncompressedBytesWithHeader.length); + prevOffset = prevOffsetByType[blockType.getId()]; + + // We need to set state before we can package the block up for + // cache-on-write. In a way, the block is ready, but not yet encoded or + // compressed. + state = State.BLOCK_READY; + encodeDataBlockForDisk(); + + doCompression(); + putHeader(uncompressedBytesWithHeader, 0, onDiskBytesWithHeader.length, + uncompressedBytesWithHeader.length); + } + + /** + * Do compression if it is enabled, or re-use the uncompressed buffer if + * it is not. Fills in the compressed block's header if doing compression. + */ + private void doCompression() throws IOException { + // do the compression + if (compressAlgo != NONE) { + compressedByteStream.reset(); + compressedByteStream.write(DUMMY_HEADER); + + compressionStream.resetState(); + + compressionStream.write(uncompressedBytesWithHeader, HEADER_SIZE, + uncompressedBytesWithHeader.length - HEADER_SIZE); + + compressionStream.flush(); + compressionStream.finish(); + + onDiskBytesWithHeader = compressedByteStream.toByteArray(); + putHeader(onDiskBytesWithHeader, 0, onDiskBytesWithHeader.length, + uncompressedBytesWithHeader.length); + } else { + onDiskBytesWithHeader = uncompressedBytesWithHeader; + } + } + + /** + * Encodes this block if it is a data block and encoding is turned on in + * {@link #dataBlockEncoder}. 
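+ * If the encoder produces an encoded block, the block type changes from
+ * DATA to ENCODED_DATA.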
+ */ + private void encodeDataBlockForDisk() throws IOException { + if (blockType != BlockType.DATA) { + return; // skip any non-data block + } + + // do data block encoding, if data block encoder is set + ByteBuffer rawKeyValues = ByteBuffer.wrap(uncompressedBytesWithHeader, + HEADER_SIZE, uncompressedBytesWithHeader.length - + HEADER_SIZE).slice(); + Pair<ByteBuffer, BlockType> encodingResult = + dataBlockEncoder.beforeWriteToDisk(rawKeyValues, + includesMemstoreTS, DUMMY_HEADER); + + BlockType encodedBlockType = encodingResult.getSecond(); + if (encodedBlockType == BlockType.ENCODED_DATA) { + uncompressedBytesWithHeader = encodingResult.getFirst().array(); + blockType = BlockType.ENCODED_DATA; + } else { + // There is no encoding configured. Do some extra sanity-checking. + if (encodedBlockType != BlockType.DATA) { + throw new IOException("Unexpected block type coming out of data " + + "block encoder: " + encodedBlockType); + } + if (userDataStream.size() != + uncompressedBytesWithHeader.length - HEADER_SIZE) { + throw new IOException("Uncompressed size mismatch: " + + userDataStream.size() + " vs. " + + (uncompressedBytesWithHeader.length - HEADER_SIZE)); + } + } + } + + /** + * Put the header into the given byte array at the given offset. + * @param onDiskSize size of the block on disk + * @param uncompressedSize size of the block after decompression (but + * before optional data block decoding) + */ + private void putHeader(byte[] dest, int offset, int onDiskSize, + int uncompressedSize) { + offset = blockType.put(dest, offset); + offset = Bytes.putInt(dest, offset, onDiskSize - HEADER_SIZE); + offset = Bytes.putInt(dest, offset, uncompressedSize - HEADER_SIZE); + Bytes.putLong(dest, offset, prevOffset); + } + + /** + * Similar to {@link #writeHeaderAndData(FSDataOutputStream)}, but records + * the offset of this block so that it can be referenced in the next block + * of the same type. + * + * @param out + * @throws IOException + */ + public void writeHeaderAndData(FSDataOutputStream out) throws IOException { + long offset = out.getPos(); + if (startOffset != -1 && offset != startOffset) { + throw new IOException("A " + blockType + " block written to a " + + "stream twice, first at offset " + startOffset + ", then at " + + offset); + } + startOffset = offset; + + writeHeaderAndData((DataOutputStream) out); + } + + /** + * Writes the header and the compressed data of this block (or uncompressed + * data when not using compression) into the given stream. Can be called in + * the "writing" state or in the "block ready" state. If called in the + * "writing" state, transitions the writer to the "block ready" state. + * + * @param out the output stream to write the + * @throws IOException + */ + private void writeHeaderAndData(DataOutputStream out) throws IOException { + ensureBlockReady(); + out.write(onDiskBytesWithHeader); + } + + /** + * Returns the header or the compressed data (or uncompressed data when not + * using compression) as a byte array. Can be called in the "writing" state + * or in the "block ready" state. If called in the "writing" state, + * transitions the writer to the "block ready" state. + * + * @return header and data as they would be stored on disk in a byte array + * @throws IOException + */ + public byte[] getHeaderAndData() throws IOException { + ensureBlockReady(); + return onDiskBytesWithHeader; + } + + /** + * Releases the compressor this writer uses to compress blocks into the + * compressor pool. Needs to be called before the writer is discarded. 
+ */ + public void releaseCompressor() { + if (compressor != null) { + compressAlgo.returnCompressor(compressor); + compressor = null; + } + } + + /** + * Returns the on-disk size of the data portion of the block. This is the + * compressed size if compression is enabled. Can only be called in the + * "block ready" state. Header is not compressed, and its size is not + * included in the return value. + * + * @return the on-disk size of the block, not including the header. + */ + public int getOnDiskSizeWithoutHeader() { + expectState(State.BLOCK_READY); + return onDiskBytesWithHeader.length - HEADER_SIZE; + } + + /** + * Returns the on-disk size of the block. Can only be called in the + * "block ready" state. + * + * @return the on-disk size of the block ready to be written, including the + * header size + */ + public int getOnDiskSizeWithHeader() { + expectState(State.BLOCK_READY); + return onDiskBytesWithHeader.length; + } + + /** + * The uncompressed size of the block data. Does not include header size. + */ + public int getUncompressedSizeWithoutHeader() { + expectState(State.BLOCK_READY); + return uncompressedBytesWithHeader.length - HEADER_SIZE; + } + + /** + * The uncompressed size of the block data, including header size. + */ + public int getUncompressedSizeWithHeader() { + expectState(State.BLOCK_READY); + return uncompressedBytesWithHeader.length; + } + + /** @return true if a block is being written */ + public boolean isWriting() { + return state == State.WRITING; + } + + /** + * Returns the number of bytes written into the current block so far, or + * zero if not writing the block at the moment. Note that this will return + * zero in the "block ready" state as well. + * + * @return the number of bytes written + */ + public int blockSizeWritten() { + if (state != State.WRITING) + return 0; + return userDataStream.size(); + } + + /** + * Returns the header followed by the uncompressed data, even if using + * compression. This is needed for storing uncompressed blocks in the block + * cache. Can be called in the "writing" state or the "block ready" state. + * + * @return uncompressed block bytes for caching on write + */ + private byte[] getUncompressedDataWithHeader() { + expectState(State.BLOCK_READY); + + return uncompressedBytesWithHeader; + } + + private void expectState(State expectedState) { + if (state != expectedState) { + throw new IllegalStateException("Expected state: " + expectedState + + ", actual state: " + state); + } + } + + /** + * Similar to {@link #getUncompressedBufferWithHeader()} but returns a byte + * buffer. + * + * @return uncompressed block for caching on write in the form of a buffer + */ + public ByteBuffer getUncompressedBufferWithHeader() { + byte[] b = getUncompressedDataWithHeader(); + return ByteBuffer.wrap(b, 0, b.length); + } + + /** + * Takes the given {@link BlockWritable} instance, creates a new block of + * its appropriate type, writes the writable into this block, and flushes + * the block into the output stream. The writer is instructed not to buffer + * uncompressed bytes for cache-on-write. + * + * @param bw the block-writable object to write as a block + * @param out the file system output stream + * @throws IOException + */ + public void writeBlock(BlockWritable bw, FSDataOutputStream out) + throws IOException { + bw.writeToBlock(startWriting(bw.getBlockType())); + writeHeaderAndData(out); + } + + /** + * Creates a new HFileBlock. 
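+ * Minor version 0 blocks carry no hbase checksums, so the block is
+ * created with ChecksumType.NULL and a bytesPerChecksum of 0.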
+ */ + public HFileBlock getBlockForCaching() { + return new HFileBlock(blockType, getOnDiskSizeWithoutHeader(), + getUncompressedSizeWithoutHeader(), prevOffset, + getUncompressedBufferWithHeader(), DONT_FILL_HEADER, startOffset, + includesMemstoreTS, MINOR_VERSION, 0, ChecksumType.NULL.getCode(), + getOnDiskSizeWithoutHeader()); + } + } +} + diff --git a/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java b/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java index b7d0665f225f..3f36e64cde25 100644 --- a/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java +++ b/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java @@ -44,6 +44,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.*; +import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex.BlockIndexReader; import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex.BlockIndexChunk; import org.apache.hadoop.hbase.util.Bytes; @@ -110,7 +111,7 @@ public void setUp() throws IOException { // This test requires at least HFile format version 2. conf.setInt(HFile.FORMAT_VERSION_KEY, HFile.MAX_FORMAT_VERSION); - fs = FileSystem.get(conf); + fs = HFileSystem.get(conf); } @Test @@ -215,7 +216,8 @@ public void readIndex() throws IOException { private void writeWholeIndex() throws IOException { assertEquals(0, keys.size()); HFileBlock.Writer hbw = new HFileBlock.Writer(compr, null, - includesMemstoreTS); + includesMemstoreTS, HFile.DEFAULT_CHECKSUM_TYPE, + HFile.DEFAULT_BYTES_PER_CHECKSUM); FSDataOutputStream outputStream = fs.create(path); HFileBlockIndex.BlockIndexWriter biw = new HFileBlockIndex.BlockIndexWriter(hbw, null, null); diff --git a/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java b/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java index e1a57e52d225..613ad7da816a 100644 --- a/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java +++ b/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java @@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.io.encoding.RedundantKVGenerator; import org.apache.hadoop.hbase.regionserver.metrics.SchemaConfigured; import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics; +import org.apache.hadoop.hbase.util.ChecksumType; import org.apache.hadoop.hbase.util.Pair; import org.junit.After; import org.junit.Before; @@ -123,12 +124,14 @@ public void testEncodingWritePath() { HFileBlock block = getSampleHFileBlock(); Pair<ByteBuffer, BlockType> result = blockEncoder.beforeWriteToDisk(block.getBufferWithoutHeader(), - includesMemstoreTS); + includesMemstoreTS, HFileBlock.DUMMY_HEADER); int size = result.getFirst().limit() - HFileBlock.HEADER_SIZE; HFileBlock blockOnDisk = new HFileBlock(result.getSecond(), size, size, -1, result.getFirst(), HFileBlock.FILL_HEADER, 0, - includesMemstoreTS); + includesMemstoreTS, block.getMinorVersion(), + block.getBytesPerChecksum(), block.getChecksumType(), + block.getOnDiskDataSizeWithHeader()); if (blockEncoder.getEncodingOnDisk() != DataBlockEncoding.NONE) { @@ -158,7 +161,8 @@ private HFileBlock getSampleHFileBlock() { keyValues.rewind(); buf.put(keyValues); HFileBlock b = new HFileBlock(BlockType.DATA, size, size, -1, buf, - HFileBlock.FILL_HEADER, 0, includesMemstoreTS); + HFileBlock.FILL_HEADER, 0, includesMemstoreTS, + HFileReaderV2.MAX_MINOR_VERSION, 0, ChecksumType.NULL.getCode(), 0); 
UNKNOWN_TABLE_AND_CF.passSchemaMetricsTo(b); return b; } diff --git a/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderV1.java b/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderV1.java index c745ffcb9e1f..d313ddd435d4 100644 --- a/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderV1.java +++ b/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderV1.java @@ -76,7 +76,7 @@ public void testReadingExistingVersion1HFile() throws IOException { assertEquals(N, reader.getEntries()); assertEquals(N, trailer.getEntryCount()); - assertEquals(1, trailer.getVersion()); + assertEquals(1, trailer.getMajorVersion()); assertEquals(Compression.Algorithm.GZ, trailer.getCompressionCodec()); for (boolean pread : new boolean[] { false, true }) { diff --git a/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java b/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java index 365d9fa471b8..e3b18c91c924 100644 --- a/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java +++ b/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java @@ -123,7 +123,7 @@ public void testHFileFormatV2() throws IOException { FixedFileTrailer trailer = FixedFileTrailer.readFromStream(fsdis, fileSize); - assertEquals(2, trailer.getVersion()); + assertEquals(2, trailer.getMajorVersion()); assertEquals(ENTRY_COUNT, trailer.getEntryCount()); HFileBlock.FSReader blockReader = diff --git a/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java b/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java index cbcbffa54b79..f1cd8a02ca5c 100644 --- a/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java +++ b/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java @@ -189,6 +189,8 @@ public boolean run(String[] args) throws IOException { .withCompression(compr) .withBloomType(bloomType) .withMaxKeyCount(numKV) + .withChecksumType(HFile.DEFAULT_CHECKSUM_TYPE) + .withBytesPerChecksum(HFile.DEFAULT_BYTES_PER_CHECKSUM) .build(); rand = new Random(); diff --git a/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java b/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java index e4d849331cb7..ebc5373e2248 100644 --- a/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java +++ b/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java @@ -356,6 +356,8 @@ public void runMergeWorkload() throws IOException { .withDataBlockEncoder(dataBlockEncoder) .withBloomType(bloomType) .withMaxKeyCount(maxKeyCount) + .withChecksumType(HFile.DEFAULT_CHECKSUM_TYPE) + .withBytesPerChecksum(HFile.DEFAULT_BYTES_PER_CHECKSUM) .build(); StatisticsPrinter statsPrinter = new StatisticsPrinter(); diff --git a/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java b/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java index 8859793c1bdf..aeaf62550684 100644 --- a/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java +++ b/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java @@ -298,6 +298,8 @@ private Path writeStoreFile(int t, BloomType bt, List<KeyValue> kvs) BLOCK_SIZES[t]) .withOutputDir(TEST_UTIL.getDataTestDir()) .withBloomType(bt) + .withChecksumType(HFile.DEFAULT_CHECKSUM_TYPE) + .withBytesPerChecksum(HFile.DEFAULT_BYTES_PER_CHECKSUM) .build(); assertTrue(w.hasGeneralBloom()); diff --git 
a/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java b/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java index f93487d6adb4..89dfbf74af8e 100644 --- a/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java +++ b/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java @@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.HFileScanner; import org.apache.hadoop.hbase.io.hfile.NoOpDataBlockEncoder; @@ -69,10 +70,12 @@ public void testHFileScannerThrowsErrors() throws IOException { Path hfilePath = new Path(new Path( util.getDataTestDir("internalScannerExposesErrors"), "regionname"), "familyname"); - FaultyFileSystem fs = new FaultyFileSystem(util.getTestFileSystem()); + HFileSystem hfs = (HFileSystem)util.getTestFileSystem(); + FaultyFileSystem faultyfs = new FaultyFileSystem(hfs.getBackingFs()); + FileSystem fs = new HFileSystem(faultyfs); CacheConfig cacheConf = new CacheConfig(util.getConfiguration()); StoreFile.Writer writer = new StoreFile.WriterBuilder( - util.getConfiguration(), cacheConf, fs, 2*1024) + util.getConfiguration(), cacheConf, hfs, 2*1024) .withOutputDir(hfilePath) .build(); TestStoreFile.writeStoreFile( @@ -85,14 +88,14 @@ public void testHFileScannerThrowsErrors() throws IOException { StoreFile.Reader reader = sf.createReader(); HFileScanner scanner = reader.getScanner(false, true); - FaultyInputStream inStream = fs.inStreams.get(0).get(); + FaultyInputStream inStream = faultyfs.inStreams.get(0).get(); assertNotNull(inStream); scanner.seekTo(); // Do at least one successful read assertTrue(scanner.next()); - inStream.startFaults(); + faultyfs.startFaults(); try { int scanned=0; @@ -116,10 +119,12 @@ public void testStoreFileScannerThrowsErrors() throws IOException { Path hfilePath = new Path(new Path( util.getDataTestDir("internalScannerExposesErrors"), "regionname"), "familyname"); - FaultyFileSystem fs = new FaultyFileSystem(util.getTestFileSystem()); + HFileSystem hfs = (HFileSystem)util.getTestFileSystem(); + FaultyFileSystem faultyfs = new FaultyFileSystem(hfs.getBackingFs()); + HFileSystem fs = new HFileSystem(faultyfs); CacheConfig cacheConf = new CacheConfig(util.getConfiguration()); StoreFile.Writer writer = new StoreFile.WriterBuilder( - util.getConfiguration(), cacheConf, fs, 2 * 1024) + util.getConfiguration(), cacheConf, hfs, 2 * 1024) .withOutputDir(hfilePath) .build(); TestStoreFile.writeStoreFile( @@ -132,14 +137,13 @@ public void testStoreFileScannerThrowsErrors() throws IOException { Collections.singletonList(sf), false, true, false); KeyValueScanner scanner = scanners.get(0); - FaultyInputStream inStream = fs.inStreams.get(0).get(); + FaultyInputStream inStream = faultyfs.inStreams.get(0).get(); assertNotNull(inStream); scanner.seek(KeyValue.LOWESTKEY); // Do at least one successful read assertNotNull(scanner.next()); - - inStream.startFaults(); + faultyfs.startFaults(); try { int scanned=0; @@ -220,6 +224,15 @@ public FSDataInputStream open(Path p, int bufferSize) throws IOException { inStreams.add(new SoftReference<FaultyInputStream>(faulty)); return faulty; } + + /** + * Starts to simulate faults on all streams opened so far + */ + public void startFaults() { + for (SoftReference<FaultyInputStream> is: inStreams) { + 
is.get().startFaults(); + } + } } static class FaultyInputStream extends FSDataInputStream { diff --git a/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java index 8db43a41ae08..e2db6886c882 100644 --- a/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java +++ b/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java @@ -3173,7 +3173,7 @@ public void testDeleteRowWithBloomFilter() throws IOException { // set up a cluster with 3 nodes - MiniHBaseCluster cluster; + MiniHBaseCluster cluster = null; String dataNodeHosts[] = new String[] { "host1", "host2", "host3" }; int regionServersCount = 3; @@ -3221,7 +3221,9 @@ public void testDeleteRowWithBloomFilter() throws IOException { ht.close(); } finally { - htu.shutdownMiniCluster(); + if (cluster != null) { + htu.shutdownMiniCluster(); + } } } diff --git a/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java b/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java index 5b3b96234649..76c329fa4bff 100644 --- a/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java +++ b/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java @@ -52,6 +52,7 @@ import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics; import org.apache.hadoop.hbase.util.BloomFilterFactory; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.ChecksumType; import org.junit.experimental.categories.Category; import org.mockito.Mockito; @@ -69,6 +70,9 @@ public class TestStoreFile extends HBaseTestCase { private String ROOT_DIR; private Map<String, Long> startingMetrics; + private static final ChecksumType CKTYPE = ChecksumType.CRC32; + private static final int CKBYTES = 512; + @Override public void setUp() throws Exception { super.setUp(); @@ -401,6 +405,8 @@ public void testBloomFilter() throws Exception { .withFilePath(f) .withBloomType(StoreFile.BloomType.ROW) .withMaxKeyCount(2000) + .withChecksumType(CKTYPE) + .withBytesPerChecksum(CKBYTES) .build(); bloomWriteRead(writer, fs); } @@ -420,6 +426,8 @@ public void testDeleteFamilyBloomFilter() throws Exception { fs, StoreFile.DEFAULT_BLOCKSIZE_SMALL) .withFilePath(f) .withMaxKeyCount(2000) + .withChecksumType(CKTYPE) + .withBytesPerChecksum(CKBYTES) .build(); // add delete family @@ -490,6 +498,8 @@ public void testBloomTypes() throws Exception { .withFilePath(f) .withBloomType(bt[x]) .withMaxKeyCount(expKeys[x]) + .withChecksumType(CKTYPE) + .withBytesPerChecksum(CKBYTES) .build(); long now = System.currentTimeMillis(); @@ -565,6 +575,8 @@ public void testBloomEdgeCases() throws Exception { .withFilePath(f) .withBloomType(StoreFile.BloomType.ROW) .withMaxKeyCount(2000) + .withChecksumType(CKTYPE) + .withBytesPerChecksum(CKBYTES) .build(); assertFalse(writer.hasGeneralBloom()); writer.close(); @@ -592,6 +604,8 @@ public void testBloomEdgeCases() throws Exception { .withFilePath(f) .withBloomType(StoreFile.BloomType.ROW) .withMaxKeyCount(Integer.MAX_VALUE) + .withChecksumType(CKTYPE) + .withBytesPerChecksum(CKBYTES) .build(); assertFalse(writer.hasGeneralBloom()); writer.close(); @@ -859,6 +873,8 @@ private StoreFile.Writer writeStoreFile(Configuration conf, blockSize) .withFilePath(path) .withMaxKeyCount(2000) + .withChecksumType(CKTYPE) + .withBytesPerChecksum(CKBYTES) .build(); // We'll write N-1 KVs to ensure we don't write an extra block kvs.remove(kvs.size()-1); @@ -890,6 +906,8 @@ public void 
testDataBlockEncodingMetaData() throws IOException { .withFilePath(path) .withDataBlockEncoder(dataBlockEncoder) .withMaxKeyCount(2000) + .withChecksumType(CKTYPE) + .withBytesPerChecksum(CKBYTES) .build(); writer.close(); diff --git a/src/test/java/org/apache/hadoop/hbase/regionserver/handler/TestCloseRegionHandler.java b/src/test/java/org/apache/hadoop/hbase/regionserver/handler/TestCloseRegionHandler.java index e205acb403e0..ad5511c98675 100644 --- a/src/test/java/org/apache/hadoop/hbase/regionserver/handler/TestCloseRegionHandler.java +++ b/src/test/java/org/apache/hadoop/hbase/regionserver/handler/TestCloseRegionHandler.java @@ -133,7 +133,8 @@ public void setupHRI() { @Test public void testZKClosingNodeVersionMismatch() throws IOException, NodeExistsException, KeeperException { final Server server = new MockServer(HTU); - final RegionServerServices rss = new MockRegionServerServices(); + final MockRegionServerServices rss = new MockRegionServerServices(); + rss.setFileSystem(HTU.getTestFileSystem()); HTableDescriptor htd = TEST_HTD; final HRegionInfo hri = TEST_HRI; @@ -169,7 +170,8 @@ public void setupHRI() { @Test public void testCloseRegion() throws IOException, NodeExistsException, KeeperException { final Server server = new MockServer(HTU); - final RegionServerServices rss = new MockRegionServerServices(); + final MockRegionServerServices rss = new MockRegionServerServices(); + rss.setFileSystem(HTU.getTestFileSystem()); HTableDescriptor htd = TEST_HTD; HRegionInfo hri = TEST_HRI; diff --git a/src/test/java/org/apache/hadoop/hbase/util/MockRegionServerServices.java b/src/test/java/org/apache/hadoop/hbase/util/MockRegionServerServices.java index 967970d3d1f9..bb3ddd728a7c 100644 --- a/src/test/java/org/apache/hadoop/hbase/util/MockRegionServerServices.java +++ b/src/test/java/org/apache/hadoop/hbase/util/MockRegionServerServices.java @@ -24,8 +24,10 @@ import java.util.concurrent.ConcurrentSkipListMap; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.catalog.CatalogTracker; +import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.ipc.RpcServer; import org.apache.hadoop.hbase.regionserver.CompactionRequestor; import org.apache.hadoop.hbase.regionserver.FlushRequester; @@ -45,6 +47,7 @@ public class MockRegionServerServices implements RegionServerServices { private boolean stopping = false; private final ConcurrentSkipListMap<byte[], Boolean> rit = new ConcurrentSkipListMap<byte[], Boolean>(Bytes.BYTES_COMPARATOR); + private HFileSystem hfs = null; @Override public boolean removeFromOnlineRegions(String encodedRegionName) { @@ -147,5 +150,13 @@ public boolean isStopped() { public boolean isAborted() { return false; } - + + @Override + public HFileSystem getFileSystem() { + return this.hfs; + } + + public void setFileSystem(FileSystem hfs) { + this.hfs = (HFileSystem)hfs; + } }
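The HBase portion above threads checksum settings through the StoreFile writer builder; as a minimal sketch of the resulting call pattern (conf, cacheConf, fs and path are assumed to be in scope, and the constants mirror the CKTYPE/CKBYTES values added to TestStoreFile):

    // Sketch of the builder usage the diff introduces; not a complete program.
    StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, fs, 2 * 1024)
        .withFilePath(path)
        .withMaxKeyCount(2000)
        .withChecksumType(ChecksumType.CRC32)   // CKTYPE in the test above
        .withBytesPerChecksum(512)              // CKBYTES: one checksum per 512 bytes
        .build();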
42d5bdd3378bbd246a5ecf4a3c09f76f519e4e9b
elasticsearch
If matching root doc's inner objects don't match the `nested_filter` then the `missing` value should be used to sort the root doc. Closes #3020
a
https://github.com/elastic/elasticsearch
diff --git a/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/ByteValuesComparator.java b/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/ByteValuesComparator.java index a34a2f0650102..797b680119d3c 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/ByteValuesComparator.java +++ b/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/ByteValuesComparator.java @@ -66,4 +66,9 @@ public void add(int slot, int doc) { public void divide(int slot, int divisor) { values[slot] /= divisor; } + + @Override + public void missing(int slot) { + values[slot] = (byte) missingValue; + } } diff --git a/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparator.java b/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparator.java index cdcef1d39d8ad..a49aebca609dd 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparator.java +++ b/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparator.java @@ -66,4 +66,9 @@ public void add(int slot, int doc) { public void divide(int slot, int divisor) { values[slot] /= divisor; } + + @Override + public void missing(int slot) { + values[slot] = missingValue; + } } diff --git a/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorBase.java b/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorBase.java index 4af80a427ec10..1f798326608ee 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorBase.java +++ b/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorBase.java @@ -61,6 +61,11 @@ public final FieldComparator<T> setNextReader(AtomicReaderContext context) throw return this; } + @Override + public int compareBottomMissing() { + return compare(bottom, missingValue); + } + static final int compare(double left, double right) { return Double.compare(left, right); } diff --git a/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparator.java b/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparator.java index 0a41d9b3f1665..84045fadd3484 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparator.java +++ b/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparator.java @@ -66,4 +66,9 @@ public void add(int slot, int doc) { public void divide(int slot, int divisor) { values[slot] /= divisor; } + + @Override + public void missing(int slot) { + values[slot] = (float) missingValue; + } } diff --git a/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/IntValuesComparator.java b/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/IntValuesComparator.java index 02ffc9434193a..725010cf1c6e2 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/IntValuesComparator.java +++ b/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/IntValuesComparator.java @@ -72,4 +72,9 @@ public void add(int slot, int doc) { public void divide(int slot, int divisor) { values[slot] /= divisor; } + + @Override + public void missing(int slot) { + values[slot] = (int) missingValue; + } } diff --git a/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparator.java b/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparator.java index 
8d75c17039519..675fa6b3575d2 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparator.java +++ b/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparator.java @@ -65,4 +65,9 @@ public void add(int slot, int doc) { public void divide(int slot, int divisor) { values[slot] /= divisor; } + + @Override + public void missing(int slot) { + values[slot] = missingValue; + } } diff --git a/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorBase.java b/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorBase.java index c19f3f655c5d9..f030fe9f12346 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorBase.java +++ b/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorBase.java @@ -72,6 +72,11 @@ public final FieldComparator<T> setNextReader(AtomicReaderContext context) throw return this; } + @Override + public int compareBottomMissing() { + return compare(bottom, missingValue); + } + private static final class MultiValueWrapper extends LongValues.Filtered { private final SortMode sortMode; diff --git a/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/NumberComparatorBase.java b/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/NumberComparatorBase.java index 66693987c51fc..ae05d9722ee1d 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/NumberComparatorBase.java +++ b/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/NumberComparatorBase.java @@ -42,4 +42,19 @@ public abstract class NumberComparatorBase<T> extends FieldComparator<T> { * @param divisor The specified divisor */ public abstract void divide(int slot, int divisor); + + /** + * Assigns the underlying missing value to the specified slot, if the actual implementation supports missing value. + * + * @param slot The slot to assign the the missing value to. + */ + public abstract void missing(int slot); + + /** + * Compares the missing value to the bottom. + * + * @return any N < 0 if the bottom value is not competitive with the missing value, any N > 0 if the + * bottom value is competitive with the missing value and 0 if they are equal. 
+ */ + public abstract int compareBottomMissing(); } diff --git a/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/ShortValuesComparator.java b/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/ShortValuesComparator.java index 107d995a46fbf..3794a141e5041 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/ShortValuesComparator.java +++ b/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/ShortValuesComparator.java @@ -68,4 +68,9 @@ public void add(int slot, int doc) { public void divide(int slot, int divisor) { values[slot] /= divisor; } + + @Override + public void missing(int slot) { + values[slot] = (short) missingValue; + } } diff --git a/src/main/java/org/elasticsearch/index/search/nested/NestedFieldComparatorSource.java b/src/main/java/org/elasticsearch/index/search/nested/NestedFieldComparatorSource.java index fe87ecd418ea9..aebcce3234bcd 100644 --- a/src/main/java/org/elasticsearch/index/search/nested/NestedFieldComparatorSource.java +++ b/src/main/java/org/elasticsearch/index/search/nested/NestedFieldComparatorSource.java @@ -59,9 +59,9 @@ public FieldComparator<?> newComparator(String fieldname, int numHits, int sortP case MIN: return new NestedFieldComparator.Lowest(wrappedComparator, rootDocumentsFilter, innerDocumentsFilter, numHits); case SUM: - return new Sum((NumberComparatorBase) wrappedComparator, rootDocumentsFilter, innerDocumentsFilter, numHits); + return new NestedFieldComparator.Sum((NumberComparatorBase) wrappedComparator, rootDocumentsFilter, innerDocumentsFilter, numHits); case AVG: - return new Avg((NumberComparatorBase) wrappedComparator, rootDocumentsFilter, innerDocumentsFilter, numHits); + return new NestedFieldComparator.Avg((NumberComparatorBase) wrappedComparator, rootDocumentsFilter, innerDocumentsFilter, numHits); default: throw new ElasticSearchIllegalArgumentException( String.format("Unsupported sort_mode[%s] for nested type", sortMode) @@ -73,20 +73,21 @@ public FieldComparator<?> newComparator(String fieldname, int numHits, int sortP public SortField.Type reducedType() { return wrappedSource.reducedType(); } + } -class Sum extends FieldComparator { +abstract class NestedFieldComparator extends FieldComparator { final Filter rootDocumentsFilter; final Filter innerDocumentsFilter; final int spareSlot; - NumberComparatorBase wrappedComparator; + FieldComparator wrappedComparator; FixedBitSet rootDocuments; FixedBitSet innerDocuments; int bottomSlot; - Sum(NumberComparatorBase wrappedComparator, Filter rootDocumentsFilter, Filter innerDocumentsFilter, int spareSlot) { + NestedFieldComparator(FieldComparator wrappedComparator, Filter rootDocumentsFilter, Filter innerDocumentsFilter, int spareSlot) { this.wrappedComparator = wrappedComparator; this.rootDocumentsFilter = rootDocumentsFilter; this.innerDocumentsFilter = innerDocumentsFilter; @@ -94,12 +95,12 @@ class Sum extends FieldComparator { } @Override - public int compare(int slot1, int slot2) { + public final int compare(int slot1, int slot2) { return wrappedComparator.compare(slot1, slot2); } @Override - public void setBottom(int slot) { + public final void setBottom(int slot) { wrappedComparator.setBottom(slot); this.bottomSlot = slot; } @@ -123,171 +124,19 @@ public FieldComparator setNextReader(AtomicReaderContext context) throws IOExcep this.rootDocuments = DocIdSets.toFixedBitSet(rootDocuments.iterator(), context.reader().maxDoc()); } - wrappedComparator = (NumberComparatorBase) wrappedComparator.setNextReader(context); + 
wrappedComparator = wrappedComparator.setNextReader(context); return this; } @Override - public Object value(int slot) { + public final Object value(int slot) { return wrappedComparator.value(slot); } @Override - public int compareBottom(int rootDoc) throws IOException { - if (rootDoc == 0 || rootDocuments == null || innerDocuments == null) { - return 0; - } - - int prevRootDoc = rootDocuments.prevSetBit(rootDoc - 1); - int nestedDoc = innerDocuments.nextSetBit(prevRootDoc + 1); - if (nestedDoc >= rootDoc || nestedDoc == -1) { - return 0; - } - - wrappedComparator.copy(spareSlot, nestedDoc); - nestedDoc = innerDocuments.nextSetBit(nestedDoc + 1); - while (nestedDoc > prevRootDoc && nestedDoc < rootDoc) { - wrappedComparator.add(spareSlot, nestedDoc); - nestedDoc = innerDocuments.nextSetBit(nestedDoc + 1); - } - return compare(bottomSlot, spareSlot); - } - - @Override - public void copy(int slot, int rootDoc) throws IOException { - if (rootDoc == 0 || rootDocuments == null || innerDocuments == null) { - return; - } - - int prevRootDoc = rootDocuments.prevSetBit(rootDoc - 1); - int nestedDoc = innerDocuments.nextSetBit(prevRootDoc + 1); - if (nestedDoc >= rootDoc || nestedDoc == -1) { - return; - } - - wrappedComparator.copy(slot, nestedDoc); - nestedDoc = innerDocuments.nextSetBit(nestedDoc + 1); - while (nestedDoc > prevRootDoc && nestedDoc < rootDoc) { - wrappedComparator.add(slot, nestedDoc); - nestedDoc = innerDocuments.nextSetBit(nestedDoc + 1); - } - } - - @Override - public int compareDocToValue(int rootDoc, Object value) throws IOException { + public final int compareDocToValue(int rootDoc, Object value) throws IOException { throw new UnsupportedOperationException("compareDocToValue() not used for sorting in ES"); } -} - -final class Avg extends Sum { - - Avg(NumberComparatorBase wrappedComparator, Filter rootDocumentsFilter, Filter innerDocumentsFilter, int spareSlot) { - super(wrappedComparator, rootDocumentsFilter, innerDocumentsFilter, spareSlot); - } - - @Override - public int compareBottom(int rootDoc) throws IOException { - if (rootDoc == 0 || rootDocuments == null || innerDocuments == null) { - return 0; - } - - int prevRootDoc = rootDocuments.prevSetBit(rootDoc - 1); - int nestedDoc = innerDocuments.nextSetBit(prevRootDoc + 1); - if (nestedDoc >= rootDoc || nestedDoc == -1) { - return 0; - } - - int counter = 1; - wrappedComparator.copy(spareSlot, nestedDoc); - nestedDoc = innerDocuments.nextSetBit(nestedDoc + 1); - while (nestedDoc > prevRootDoc && nestedDoc < rootDoc) { - wrappedComparator.add(spareSlot, nestedDoc); - nestedDoc = innerDocuments.nextSetBit(nestedDoc + 1); - counter++; - } - wrappedComparator.divide(spareSlot, counter); - return compare(bottomSlot, spareSlot); - } - - @Override - public void copy(int slot, int rootDoc) throws IOException { - if (rootDoc == 0 || rootDocuments == null || innerDocuments == null) { - return; - } - - int prevRootDoc = rootDocuments.prevSetBit(rootDoc - 1); - int nestedDoc = innerDocuments.nextSetBit(prevRootDoc + 1); - if (nestedDoc >= rootDoc || nestedDoc == -1) { - return; - } - - int counter = 1; - wrappedComparator.copy(slot, nestedDoc); - nestedDoc = innerDocuments.nextSetBit(nestedDoc + 1); - while (nestedDoc > prevRootDoc && nestedDoc < rootDoc) { - wrappedComparator.add(slot, nestedDoc); - nestedDoc = innerDocuments.nextSetBit(nestedDoc + 1); - counter++; - } - wrappedComparator.divide(slot, counter); - } -} - -// Move to Lucene join module -abstract class NestedFieldComparator extends FieldComparator { - - final Filter 
rootDocumentsFilter; - final Filter innerDocumentsFilter; - final int spareSlot; - - FieldComparator wrappedComparator; - FixedBitSet rootDocuments; - FixedBitSet innerDocuments; - - NestedFieldComparator(FieldComparator wrappedComparator, Filter rootDocumentsFilter, Filter innerDocumentsFilter, int spareSlot) { - this.wrappedComparator = wrappedComparator; - this.rootDocumentsFilter = rootDocumentsFilter; - this.innerDocumentsFilter = innerDocumentsFilter; - this.spareSlot = spareSlot; - } - - @Override - public int compare(int slot1, int slot2) { - return wrappedComparator.compare(slot1, slot2); - } - - @Override - public void setBottom(int slot) { - wrappedComparator.setBottom(slot); - } - - @Override - public FieldComparator setNextReader(AtomicReaderContext context) throws IOException { - DocIdSet innerDocuments = innerDocumentsFilter.getDocIdSet(context, null); - if (DocIdSets.isEmpty(innerDocuments)) { - this.innerDocuments = null; - } else if (innerDocuments instanceof FixedBitSet) { - this.innerDocuments = (FixedBitSet) innerDocuments; - } else { - this.innerDocuments = DocIdSets.toFixedBitSet(innerDocuments.iterator(), context.reader().maxDoc()); - } - DocIdSet rootDocuments = rootDocumentsFilter.getDocIdSet(context, null); - if (DocIdSets.isEmpty(rootDocuments)) { - this.rootDocuments = null; - } else if (rootDocuments instanceof FixedBitSet) { - this.rootDocuments = (FixedBitSet) rootDocuments; - } else { - this.rootDocuments = DocIdSets.toFixedBitSet(rootDocuments.iterator(), context.reader().maxDoc()); - } - - wrappedComparator = wrappedComparator.setNextReader(context); - return this; - } - - @Override - public Object value(int slot) { - return wrappedComparator.value(slot); - } final static class Lowest extends NestedFieldComparator { @@ -298,14 +147,14 @@ final static class Lowest extends NestedFieldComparator { @Override public int compareBottom(int rootDoc) throws IOException { if (rootDoc == 0 || rootDocuments == null || innerDocuments == null) { - return 0; + return compareBottomMissing(wrappedComparator); } // We need to copy the lowest value from all nested docs into slot. int prevRootDoc = rootDocuments.prevSetBit(rootDoc - 1); int nestedDoc = innerDocuments.nextSetBit(prevRootDoc + 1); if (nestedDoc >= rootDoc || nestedDoc == -1) { - return 0; + return compareBottomMissing(wrappedComparator); } // We only need to emit a single cmp value for any matching nested doc @@ -333,6 +182,7 @@ public int compareBottom(int rootDoc) throws IOException { @Override public void copy(int slot, int rootDoc) throws IOException { if (rootDoc == 0 || rootDocuments == null || innerDocuments == null) { + copyMissing(wrappedComparator, slot); return; } @@ -340,6 +190,7 @@ public void copy(int slot, int rootDoc) throws IOException { int prevRootDoc = rootDocuments.prevSetBit(rootDoc - 1); int nestedDoc = innerDocuments.nextSetBit(prevRootDoc + 1); if (nestedDoc >= rootDoc || nestedDoc == -1) { + copyMissing(wrappedComparator, slot); return; } wrappedComparator.copy(spareSlot, nestedDoc); @@ -357,42 +208,6 @@ public void copy(int slot, int rootDoc) throws IOException { } } - @Override - @SuppressWarnings("unchecked") - public int compareDocToValue(int rootDoc, Object value) throws IOException { - if (rootDoc == 0 || rootDocuments == null || innerDocuments == null) { - return 0; - } - - // We need to copy the lowest value from all nested docs into slot. 
- int prevRootDoc = rootDocuments.prevSetBit(rootDoc - 1); - int nestedDoc = innerDocuments.nextSetBit(prevRootDoc + 1); - if (nestedDoc >= rootDoc || nestedDoc == -1) { - return 0; - } - - // We only need to emit a single cmp value for any matching nested doc - int cmp = wrappedComparator.compareBottom(nestedDoc); - if (cmp > 0) { - return cmp; - } - - while (true) { - nestedDoc = innerDocuments.nextSetBit(nestedDoc + 1); - if (nestedDoc >= rootDoc || nestedDoc == -1) { - return cmp; - } - int cmp1 = wrappedComparator.compareDocToValue(nestedDoc, value); - if (cmp1 > 0) { - return cmp1; - } else { - if (cmp1 == 0) { - cmp = 0; - } - } - } - } - } final static class Highest extends NestedFieldComparator { @@ -404,13 +219,13 @@ final static class Highest extends NestedFieldComparator { @Override public int compareBottom(int rootDoc) throws IOException { if (rootDoc == 0 || rootDocuments == null || innerDocuments == null) { - return 0; + return compareBottomMissing(wrappedComparator); } int prevRootDoc = rootDocuments.prevSetBit(rootDoc - 1); int nestedDoc = innerDocuments.nextSetBit(prevRootDoc + 1); if (nestedDoc >= rootDoc || nestedDoc == -1) { - return 0; + return compareBottomMissing(wrappedComparator); } int cmp = wrappedComparator.compareBottom(nestedDoc); @@ -437,12 +252,14 @@ public int compareBottom(int rootDoc) throws IOException { @Override public void copy(int slot, int rootDoc) throws IOException { if (rootDoc == 0 || rootDocuments == null || innerDocuments == null) { + copyMissing(wrappedComparator, slot); return; } int prevRootDoc = rootDocuments.prevSetBit(rootDoc - 1); int nestedDoc = innerDocuments.nextSetBit(prevRootDoc + 1); if (nestedDoc >= rootDoc || nestedDoc == -1) { + copyMissing(wrappedComparator, slot); return; } wrappedComparator.copy(spareSlot, nestedDoc); @@ -460,40 +277,146 @@ public void copy(int slot, int rootDoc) throws IOException { } } + } + + final static class Sum extends NestedFieldComparator { + + NumberComparatorBase wrappedComparator; + + Sum(NumberComparatorBase wrappedComparator, Filter rootDocumentsFilter, Filter innerDocumentsFilter, int spareSlot) { + super(wrappedComparator, rootDocumentsFilter, innerDocumentsFilter, spareSlot); + this.wrappedComparator = wrappedComparator; + } + + @Override + public int compareBottom(int rootDoc) throws IOException { + if (rootDoc == 0 || rootDocuments == null || innerDocuments == null) { + return compareBottomMissing(wrappedComparator); + } + + int prevRootDoc = rootDocuments.prevSetBit(rootDoc - 1); + int nestedDoc = innerDocuments.nextSetBit(prevRootDoc + 1); + if (nestedDoc >= rootDoc || nestedDoc == -1) { + return compareBottomMissing(wrappedComparator); + } + + wrappedComparator.copy(spareSlot, nestedDoc); + nestedDoc = innerDocuments.nextSetBit(nestedDoc + 1); + while (nestedDoc > prevRootDoc && nestedDoc < rootDoc) { + wrappedComparator.add(spareSlot, nestedDoc); + nestedDoc = innerDocuments.nextSetBit(nestedDoc + 1); + } + return compare(bottomSlot, spareSlot); + } + @Override - @SuppressWarnings("unchecked") - public int compareDocToValue(int rootDoc, Object value) throws IOException { + public void copy(int slot, int rootDoc) throws IOException { if (rootDoc == 0 || rootDocuments == null || innerDocuments == null) { - return 0; + copyMissing(wrappedComparator, slot); + return; } int prevRootDoc = rootDocuments.prevSetBit(rootDoc - 1); int nestedDoc = innerDocuments.nextSetBit(prevRootDoc + 1); if (nestedDoc >= rootDoc || nestedDoc == -1) { + copyMissing(wrappedComparator, slot); + return; + } + + 
wrappedComparator.copy(slot, nestedDoc); + nestedDoc = innerDocuments.nextSetBit(nestedDoc + 1); + while (nestedDoc > prevRootDoc && nestedDoc < rootDoc) { + wrappedComparator.add(slot, nestedDoc); + nestedDoc = innerDocuments.nextSetBit(nestedDoc + 1); + } + } + + @Override + public FieldComparator setNextReader(AtomicReaderContext context) throws IOException { + super.setNextReader(context); + wrappedComparator = (NumberComparatorBase) super.wrappedComparator; + return this; + } + + } + + final static class Avg extends NestedFieldComparator { + + NumberComparatorBase wrappedComparator; + + Avg(NumberComparatorBase wrappedComparator, Filter rootDocumentsFilter, Filter innerDocumentsFilter, int spareSlot) { + super(wrappedComparator, rootDocumentsFilter, innerDocumentsFilter, spareSlot); + this.wrappedComparator = wrappedComparator; + } + + @Override + public int compareBottom(int rootDoc) throws IOException { + if (rootDoc == 0 || rootDocuments == null || innerDocuments == null) { return 0; } - int cmp = wrappedComparator.compareBottom(nestedDoc); - if (cmp < 0) { - return cmp; + int prevRootDoc = rootDocuments.prevSetBit(rootDoc - 1); + int nestedDoc = innerDocuments.nextSetBit(prevRootDoc + 1); + if (nestedDoc >= rootDoc || nestedDoc == -1) { + return compareBottomMissing(wrappedComparator); } - while (true) { + int counter = 1; + wrappedComparator.copy(spareSlot, nestedDoc); + nestedDoc = innerDocuments.nextSetBit(nestedDoc + 1); + while (nestedDoc > prevRootDoc && nestedDoc < rootDoc) { + wrappedComparator.add(spareSlot, nestedDoc); nestedDoc = innerDocuments.nextSetBit(nestedDoc + 1); - if (nestedDoc >= rootDoc || nestedDoc == -1) { - return cmp; - } - int cmp1 = wrappedComparator.compareDocToValue(nestedDoc, value); - if (cmp1 < 0) { - return cmp1; - } else { - if (cmp1 == 0) { - cmp = 0; - } - } + counter++; } + wrappedComparator.divide(spareSlot, counter); + return compare(bottomSlot, spareSlot); } + @Override + public void copy(int slot, int rootDoc) throws IOException { + if (rootDoc == 0 || rootDocuments == null || innerDocuments == null) { + return; + } + + int prevRootDoc = rootDocuments.prevSetBit(rootDoc - 1); + int nestedDoc = innerDocuments.nextSetBit(prevRootDoc + 1); + if (nestedDoc >= rootDoc || nestedDoc == -1) { + copyMissing(wrappedComparator, slot); + return; + } + + int counter = 1; + wrappedComparator.copy(slot, nestedDoc); + nestedDoc = innerDocuments.nextSetBit(nestedDoc + 1); + while (nestedDoc > prevRootDoc && nestedDoc < rootDoc) { + wrappedComparator.add(slot, nestedDoc); + nestedDoc = innerDocuments.nextSetBit(nestedDoc + 1); + counter++; + } + wrappedComparator.divide(slot, counter); + } + + @Override + public FieldComparator setNextReader(AtomicReaderContext context) throws IOException { + super.setNextReader(context); + wrappedComparator = (NumberComparatorBase) super.wrappedComparator; + return this; + } + } + + static final void copyMissing(FieldComparator comparator, int slot) { + if (comparator instanceof NumberComparatorBase) { + ((NumberComparatorBase) comparator).missing(slot); + } + } + + static final int compareBottomMissing(FieldComparator comparator) { + if (comparator instanceof NumberComparatorBase) { + return ((NumberComparatorBase) comparator).compareBottomMissing(); + } else { + return 0; + } } } \ No newline at end of file diff --git a/src/test/java/org/elasticsearch/test/integration/nested/SimpleNestedTests.java b/src/test/java/org/elasticsearch/test/integration/nested/SimpleNestedTests.java index 05eeeeb565779..19e6d854d7996 100644 
--- a/src/test/java/org/elasticsearch/test/integration/nested/SimpleNestedTests.java +++ b/src/test/java/org/elasticsearch/test/integration/nested/SimpleNestedTests.java @@ -682,6 +682,96 @@ public void testSimpleNestedSorting() throws Exception { assertThat(searchResponse.getHits().hits()[2].sortValues()[0].toString(), equalTo("2")); } + @Test + public void testSimpleNestedSorting_withNestedFilterMissing() throws Exception { + client.admin().indices().prepareDelete().execute().actionGet(); + client.admin().indices().prepareCreate("test") + .setSettings(settingsBuilder() + .put("index.number_of_shards", 1) + .put("index.number_of_replicas", 0) + .put("index.referesh_interval", -1) + .build() + ) + .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") + .startObject("nested1") + .field("type", "nested") + .endObject() + .endObject().endObject().endObject()) + .execute().actionGet(); + client.admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet(); + + client.prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject() + .field("field1", 1) + .startArray("nested1") + .startObject() + .field("field1", 5) + .field("field2", true) + .endObject() + .startObject() + .field("field1", 4) + .field("field2", true) + .endObject() + .endArray() + .endObject()).execute().actionGet(); + client.prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject() + .field("field1", 2) + .startArray("nested1") + .startObject() + .field("field1", 1) + .field("field2", true) + .endObject() + .startObject() + .field("field1", 2) + .field("field2", true) + .endObject() + .endArray() + .endObject()).execute().actionGet(); + // Doc with missing nested docs if nested filter is used + client.admin().indices().prepareRefresh().execute().actionGet(); + client.prepareIndex("test", "type1", "3").setSource(jsonBuilder().startObject() + .field("field1", 3) + .startArray("nested1") + .startObject() + .field("field1", 3) + .field("field2", false) + .endObject() + .startObject() + .field("field1", 4) + .field("field2", false) + .endObject() + .endArray() + .endObject()).execute().actionGet(); + client.admin().indices().prepareRefresh().execute().actionGet(); + + SearchResponse searchResponse = client.prepareSearch("test") + .setTypes("type1") + .setQuery(QueryBuilders.matchAllQuery()) + .addSort(SortBuilders.fieldSort("nested1.field1").setNestedFilter(termFilter("nested1.field2", true)).missing(10).order(SortOrder.ASC)) + .execute().actionGet(); + + assertThat(searchResponse.getHits().totalHits(), equalTo(3l)); + assertThat(searchResponse.getHits().hits()[0].id(), equalTo("2")); + assertThat(searchResponse.getHits().hits()[0].sortValues()[0].toString(), equalTo("1")); + assertThat(searchResponse.getHits().hits()[1].id(), equalTo("1")); + assertThat(searchResponse.getHits().hits()[1].sortValues()[0].toString(), equalTo("4")); + assertThat(searchResponse.getHits().hits()[2].id(), equalTo("3")); + assertThat(searchResponse.getHits().hits()[2].sortValues()[0].toString(), equalTo("10")); + + searchResponse = client.prepareSearch("test") + .setTypes("type1") + .setQuery(QueryBuilders.matchAllQuery()) + .addSort(SortBuilders.fieldSort("nested1.field1").setNestedFilter(termFilter("nested1.field2", true)).missing(10).order(SortOrder.DESC)) + .execute().actionGet(); + + assertThat(searchResponse.getHits().totalHits(), equalTo(3l)); + assertThat(searchResponse.getHits().hits()[0].id(), equalTo("3")); + 
assertThat(searchResponse.getHits().hits()[0].sortValues()[0].toString(), equalTo("10")); + assertThat(searchResponse.getHits().hits()[1].id(), equalTo("1")); + assertThat(searchResponse.getHits().hits()[1].sortValues()[0].toString(), equalTo("5")); + assertThat(searchResponse.getHits().hits()[2].id(), equalTo("2")); + assertThat(searchResponse.getHits().hits()[2].sortValues()[0].toString(), equalTo("2")); + } + @Test public void testSortNestedWithNestedFilter() throws Exception { client.admin().indices().prepareDelete().execute().actionGet(); diff --git a/src/test/java/org/elasticsearch/test/unit/index/search/nested/AbstractNumberNestedSortingTests.java b/src/test/java/org/elasticsearch/test/unit/index/search/nested/AbstractNumberNestedSortingTests.java index ef1027c1995b2..7f6294821e586 100644 --- a/src/test/java/org/elasticsearch/test/unit/index/search/nested/AbstractNumberNestedSortingTests.java +++ b/src/test/java/org/elasticsearch/test/unit/index/search/nested/AbstractNumberNestedSortingTests.java @@ -28,7 +28,6 @@ import org.apache.lucene.search.*; import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.search.join.ToParentBlockJoinQuery; -import org.elasticsearch.common.lucene.search.AndFilter; import org.elasticsearch.common.lucene.search.NotFilter; import org.elasticsearch.common.lucene.search.TermFilter; import org.elasticsearch.common.lucene.search.XFilteredQuery; @@ -40,7 +39,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; import static org.hamcrest.MatcherAssert.assertThat; @@ -148,6 +146,7 @@ public void testNestedSorting() throws Exception { document.add(createField("field1", 5, Field.Store.NO)); docs.add(document); writer.addDocuments(docs); + writer.commit(); docs.clear(); document = new Document(); @@ -167,12 +166,14 @@ public void testNestedSorting() throws Exception { document.add(createField("field1", 6, Field.Store.NO)); docs.add(document); writer.addDocuments(docs); + writer.commit(); // This doc will not be included, because it doesn't have nested docs document = new Document(); document.add(new StringField("__type", "parent", Field.Store.NO)); document.add(createField("field1", 7, Field.Store.NO)); writer.addDocument(document); + writer.commit(); docs.clear(); document = new Document(); @@ -207,7 +208,7 @@ public void testNestedSorting() throws Exception { SortMode sortMode = SortMode.SUM; IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, false)); - IndexFieldData.XFieldComparatorSource innerFieldComparator = createInnerFieldComparator("field2", sortMode); + IndexFieldData.XFieldComparatorSource innerFieldComparator = createInnerFieldComparator("field2", sortMode, null); Filter parentFilter = new TermFilter(new Term("__type", "parent")); Filter childFilter = new NotFilter(parentFilter); NestedFieldComparatorSource nestedComparatorSource = new NestedFieldComparatorSource(sortMode, innerFieldComparator, parentFilter, childFilter); @@ -243,7 +244,7 @@ public void testNestedSorting() throws Exception { assertThat(topDocs.scoreDocs[4].doc, equalTo(3)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[4]).fields[0]).intValue(), equalTo(9)); - childFilter = new AndFilter(Arrays.asList(new NotFilter(parentFilter), new TermFilter(new Term("filter_1", "T")))); + childFilter = new TermFilter(new Term("filter_1", "T")); nestedComparatorSource = new NestedFieldComparatorSource(sortMode, innerFieldComparator, parentFilter, childFilter); query = new ToParentBlockJoinQuery( new 
XFilteredQuery(new MatchAllDocsQuery(), childFilter), @@ -280,6 +281,40 @@ public void testNestedSorting() throws Exception { assertThat(topDocs.scoreDocs[4].doc, equalTo(3)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[4]).fields[0]).intValue(), equalTo(9)); + innerFieldComparator = createInnerFieldComparator("field2", sortMode, 127); + nestedComparatorSource = new NestedFieldComparatorSource(sortMode, innerFieldComparator, parentFilter, childFilter); + sort = new Sort(new SortField("field2", nestedComparatorSource, true)); + topDocs = searcher.search(new TermQuery(new Term("__type", "parent")), 5, sort); + assertThat(topDocs.totalHits, equalTo(8)); + assertThat(topDocs.scoreDocs.length, equalTo(5)); + assertThat(topDocs.scoreDocs[0].doc, equalTo(19)); + assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(127)); + assertThat(topDocs.scoreDocs[1].doc, equalTo(24)); + assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[1]).fields[0]).intValue(), equalTo(127)); + assertThat(topDocs.scoreDocs[2].doc, equalTo(23)); + assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[2]).fields[0]).intValue(), equalTo(12)); + assertThat(topDocs.scoreDocs[3].doc, equalTo(3)); + assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[3]).fields[0]).intValue(), equalTo(9)); + assertThat(topDocs.scoreDocs[4].doc, equalTo(7)); + assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[4]).fields[0]).intValue(), equalTo(8)); + + innerFieldComparator = createInnerFieldComparator("field2", sortMode, -127); + nestedComparatorSource = new NestedFieldComparatorSource(sortMode, innerFieldComparator, parentFilter, childFilter); + sort = new Sort(new SortField("field2", nestedComparatorSource)); + topDocs = searcher.search(new TermQuery(new Term("__type", "parent")), 5, sort); + assertThat(topDocs.totalHits, equalTo(8)); + assertThat(topDocs.scoreDocs.length, equalTo(5)); + assertThat(topDocs.scoreDocs[0].doc, equalTo(19)); + assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(-127)); + assertThat(topDocs.scoreDocs[1].doc, equalTo(24)); + assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[1]).fields[0]).intValue(), equalTo(-127)); + assertThat(topDocs.scoreDocs[2].doc, equalTo(15)); + assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[2]).fields[0]).intValue(), equalTo(3)); + assertThat(topDocs.scoreDocs[3].doc, equalTo(28)); + assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[3]).fields[0]).intValue(), equalTo(3)); + assertThat(topDocs.scoreDocs[4].doc, equalTo(11)); + assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[4]).fields[0]).intValue(), equalTo(7)); + // Moved to method, because floating point based XFieldComparatorSource have different outcome for SortMode avg, // than integral number based implementations... 
assertAvgScoreMode(parentFilter, searcher, innerFieldComparator); @@ -309,6 +344,6 @@ protected void assertAvgScoreMode(Filter parentFilter, IndexSearcher searcher, I protected abstract IndexableField createField(String name, int value, Field.Store store); - protected abstract IndexFieldData.XFieldComparatorSource createInnerFieldComparator(String fieldName, SortMode sortMode); + protected abstract IndexFieldData.XFieldComparatorSource createInnerFieldComparator(String fieldName, SortMode sortMode, Object missingValue); } diff --git a/src/test/java/org/elasticsearch/test/unit/index/search/nested/ByteNestedSortingTests.java b/src/test/java/org/elasticsearch/test/unit/index/search/nested/ByteNestedSortingTests.java index 3b656a344a448..fc79565f88e13 100644 --- a/src/test/java/org/elasticsearch/test/unit/index/search/nested/ByteNestedSortingTests.java +++ b/src/test/java/org/elasticsearch/test/unit/index/search/nested/ByteNestedSortingTests.java @@ -38,9 +38,9 @@ protected FieldDataType getFieldDataType() { } @Override - protected IndexFieldData.XFieldComparatorSource createInnerFieldComparator(String fieldName, SortMode sortMode) { + protected IndexFieldData.XFieldComparatorSource createInnerFieldComparator(String fieldName, SortMode sortMode, Object missingValue) { ByteArrayIndexFieldData fieldData = getForField(fieldName); - return new ByteValuesComparatorSource(fieldData, null, sortMode); + return new ByteValuesComparatorSource(fieldData, missingValue, sortMode); } @Override diff --git a/src/test/java/org/elasticsearch/test/unit/index/search/nested/DoubleNestedSortingTests.java b/src/test/java/org/elasticsearch/test/unit/index/search/nested/DoubleNestedSortingTests.java index 0d4f76e550c9d..4f6565600bb76 100644 --- a/src/test/java/org/elasticsearch/test/unit/index/search/nested/DoubleNestedSortingTests.java +++ b/src/test/java/org/elasticsearch/test/unit/index/search/nested/DoubleNestedSortingTests.java @@ -49,9 +49,9 @@ protected FieldDataType getFieldDataType() { } @Override - protected IndexFieldData.XFieldComparatorSource createInnerFieldComparator(String fieldName, SortMode sortMode) { + protected IndexFieldData.XFieldComparatorSource createInnerFieldComparator(String fieldName, SortMode sortMode, Object missingValue) { DoubleArrayIndexFieldData fieldData = getForField(fieldName); - return new DoubleValuesComparatorSource(fieldData, null, sortMode); + return new DoubleValuesComparatorSource(fieldData, missingValue, sortMode); } @Override diff --git a/src/test/java/org/elasticsearch/test/unit/index/search/nested/FloatNestedSortingTests.java b/src/test/java/org/elasticsearch/test/unit/index/search/nested/FloatNestedSortingTests.java index 3199dc26f80cd..37fc9ee1b9f2f 100644 --- a/src/test/java/org/elasticsearch/test/unit/index/search/nested/FloatNestedSortingTests.java +++ b/src/test/java/org/elasticsearch/test/unit/index/search/nested/FloatNestedSortingTests.java @@ -49,9 +49,9 @@ protected FieldDataType getFieldDataType() { } @Override - protected IndexFieldData.XFieldComparatorSource createInnerFieldComparator(String fieldName, SortMode sortMode) { + protected IndexFieldData.XFieldComparatorSource createInnerFieldComparator(String fieldName, SortMode sortMode, Object missingValue) { FloatArrayIndexFieldData fieldData = getForField(fieldName); - return new FloatValuesComparatorSource(fieldData, null, sortMode); + return new FloatValuesComparatorSource(fieldData, missingValue, sortMode); } @Override diff --git 
a/src/test/java/org/elasticsearch/test/unit/index/search/nested/IntegerNestedSortingTests.java b/src/test/java/org/elasticsearch/test/unit/index/search/nested/IntegerNestedSortingTests.java index a5d3ba8349118..6bd9810ac6cce 100644 --- a/src/test/java/org/elasticsearch/test/unit/index/search/nested/IntegerNestedSortingTests.java +++ b/src/test/java/org/elasticsearch/test/unit/index/search/nested/IntegerNestedSortingTests.java @@ -38,9 +38,9 @@ protected FieldDataType getFieldDataType() { } @Override - protected IndexFieldData.XFieldComparatorSource createInnerFieldComparator(String fieldName, SortMode sortMode) { + protected IndexFieldData.XFieldComparatorSource createInnerFieldComparator(String fieldName, SortMode sortMode, Object missingValue) { IntArrayIndexFieldData fieldData = getForField(fieldName); - return new IntValuesComparatorSource(fieldData, null, sortMode); + return new IntValuesComparatorSource(fieldData, missingValue, sortMode); } @Override diff --git a/src/test/java/org/elasticsearch/test/unit/index/search/nested/LongNestedSortingTests.java b/src/test/java/org/elasticsearch/test/unit/index/search/nested/LongNestedSortingTests.java index a8f2ac6bc2406..96c28899f0e30 100644 --- a/src/test/java/org/elasticsearch/test/unit/index/search/nested/LongNestedSortingTests.java +++ b/src/test/java/org/elasticsearch/test/unit/index/search/nested/LongNestedSortingTests.java @@ -38,9 +38,9 @@ protected FieldDataType getFieldDataType() { } @Override - protected IndexFieldData.XFieldComparatorSource createInnerFieldComparator(String fieldName, SortMode sortMode) { + protected IndexFieldData.XFieldComparatorSource createInnerFieldComparator(String fieldName, SortMode sortMode, Object missingValue) { LongArrayIndexFieldData fieldData = getForField(fieldName); - return new LongValuesComparatorSource(fieldData, null, sortMode); + return new LongValuesComparatorSource(fieldData, missingValue, sortMode); } @Override diff --git a/src/test/java/org/elasticsearch/test/unit/index/search/nested/ShortNestedSortingTests.java b/src/test/java/org/elasticsearch/test/unit/index/search/nested/ShortNestedSortingTests.java index b3031df068196..47bfc35e3c285 100644 --- a/src/test/java/org/elasticsearch/test/unit/index/search/nested/ShortNestedSortingTests.java +++ b/src/test/java/org/elasticsearch/test/unit/index/search/nested/ShortNestedSortingTests.java @@ -38,9 +38,9 @@ protected FieldDataType getFieldDataType() { } @Override - protected IndexFieldData.XFieldComparatorSource createInnerFieldComparator(String fieldName, SortMode sortMode) { + protected IndexFieldData.XFieldComparatorSource createInnerFieldComparator(String fieldName, SortMode sortMode, Object missingValue) { ShortArrayIndexFieldData fieldData = getForField(fieldName); - return new ShortValuesComparatorSource(fieldData, null, sortMode); + return new ShortValuesComparatorSource(fieldData, missingValue, sortMode); } @Override
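Read end to end, the change is easiest to see from the query side; the sketch below mirrors the integration test added above (index name, field names and the missing value of 10 all come from that test):

    SearchResponse response = client.prepareSearch("test")
        .setTypes("type1")
        .setQuery(QueryBuilders.matchAllQuery())
        .addSort(SortBuilders.fieldSort("nested1.field1")
            // Only nested docs matching this filter feed the sort key...
            .setNestedFilter(termFilter("nested1.field2", true))
            // ...and root docs with no matching nested doc now sort as 10
            // instead of silently comparing equal to everything.
            .missing(10)
            .order(SortOrder.ASC))
        .execute().actionGet();
    // Doc "3", whose nested docs all have field2=false, sorts last with key 10.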
ebb236ef9c00592c592f8d5bb885e7dfd4d05c3a
hadoop
HADOOP-7057. IOUtils.readFully and IOUtils.skipFully have typo in exception creation's message. Contributed by Konstantin Boudnik. git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1040849 13f79535-47bb-0310-9956-ffa450edef68
p
https://github.com/apache/hadoop
diff --git a/CHANGES.txt b/CHANGES.txt index 6b853babb7b72..2132304e145d4 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -374,6 +374,9 @@ Release 0.22.0 - Unreleased HADOOP-6496. HttpServer sends wrong content-type for CSS files (and others). (Todd Lipcon via tomwhite) + HADOOP-7057. IOUtils.readFully and IOUtils.skipFully have typo in + exception creation's message. (cos) + Release 0.21.1 - Unreleased IMPROVEMENTS diff --git a/src/java/org/apache/hadoop/io/IOUtils.java b/src/java/org/apache/hadoop/io/IOUtils.java index aa3450d55d5f7..f7d27a7aebde5 100644 --- a/src/java/org/apache/hadoop/io/IOUtils.java +++ b/src/java/org/apache/hadoop/io/IOUtils.java @@ -115,7 +115,7 @@ public static void readFully( InputStream in, byte buf[], while ( toRead > 0 ) { int ret = in.read( buf, off, toRead ); if ( ret < 0 ) { - throw new IOException( "Premeture EOF from inputStream"); + throw new IOException( "Premature EOF from inputStream"); } toRead -= ret; off += ret; @@ -132,7 +132,7 @@ public static void skipFully( InputStream in, long len ) throws IOException { while ( len > 0 ) { long ret = in.skip( len ); if ( ret < 0 ) { - throw new IOException( "Premeture EOF from inputStream"); + throw new IOException( "Premature EOF from inputStream"); } len -= ret; }
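For context, a small usage sketch of the two helpers whose messages are corrected (signatures as shown in the diff; in is an assumed open InputStream):

    byte[] buf = new byte[4096];
    // Loops until buf is completely filled, or throws
    // IOException("Premature EOF from inputStream") on a short read.
    IOUtils.readFully(in, buf, 0, buf.length);
    // Skips 128 bytes; a negative return from in.skip() raises the same
    // (now correctly spelled) message.
    IOUtils.skipFully(in, 128);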
0531b8bff5c14d9504beefb4ad47f473e3a22932
ReactiveX-RxJava
Change hasException to hasThrowable
p
https://github.com/ReactiveX/RxJava
diff --git a/rxjava-core/src/main/java/rx/Notification.java b/rxjava-core/src/main/java/rx/Notification.java index ad6b81c002..866ed06450 100644 --- a/rxjava-core/src/main/java/rx/Notification.java +++ b/rxjava-core/src/main/java/rx/Notification.java @@ -91,7 +91,7 @@ public boolean hasValue() { * * @return a value indicating whether this notification has an exception. */ - public boolean hasException() { + public boolean hasThrowable() { return isOnError() && throwable != null; } @@ -125,7 +125,7 @@ public String toString() { StringBuilder str = new StringBuilder("[").append(super.toString()).append(" ").append(getKind()); if (hasValue()) str.append(" ").append(getValue()); - if (hasException()) + if (hasThrowable()) str.append(" ").append(getThrowable().getMessage()); str.append("]"); return str.toString(); @@ -136,7 +136,7 @@ public int hashCode() { int hash = getKind().hashCode(); if (hasValue()) hash = hash * 31 + getValue().hashCode(); - if (hasException()) + if (hasThrowable()) hash = hash * 31 + getThrowable().hashCode(); return hash; } @@ -154,7 +154,7 @@ public boolean equals(Object obj) { return false; if (hasValue() && !getValue().equals(notification.getValue())) return false; - if (hasException() && !getThrowable().equals(notification.getThrowable())) + if (hasThrowable() && !getThrowable().equals(notification.getThrowable())) return false; return true; }
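A hedged sketch of the renamed accessor in use; the Notification here is assumed to come from an operator such as materialize(), and log/process are placeholder methods:

    // n is a Notification<String> obtained elsewhere (creation API assumed).
    if (n.hasThrowable()) {            // formerly hasException()
        log(n.getThrowable().getMessage());
    } else if (n.hasValue()) {
        process(n.getValue());
    }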
46b622678e419de689d1debd514fa3b6876816e4
camel
CAMEL-3879 Applied patch of DanK with thanks. git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@1099050 13f79535-47bb-0310-9956-ffa450edef68
p
https://github.com/apache/camel
diff --git a/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/spring/CxfEndpointBeanDefinitionParser.java b/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/spring/CxfEndpointBeanDefinitionParser.java index 3cc9ee1480aa3..b9d2988efac39 100644 --- a/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/spring/CxfEndpointBeanDefinitionParser.java +++ b/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/spring/CxfEndpointBeanDefinitionParser.java @@ -28,6 +28,7 @@ import org.apache.cxf.bus.spring.BusWiringBeanFactoryPostProcessor; import org.apache.cxf.bus.spring.SpringBusFactory; import org.apache.cxf.service.factory.ReflectionServiceFactoryBean; +import org.apache.cxf.version.Version; import org.springframework.beans.BeansException; import org.springframework.beans.factory.support.BeanDefinitionBuilder; import org.springframework.beans.factory.xml.ParserContext; @@ -87,15 +88,19 @@ public CxfSpringEndpointBean(ReflectionServiceFactoryBean factory) { super(factory); } + @SuppressWarnings("deprecation") public void setApplicationContext(ApplicationContext ctx) throws BeansException { applicationContext = ctx; - if (getBus() == null) { - // Don't relate on the DefaultBus - BusFactory factory = new SpringBusFactory(ctx); - Bus bus = factory.createBus(); - setBus(bus); + if (bus == null) { + if (Version.getCurrentVersion().startsWith("2.3")) { + // Don't relate on the DefaultBus + BusFactory factory = new SpringBusFactory(ctx); + bus = factory.createBus(); + BusWiringBeanFactoryPostProcessor.updateBusReferencesInContext(bus, ctx); + } else { + bus = BusWiringBeanFactoryPostProcessor.addDefaultBus(ctx); + } } - BusWiringBeanFactoryPostProcessor.updateBusReferencesInContext(getBus(), ctx); } public ApplicationContext getApplicationContext() { diff --git a/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/spring/CxfRsClientFactoryBeanDefinitionParser.java b/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/spring/CxfRsClientFactoryBeanDefinitionParser.java index 70ce212d26677..f1c807a18bfe1 100644 --- a/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/spring/CxfRsClientFactoryBeanDefinitionParser.java +++ b/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/spring/CxfRsClientFactoryBeanDefinitionParser.java @@ -29,6 +29,7 @@ import org.apache.cxf.jaxrs.client.JAXRSClientFactoryBean; import org.apache.cxf.jaxrs.model.UserResource; import org.apache.cxf.jaxrs.utils.ResourceUtils; +import org.apache.cxf.version.Version; import org.springframework.beans.BeansException; import org.springframework.beans.factory.support.BeanDefinitionBuilder; import org.springframework.beans.factory.xml.ParserContext; @@ -78,13 +79,18 @@ public SpringJAXRSClientFactoryBean() { super(); } + @SuppressWarnings("deprecation") public void setApplicationContext(ApplicationContext ctx) throws BeansException { - if (getBus() == null) { - // Don't relate on the DefaultBus - BusFactory factory = new SpringBusFactory(ctx); - Bus bus = factory.createBus(); - BusWiringBeanFactoryPostProcessor.updateBusReferencesInContext(bus, ctx); - setBus(bus); + if (bus == null) { + if (Version.getCurrentVersion().startsWith("2.3")) { + // Don't relate on the DefaultBus + BusFactory factory = new SpringBusFactory(ctx); + bus = factory.createBus(); + BusWiringBeanFactoryPostProcessor.updateBusReferencesInContext(bus, ctx); + setBus(bus); + } else { + setBus(BusWiringBeanFactoryPostProcessor.addDefaultBus(ctx)); + } } } diff 
--git a/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/spring/CxfRsServerFactoryBeanDefinitionParser.java b/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/spring/CxfRsServerFactoryBeanDefinitionParser.java index 387a934bc6084..8708ee43ccb4c 100644 --- a/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/spring/CxfRsServerFactoryBeanDefinitionParser.java +++ b/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/spring/CxfRsServerFactoryBeanDefinitionParser.java @@ -30,6 +30,7 @@ import org.apache.cxf.jaxrs.JAXRSServiceFactoryBean; import org.apache.cxf.jaxrs.model.UserResource; import org.apache.cxf.jaxrs.utils.ResourceUtils; +import org.apache.cxf.version.Version; import org.springframework.beans.BeansException; import org.springframework.beans.factory.support.BeanDefinitionBuilder; import org.springframework.beans.factory.xml.ParserContext; @@ -91,13 +92,18 @@ public SpringJAXRSServerFactoryBean(JAXRSServiceFactoryBean sf) { super(sf); } + @SuppressWarnings("deprecation") public void setApplicationContext(ApplicationContext ctx) throws BeansException { - if (getBus() == null) { - // Don't relate on the DefaultBus - BusFactory factory = new SpringBusFactory(ctx); - Bus bus = factory.createBus(); - BusWiringBeanFactoryPostProcessor.updateBusReferencesInContext(bus, ctx); - setBus(bus); + if (bus == null) { + if (Version.getCurrentVersion().startsWith("2.3")) { + // Don't relate on the DefaultBus + BusFactory factory = new SpringBusFactory(ctx); + bus = factory.createBus(); + setBus(bus); + BusWiringBeanFactoryPostProcessor.updateBusReferencesInContext(bus, ctx); + } else { + setBus(BusWiringBeanFactoryPostProcessor.addDefaultBus(ctx)); + } } } diff --git a/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/transport/CamelTransportFactory.java b/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/transport/CamelTransportFactory.java index 586aa337f4537..6904af1ebe580 100644 --- a/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/transport/CamelTransportFactory.java +++ b/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/transport/CamelTransportFactory.java @@ -17,8 +17,10 @@ package org.apache.camel.component.cxf.transport; import java.io.IOException; +import java.util.Arrays; import java.util.Collection; import java.util.HashSet; +import java.util.List; import java.util.Set; import javax.annotation.PostConstruct; @@ -28,6 +30,7 @@ import org.apache.camel.component.cxf.CxfHeaderFilterStrategy; import org.apache.camel.spi.HeaderFilterStrategy; import org.apache.cxf.Bus; +import org.apache.cxf.common.injection.NoJSR250Annotations; import org.apache.cxf.service.model.EndpointInfo; import org.apache.cxf.transport.AbstractTransportFactory; import org.apache.cxf.transport.Conduit; @@ -41,12 +44,13 @@ /** * @version */ +@NoJSR250Annotations(unlessNull = "bus") public class CamelTransportFactory extends AbstractTransportFactory implements ConduitInitiator, DestinationFactory { public static final String TRANSPORT_ID = "http://cxf.apache.org/transports/camel"; + public static final List<String> DEFAULT_NAMESPACES = Arrays.asList(TRANSPORT_ID); private static final Set<String> URI_PREFIXES = new HashSet<String>(); - private Collection<String> activationNamespaces; private HeaderFilterStrategy headerFilterStrategy; private boolean checkException; @@ -54,7 +58,6 @@ public class CamelTransportFactory extends AbstractTransportFactory implements C URI_PREFIXES.add("camel://"); } - 
private Bus bus; private CamelContext camelContext; public CamelTransportFactory() { @@ -63,27 +66,17 @@ public CamelTransportFactory() { defaultHeaderFilterStrategy.setOutFilterPattern(null); headerFilterStrategy = defaultHeaderFilterStrategy; } - - @Resource(name = "bus") - public void setBus(Bus b) { - bus = b; - } - - public Bus getBus() { - return bus; - } - - public void setActivationNamespaces(Collection<String> ans) { - activationNamespaces = ans; - } - - public CamelContext getCamelContext() { - return camelContext; + public CamelTransportFactory(Bus b) { + super(DEFAULT_NAMESPACES, b); + CxfHeaderFilterStrategy defaultHeaderFilterStrategy = new CxfHeaderFilterStrategy(); + // Doesn't filter the camel relates headers by default + defaultHeaderFilterStrategy.setOutFilterPattern(null); + headerFilterStrategy = defaultHeaderFilterStrategy; } - @Resource(name = "camelContext") - public void setCamelContext(CamelContext camelContext) { - this.camelContext = camelContext; + @Resource(name = "cxf") + public void setBus(Bus b) { + super.setBus(b); } public void setCheckException(boolean check) { @@ -110,25 +103,6 @@ public Set<String> getUriPrefixes() { return URI_PREFIXES; } - @PostConstruct - void registerWithBindingManager() { - if (null == bus) { - return; - } - ConduitInitiatorManager cim = bus.getExtension(ConduitInitiatorManager.class); - if (null != cim && null != activationNamespaces) { - for (String ns : activationNamespaces) { - cim.registerConduitInitiator(ns, this); - } - } - DestinationFactoryManager dfm = bus.getExtension(DestinationFactoryManager.class); - if (null != dfm && null != activationNamespaces) { - for (String ns : activationNamespaces) { - dfm.registerDestinationFactory(ns, this); - } - } - } - public HeaderFilterStrategy getHeaderFilterStrategy() { return headerFilterStrategy; } @@ -136,6 +110,13 @@ public HeaderFilterStrategy getHeaderFilterStrategy() { public void setHeaderFilterStrategy(HeaderFilterStrategy headerFilterStrategy) { this.headerFilterStrategy = headerFilterStrategy; } + + public CamelContext getCamelContext() { + return camelContext; + } + public void setCamelContext(CamelContext c) { + camelContext = c; + } } diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyTest.java index bc83b10a7ebfd..82a99d68dfca6 100644 --- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyTest.java +++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyTest.java @@ -30,18 +30,23 @@ import org.apache.camel.wsdl_first.PersonImpl; import org.apache.camel.wsdl_first.PersonService; import org.apache.camel.wsdl_first.UnknownPersonFault; +import org.apache.cxf.BusFactory; import org.junit.After; +import org.junit.AfterClass; import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Test; import org.springframework.context.support.ClassPathXmlApplicationContext; public class CXFWsdlOnlyTest extends CamelSpringTestSupport { - private Endpoint endpoint1; - private Endpoint endpoint2; + private static Endpoint endpoint1; + private static Endpoint endpoint2; protected ClassPathXmlApplicationContext createApplicationContext() { + // When the Application is closed, the camel-cxf endpoint will be shutdown, + // this will cause the issue of the new http server doesn't send the response back. 
return new ClassPathXmlApplicationContext("org/apache/camel/component/cxf/WsdlOnlyBeans.xml"); } @@ -49,8 +54,8 @@ protected void assertValidContext(CamelContext context) { assertNotNull("No context found!", context); } - @Before - public void startServices() { + @BeforeClass + public static void startServices() { Object implementor = new PersonImpl(); String address = "http://localhost:9000/PersonService/"; endpoint1 = Endpoint.publish(address, implementor); @@ -59,8 +64,8 @@ public void startServices() { endpoint2 = Endpoint.publish(address, implementor); } - @After - public void stopServices() { + @AfterClass + public static void stopServices() { if (endpoint1 != null) { endpoint1.stop(); } @@ -71,7 +76,7 @@ public void stopServices() { } @Test - public void testRoutes() throws Exception { + public void testRoutesWithFault() throws Exception { URL wsdlURL = getClass().getClassLoader().getResource("person.wsdl"); PersonService ss = new PersonService(wsdlURL, new QName("http://camel.apache.org/wsdl-first", "PersonService")); @@ -82,27 +87,10 @@ public void testRoutes() throws Exception { Holder<String> name = new Holder<String>(); client.getPerson(personId, ssn, name); assertEquals("Bonjour", name.value); - - Person client2 = ss.getSoap2(); - Holder<String> personId2 = new Holder<String>(); - personId2.value = "hello"; - Holder<String> ssn2 = new Holder<String>(); - Holder<String> name2 = new Holder<String>(); - client2.getPerson(personId2, ssn2, name2); - assertEquals("Bonjour", name2.value); - } - - @Test - public void testSoapFaultRoutes() { - URL wsdlURL = getClass().getClassLoader().getResource("person.wsdl"); - PersonService ss = new PersonService(wsdlURL, new QName("http://camel.apache.org/wsdl-first", - "PersonService")); - // test message mode - Person client = ss.getSoap(); - Holder<String> personId = new Holder<String>(); + personId.value = ""; - Holder<String> ssn = new Holder<String>(); - Holder<String> name = new Holder<String>(); + ssn = new Holder<String>(); + name = new Holder<String>(); Throwable t = null; try { client.getPerson(personId, ssn, name); @@ -112,13 +100,17 @@ public void testSoapFaultRoutes() { } assertTrue(t instanceof UnknownPersonFault); - - // test PAYLOAD mode Person client2 = ss.getSoap2(); Holder<String> personId2 = new Holder<String>(); - personId2.value = ""; + personId2.value = "hello"; Holder<String> ssn2 = new Holder<String>(); Holder<String> name2 = new Holder<String>(); + client2.getPerson(personId2, ssn2, name2); + assertEquals("Bonjour", name2.value); + + personId2.value = ""; + ssn2 = new Holder<String>(); + name2 = new Holder<String>(); try { client2.getPerson(personId2, ssn2, name2); fail("Expect exception"); @@ -127,5 +119,6 @@ public void testSoapFaultRoutes() { } assertTrue(t instanceof UnknownPersonFault); } - + + } diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfSpringRouterTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfSpringRouterTest.java index 09d3249a15800..b048d142d970e 100644 --- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfSpringRouterTest.java +++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfSpringRouterTest.java @@ -39,10 +39,9 @@ public void setUp() throws Exception { @After public void tearDown() throws Exception { - if (applicationContext != null) { - applicationContext.destroy(); - } + // Don't close the application context, as it will cause some trouble on the bus shutdown super.tearDown(); + } 
@Override diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/cxfbean/CxfBeanTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/cxfbean/CxfBeanTest.java index 9b6ad56b25063..59cef13eb04c0 100644 --- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/cxfbean/CxfBeanTest.java +++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/cxfbean/CxfBeanTest.java @@ -18,6 +18,7 @@ import java.io.InputStream; import java.net.URL; +import java.net.URLConnection; import javax.xml.namespace.QName; import javax.xml.ws.Holder; @@ -105,15 +106,12 @@ public void testGetConsumer() throws Exception { invokeRsService("http://localhost:9000/customerservice/orders/223/products/323", "{\"Product\":{\"description\":\"product 323\",\"id\":323}}"); - - } @Test public void testGetConsumerWithQueryParam() throws Exception { invokeRsService("http://localhost:9000/customerservice/customers?id=123", - "{\"Customer\":{\"id\":123,\"name\":\"John\"}}"); - + "{\"Customer\":{\"id\":123,\"name\":\"John\"}}"); } @Test @@ -126,7 +124,6 @@ public void testGetConsumerAfterReStartCamelContext() throws Exception { invokeRsService("http://localhost:9000/customerservice/orders/223/products/323", "{\"Product\":{\"description\":\"product 323\",\"id\":323}}"); - } @Test diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/jaxrs/CxfRsConsumerTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/jaxrs/CxfRsConsumerTest.java index bce79c1cf948b..665ca3989e9a8 100644 --- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/jaxrs/CxfRsConsumerTest.java +++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/jaxrs/CxfRsConsumerTest.java @@ -19,6 +19,7 @@ import java.io.FileNotFoundException; import java.io.IOException; import java.net.URL; +import java.net.URLConnection; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Response; diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/spring/SpringBusFactoryBeanTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/spring/SpringBusFactoryBeanTest.java index b0f9fcab3ecc1..31d8adf1203e6 100644 --- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/spring/SpringBusFactoryBeanTest.java +++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/spring/SpringBusFactoryBeanTest.java @@ -33,7 +33,7 @@ protected String[] getApplicationContextFiles() { public void getTheBusInstance() { Bus bus = (Bus)ctx.getBean("cxfBus"); assertNotNull("The bus should not be null", bus); - if (!Version.getCurrentVersion().startsWith("2.4")) { + if (Version.getCurrentVersion().startsWith("2.3")) { // This test just for the CXF 2.3.x, we skip this test with CXF 2.4.x CamelTransportFactory factory = bus.getExtension(CamelTransportFactory.class); assertNull("You should not find the factory here", factory); @@ -41,6 +41,7 @@ public void getTheBusInstance() { bus = (Bus)ctx.getBean("myBus"); assertNotNull("The bus should not be null", bus); + CamelTransportFactory factory = bus.getExtension(CamelTransportFactory.class); assertNotNull("You should find the factory here", factory); SoapBindingFactory soapBindingFactory = bus.getExtension(SoapBindingFactory.class);
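The recurring change in this camel-cxf diff is the same version-gated bus lookup repeated in each Spring bean definition parser. Below is a minimal sketch of that pattern, assuming only the CXF classes already imported in the diff (Version, SpringBusFactory, BusWiringBeanFactoryPostProcessor); the class and method names of the sketch itself are illustrative and not part of the commit:

import org.apache.cxf.Bus;
import org.apache.cxf.BusFactory;
import org.apache.cxf.bus.spring.BusWiringBeanFactoryPostProcessor;
import org.apache.cxf.bus.spring.SpringBusFactory;
import org.apache.cxf.version.Version;
import org.springframework.context.ApplicationContext;

// Illustrative helper: resolve a Bus for a Spring context, branching on
// the CXF version the same way the parsers above do.
public final class BusWiringSketch {
    private BusWiringSketch() {
    }

    public static Bus resolveBus(ApplicationContext ctx) {
        if (Version.getCurrentVersion().startsWith("2.3")) {
            // CXF 2.3.x: build the bus from the Spring context explicitly
            // instead of relying on the static default bus.
            BusFactory factory = new SpringBusFactory(ctx);
            Bus bus = factory.createBus();
            BusWiringBeanFactoryPostProcessor.updateBusReferencesInContext(bus, ctx);
            return bus;
        }
        // CXF 2.4.x and later: the post-processor can register a default
        // bus in the context directly.
        return BusWiringBeanFactoryPostProcessor.addDefaultBus(ctx);
    }
}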
4b946ebdf22a25d8f01c87845ae177d7343ae616
hadoop
MAPREDUCE-2784. [Gridmix] Bug fixes in ExecutionSummarizer and ResourceUsageMatcher. (amarrk)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1237579 13f79535-47bb-0310-9956-ffa450edef68
c
https://github.com/apache/hadoop
diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt index d95872f18fceb..64176dc7b116b 100644 --- a/hadoop-mapreduce-project/CHANGES.txt +++ b/hadoop-mapreduce-project/CHANGES.txt @@ -176,6 +176,9 @@ Release 0.23.1 - Unreleased on the webapps. (Bhallamudi Venkata Siva Kamesh and Jason Lowe via vinodkv) BUG FIXES + MAPREDUCE-2784. [Gridmix] Bug fixes in ExecutionSummarizer and + ResourceUsageMatcher. (amarrk) + MAPREDUCE-3194. "mapred mradmin" command is broken in mrv2 (Jason Lowe via bobby) diff --git a/hadoop-mapreduce-project/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/ExecutionSummarizer.java b/hadoop-mapreduce-project/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/ExecutionSummarizer.java index f59bf9e66c72c..fc362c5643a31 100644 --- a/hadoop-mapreduce-project/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/ExecutionSummarizer.java +++ b/hadoop-mapreduce-project/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/ExecutionSummarizer.java @@ -149,10 +149,15 @@ void finalize(JobFactory factory, String inputPath, long dataSize, throws IOException { numJobsInInputTrace = factory.numJobsInTrace; endTime = System.currentTimeMillis(); - Path inputTracePath = new Path(inputPath); - FileSystem fs = inputTracePath.getFileSystem(conf); - inputTraceLocation = fs.makeQualified(inputTracePath).toString(); - inputTraceSignature = getTraceSignature(inputTraceLocation); + if ("-".equals(inputPath)) { + inputTraceLocation = Summarizer.NA; + inputTraceSignature = Summarizer.NA; + } else { + Path inputTracePath = new Path(inputPath); + FileSystem fs = inputTracePath.getFileSystem(conf); + inputTraceLocation = fs.makeQualified(inputTracePath).toString(); + inputTraceSignature = getTraceSignature(inputPath); + } jobSubmissionPolicy = Gridmix.getJobSubmissionPolicy(conf).name(); resolver = userResolver.getClass().getName(); if (dataSize > 0) { diff --git a/hadoop-mapreduce-project/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/Gridmix.java b/hadoop-mapreduce-project/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/Gridmix.java index 902351cd7bf8e..b4a0e0b5e2dab 100644 --- a/hadoop-mapreduce-project/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/Gridmix.java +++ b/hadoop-mapreduce-project/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/Gridmix.java @@ -314,9 +314,13 @@ public Integer run() throws Exception { } }); - // print the run summary - System.out.print("\n\n"); - System.out.println(summarizer.toString()); + // print the gridmix summary if the run was successful + if (val == 0) { + // print the run summary + System.out.print("\n\n"); + System.out.println(summarizer.toString()); + } + return val; } diff --git a/hadoop-mapreduce-project/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/ResourceUsageMatcher.java b/hadoop-mapreduce-project/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/ResourceUsageMatcher.java index 10d6e733f1c00..917cd09372a68 100644 --- a/hadoop-mapreduce-project/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/ResourceUsageMatcher.java +++ b/hadoop-mapreduce-project/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/ResourceUsageMatcher.java @@ -52,15 +52,23 @@ public class ResourceUsageMatcher { @SuppressWarnings("unchecked") public void configure(Configuration conf, ResourceCalculatorPlugin 
monitor, ResourceUsageMetrics metrics, Progressive progress) { - Class[] plugins = - conf.getClasses(RESOURCE_USAGE_EMULATION_PLUGINS, - ResourceUsageEmulatorPlugin.class); + Class[] plugins = conf.getClasses(RESOURCE_USAGE_EMULATION_PLUGINS); if (plugins == null) { System.out.println("No resource usage emulator plugins configured."); } else { - for (Class<? extends ResourceUsageEmulatorPlugin> plugin : plugins) { - if (plugin != null) { - emulationPlugins.add(ReflectionUtils.newInstance(plugin, conf)); + for (Class clazz : plugins) { + if (clazz != null) { + if (ResourceUsageEmulatorPlugin.class.isAssignableFrom(clazz)) { + ResourceUsageEmulatorPlugin plugin = + (ResourceUsageEmulatorPlugin) ReflectionUtils.newInstance(clazz, + conf); + emulationPlugins.add(plugin); + } else { + throw new RuntimeException("Misconfigured resource usage plugins. " + + "Class " + clazz.getClass().getName() + " is not a resource " + + "usage plugin as it does not extend " + + ResourceUsageEmulatorPlugin.class.getName()); + } } } } diff --git a/hadoop-mapreduce-project/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestGridmixSummary.java b/hadoop-mapreduce-project/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestGridmixSummary.java index 452d2f5604201..64af603bec538 100644 --- a/hadoop-mapreduce-project/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestGridmixSummary.java +++ b/hadoop-mapreduce-project/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestGridmixSummary.java @@ -159,7 +159,7 @@ public void update(Object item) { @Override protected Thread createReaderThread() { - return null; + return new Thread(); } } @@ -243,7 +243,7 @@ public void testExecutionSummarizer() throws IOException { tid, es.getInputTraceSignature()); // test trace location Path qPath = fs.makeQualified(testTraceFile); - assertEquals("Mismatch in trace signature", + assertEquals("Mismatch in trace filename", qPath.toString(), es.getInputTraceLocation()); // test expected data size assertEquals("Mismatch in expected data size", @@ -275,7 +275,7 @@ public void testExecutionSummarizer() throws IOException { es.finalize(factory, testTraceFile.toString(), 0L, resolver, dataStats, conf); // test missing expected data size - assertEquals("Mismatch in trace signature", + assertEquals("Mismatch in trace data size", Summarizer.NA, es.getExpectedDataSize()); assertFalse("Mismatch in trace signature", tid.equals(es.getInputTraceSignature())); @@ -295,6 +295,12 @@ public void testExecutionSummarizer() throws IOException { assertEquals("Mismatch in trace signature", tid, es.getInputTraceSignature()); + // finalize trace identifier '-' input + es.finalize(factory, "-", 0L, resolver, dataStats, conf); + assertEquals("Mismatch in trace signature", + Summarizer.NA, es.getInputTraceSignature()); + assertEquals("Mismatch in trace file location", + Summarizer.NA, es.getInputTraceLocation()); } // test the ExecutionSummarizer
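The ResourceUsageMatcher hunk above boils down to one defensive pattern: verify that every user-configured class actually implements the plugin interface before instantiating it, and fail fast on misconfiguration. A Hadoop-free sketch of that check follows; PluginLoaderSketch and EmulatorPlugin are made-up names, and the real code instantiates through ReflectionUtils.newInstance with a Configuration rather than bare reflection:

import java.util.ArrayList;
import java.util.List;

public final class PluginLoaderSketch {

    // Stand-in for ResourceUsageEmulatorPlugin.
    public interface EmulatorPlugin {
    }

    public static List<EmulatorPlugin> load(Class<?>[] configured)
            throws ReflectiveOperationException {
        List<EmulatorPlugin> plugins = new ArrayList<>();
        if (configured == null) {
            System.out.println("No resource usage emulator plugins configured.");
            return plugins;
        }
        for (Class<?> clazz : configured) {
            if (clazz == null) {
                continue;
            }
            // Fail fast with a clear message instead of a ClassCastException
            // deep inside the emulation run.
            if (!EmulatorPlugin.class.isAssignableFrom(clazz)) {
                throw new RuntimeException("Misconfigured resource usage plugins. "
                    + "Class " + clazz.getName() + " is not a resource usage plugin.");
            }
            plugins.add((EmulatorPlugin) clazz.getDeclaredConstructor().newInstance());
        }
        return plugins;
    }
}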
64740a5c0769d65dddc4af0dee65179b610270b0
hbase
HBASE-10556 Possible data loss due to non-handled DroppedSnapshotException for user-triggered flush from client/shell

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1571501 13f79535-47bb-0310-9956-ffa450edef68
c
https://github.com/apache/hbase
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index 12b810e51e22..cb31a9694d02 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -94,6 +94,7 @@ import org.apache.hadoop.hbase.client.RowMutations; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; +import org.apache.hadoop.hbase.DroppedSnapshotException; import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException; import org.apache.hadoop.hbase.exceptions.OperationConflictException; import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException; @@ -3782,6 +3783,13 @@ public FlushRegionResponse flushRegion(final RpcController controller, } builder.setLastFlushTime(region.getLastFlushTime()); return builder.build(); + } catch (DroppedSnapshotException ex) { + // Cache flush can fail in a few places. If it fails in a critical + // section, we get a DroppedSnapshotException and a replay of hlog + // is required. Currently the only way to do this is a restart of + // the server. + abort("Replay of HLog required. Forcing server shutdown", ex); + throw new ServiceException(ex); } catch (IOException ie) { throw new ServiceException(ie); }
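The whole fix is the extra catch block: a DroppedSnapshotException means the flush failed inside a critical section, so the region server must abort and replay its log on restart rather than swallow the error. A self-contained sketch of that handling; FatalFlushException, ServiceFailure, and Region here are hypothetical stand-ins for HBase's DroppedSnapshotException, ServiceException, and HRegion:

import java.io.IOException;

public final class FlushHandlerSketch {

    static class FatalFlushException extends Exception {
        FatalFlushException(String msg) {
            super(msg);
        }
    }

    static class ServiceFailure extends Exception {
        ServiceFailure(Throwable cause) {
            super(cause);
        }
    }

    interface Region {
        void flush() throws FatalFlushException, IOException;
    }

    static void abort(String reason, Throwable cause) {
        // In the real server this forces a shutdown so the WAL is replayed
        // on restart; the sketch just logs.
        System.err.println("ABORT: " + reason + " caused by " + cause);
    }

    static void handleFlushRequest(Region region) throws ServiceFailure {
        try {
            region.flush();
        } catch (FatalFlushException ex) {
            // The in-memory snapshot was dropped mid-flush: the only safe
            // recovery is a log replay, so abort and still report failure.
            abort("Replay of log required. Forcing server shutdown", ex);
            throw new ServiceFailure(ex);
        } catch (IOException ie) {
            // Ordinary I/O failure: report to the client, keep serving.
            throw new ServiceFailure(ie);
        }
    }
}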
b5ba6979b7bfd216166f040ec1d66c425307516c
hadoop
YARN-3583. Support of NodeLabel object instead of plain String in YarnClient side. (Sunil G via wangda)

(cherry picked from commit 563eb1ad2ae848a23bbbf32ebfaf107e8fa14e87)
(cherry picked from commit b0d22b0c606fad6b4ab5443c0aed07c829b46726)
p
https://github.com/apache/hadoop
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java index 2b7cd5fd6f2d1..90f6876c17a46 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java @@ -444,19 +444,19 @@ public ReservationDeleteResponse deleteReservation( } @Override - public Map<NodeId, Set<String>> getNodeToLabels() throws YarnException, + public Map<NodeId, Set<NodeLabel>> getNodeToLabels() throws YarnException, IOException { return client.getNodeToLabels(); } @Override - public Map<String, Set<NodeId>> getLabelsToNodes() throws YarnException, + public Map<NodeLabel, Set<NodeId>> getLabelsToNodes() throws YarnException, IOException { return client.getLabelsToNodes(); } @Override - public Map<String, Set<NodeId>> getLabelsToNodes(Set<String> labels) + public Map<NodeLabel, Set<NodeId>> getLabelsToNodes(Set<String> labels) throws YarnException, IOException { return client.getLabelsToNodes(labels); } diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt index 67c43fbc18934..b3ccc3580ea36 100644 --- a/hadoop-yarn-project/CHANGES.txt +++ b/hadoop-yarn-project/CHANGES.txt @@ -205,6 +205,9 @@ Release 2.8.0 - UNRELEASED YARN-3565. NodeHeartbeatRequest/RegisterNodeManagerRequest should use NodeLabel object instead of String. (Naganarasimha G R via wangda) + YARN-3583. Support of NodeLabel object instead of plain String + in YarnClient side. (Sunil G via wangda) + OPTIMIZATIONS YARN-3339. 
TestDockerContainerExecutor should pull a single image and not diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/GetLabelsToNodesResponse.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/GetLabelsToNodesResponse.java index f105359110418..da2be28830e7d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/GetLabelsToNodesResponse.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/GetLabelsToNodesResponse.java @@ -24,11 +24,12 @@ import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability.Evolving; import org.apache.hadoop.yarn.api.records.NodeId; +import org.apache.hadoop.yarn.api.records.NodeLabel; import org.apache.hadoop.yarn.util.Records; public abstract class GetLabelsToNodesResponse { public static GetLabelsToNodesResponse newInstance( - Map<String, Set<NodeId>> map) { + Map<NodeLabel, Set<NodeId>> map) { GetLabelsToNodesResponse response = Records.newRecord(GetLabelsToNodesResponse.class); response.setLabelsToNodes(map); @@ -37,9 +38,9 @@ public static GetLabelsToNodesResponse newInstance( @Public @Evolving - public abstract void setLabelsToNodes(Map<String, Set<NodeId>> map); + public abstract void setLabelsToNodes(Map<NodeLabel, Set<NodeId>> map); @Public @Evolving - public abstract Map<String, Set<NodeId>> getLabelsToNodes(); + public abstract Map<NodeLabel, Set<NodeId>> getLabelsToNodes(); } \ No newline at end of file diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/GetNodesToLabelsResponse.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/GetNodesToLabelsResponse.java index bcd5421e7ebc3..432485c358aad 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/GetNodesToLabelsResponse.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/GetNodesToLabelsResponse.java @@ -24,11 +24,12 @@ import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability.Evolving; import org.apache.hadoop.yarn.api.records.NodeId; +import org.apache.hadoop.yarn.api.records.NodeLabel; import org.apache.hadoop.yarn.util.Records; public abstract class GetNodesToLabelsResponse { public static GetNodesToLabelsResponse newInstance( - Map<NodeId, Set<String>> map) { + Map<NodeId, Set<NodeLabel>> map) { GetNodesToLabelsResponse response = Records.newRecord(GetNodesToLabelsResponse.class); response.setNodeToLabels(map); @@ -37,9 +38,9 @@ public static GetNodesToLabelsResponse newInstance( @Public @Evolving - public abstract void setNodeToLabels(Map<NodeId, Set<String>> map); + public abstract void setNodeToLabels(Map<NodeId, Set<NodeLabel>> map); @Public @Evolving - public abstract Map<NodeId, Set<String>> getNodeToLabels(); + public abstract Map<NodeId, Set<NodeLabel>> getNodeToLabels(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/server/yarn_server_resourcemanager_service_protos.proto b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/server/yarn_server_resourcemanager_service_protos.proto index d6d8713a247d2..e20b4aea447a3 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/server/yarn_server_resourcemanager_service_protos.proto +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/server/yarn_server_resourcemanager_service_protos.proto @@ -91,7 +91,7 @@ message RemoveFromClusterNodeLabelsResponseProto { } message ReplaceLabelsOnNodeRequestProto { - repeated NodeIdToLabelsProto nodeToLabels = 1; + repeated NodeIdToLabelsNameProto nodeToLabels = 1; } message ReplaceLabelsOnNodeResponseProto { @@ -107,6 +107,11 @@ message CheckForDecommissioningNodesResponseProto { repeated NodeIdProto decommissioningNodes = 1; } +message NodeIdToLabelsNameProto { + optional NodeIdProto nodeId = 1; + repeated string nodeLabels = 2; +} + enum DecommissionTypeProto { NORMAL = 1; GRACEFUL = 2; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto index 3c4aa524c3d9f..b9969b0ef41e6 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto @@ -248,13 +248,13 @@ message NodeReportProto { repeated string node_labels = 10; } -message NodeIdToLabelsProto { +message NodeIdToLabelsInfoProto { optional NodeIdProto nodeId = 1; - repeated string nodeLabels = 2; + repeated NodeLabelProto nodeLabels = 2; } message LabelsToNodeIdsProto { - optional string nodeLabels = 1; + optional NodeLabelProto nodeLabels = 1; repeated NodeIdProto nodeId = 2; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_service_protos.proto b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_service_protos.proto index 410b66382c1d5..098785aac5e5e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_service_protos.proto +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_service_protos.proto @@ -198,7 +198,7 @@ message GetNodesToLabelsRequestProto { } message GetNodesToLabelsResponseProto { - repeated NodeIdToLabelsProto nodeToLabels = 1; + repeated NodeIdToLabelsInfoProto nodeToLabels = 1; } message GetLabelsToNodesRequestProto { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/YarnClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/YarnClient.java index 5ce626c46ab2d..ff03c7d5af763 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/YarnClient.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/YarnClient.java @@ -619,7 +619,7 @@ public abstract ReservationDeleteResponse deleteReservation( */ @Public @Unstable - public abstract Map<NodeId, Set<String>> getNodeToLabels() + public abstract Map<NodeId, Set<NodeLabel>> getNodeToLabels() throws YarnException, IOException; /** @@ -634,7 +634,7 @@ public abstract Map<NodeId, Set<String>> getNodeToLabels() */ @Public @Unstable - public abstract Map<String, Set<NodeId>> getLabelsToNodes() + public abstract Map<NodeLabel, Set<NodeId>> getLabelsToNodes() throws YarnException, IOException; /** @@ -650,8 +650,8 @@ public abstract Map<String, Set<NodeId>> getLabelsToNodes() */ @Public @Unstable - public abstract Map<String, Set<NodeId>> getLabelsToNodes(Set<String> labels) - throws YarnException, IOException; + public abstract 
Map<NodeLabel, Set<NodeId>> getLabelsToNodes( + Set<String> labels) throws YarnException, IOException; /** * <p> diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/YarnClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/YarnClientImpl.java index 42dd5cdb62328..be4c8c4e1bcde 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/YarnClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/YarnClientImpl.java @@ -795,21 +795,21 @@ public ReservationDeleteResponse deleteReservation( } @Override - public Map<NodeId, Set<String>> getNodeToLabels() throws YarnException, + public Map<NodeId, Set<NodeLabel>> getNodeToLabels() throws YarnException, IOException { return rmClient.getNodeToLabels(GetNodesToLabelsRequest.newInstance()) .getNodeToLabels(); } @Override - public Map<String, Set<NodeId>> getLabelsToNodes() throws YarnException, + public Map<NodeLabel, Set<NodeId>> getLabelsToNodes() throws YarnException, IOException { return rmClient.getLabelsToNodes(GetLabelsToNodesRequest.newInstance()) .getLabelsToNodes(); } @Override - public Map<String, Set<NodeId>> getLabelsToNodes(Set<String> labels) + public Map<NodeLabel, Set<NodeId>> getLabelsToNodes(Set<String> labels) throws YarnException, IOException { return rmClient.getLabelsToNodes( GetLabelsToNodesRequest.newInstance(labels)).getLabelsToNodes(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestYarnClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestYarnClient.java index 10b9bbbbae6f9..511fa4acb39be 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestYarnClient.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestYarnClient.java @@ -67,6 +67,8 @@ import org.apache.hadoop.yarn.api.protocolrecords.GetContainersResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesResponse; +import org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsRequest; +import org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsResponse; import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest; import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse; import org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteRequest; @@ -87,6 +89,7 @@ import org.apache.hadoop.yarn.api.records.ContainerState; import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.api.records.NodeId; +import org.apache.hadoop.yarn.api.records.NodeLabel; import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.ReservationDefinition; import org.apache.hadoop.yarn.api.records.ReservationId; @@ -458,9 +461,9 @@ public void testGetLabelsToNodes() throws YarnException, IOException { client.start(); // Get labels to nodes mapping - Map<String, Set<NodeId>> expectedLabelsToNodes = + Map<NodeLabel, Set<NodeId>> expectedLabelsToNodes = ((MockYarnClient)client).getLabelsToNodesMap(); - Map<String, Set<NodeId>> labelsToNodes = client.getLabelsToNodes(); + 
Map<NodeLabel, Set<NodeId>> labelsToNodes = client.getLabelsToNodes(); Assert.assertEquals(labelsToNodes, expectedLabelsToNodes); Assert.assertEquals(labelsToNodes.size(), 3); @@ -476,7 +479,32 @@ public void testGetLabelsToNodes() throws YarnException, IOException { client.close(); } + @Test (timeout = 10000) + public void testGetNodesToLabels() throws YarnException, IOException { + Configuration conf = new Configuration(); + final YarnClient client = new MockYarnClient(); + client.init(conf); + client.start(); + + // Get labels to nodes mapping + Map<NodeId, Set<NodeLabel>> expectedNodesToLabels = ((MockYarnClient) client) + .getNodeToLabelsMap(); + Map<NodeId, Set<NodeLabel>> nodesToLabels = client.getNodeToLabels(); + Assert.assertEquals(nodesToLabels, expectedNodesToLabels); + Assert.assertEquals(nodesToLabels.size(), 1); + + // Verify exclusivity + Set<NodeLabel> labels = nodesToLabels.get(NodeId.newInstance("host", 0)); + for (NodeLabel label : labels) { + Assert.assertFalse(label.isExclusive()); + } + + client.stop(); + client.close(); + } + private static class MockYarnClient extends YarnClientImpl { + private ApplicationReport mockReport; private List<ApplicationReport> reports; private HashMap<ApplicationId, List<ApplicationAttemptReport>> attempts = @@ -498,6 +526,8 @@ private static class MockYarnClient extends YarnClientImpl { mock(GetContainerReportResponse.class); GetLabelsToNodesResponse mockLabelsToNodesResponse = mock(GetLabelsToNodesResponse.class); + GetNodesToLabelsResponse mockNodeToLabelsResponse = + mock(GetNodesToLabelsResponse.class); public MockYarnClient() { super(); @@ -537,6 +567,9 @@ public void start() { when(rmClient.getLabelsToNodes(any(GetLabelsToNodesRequest.class))) .thenReturn(mockLabelsToNodesResponse); + when(rmClient.getNodeToLabels(any(GetNodesToLabelsRequest.class))) + .thenReturn(mockNodeToLabelsResponse); + historyClient = mock(AHSClient.class); } catch (YarnException e) { @@ -704,7 +737,7 @@ private List<ApplicationReport> getApplicationReports( } @Override - public Map<String, Set<NodeId>> getLabelsToNodes() + public Map<NodeLabel, Set<NodeId>> getLabelsToNodes() throws YarnException, IOException { when(mockLabelsToNodesResponse.getLabelsToNodes()).thenReturn( getLabelsToNodesMap()); @@ -712,35 +745,52 @@ public Map<String, Set<NodeId>> getLabelsToNodes() } @Override - public Map<String, Set<NodeId>> getLabelsToNodes(Set<String> labels) + public Map<NodeLabel, Set<NodeId>> getLabelsToNodes(Set<String> labels) throws YarnException, IOException { when(mockLabelsToNodesResponse.getLabelsToNodes()).thenReturn( getLabelsToNodesMap(labels)); return super.getLabelsToNodes(labels); } - public Map<String, Set<NodeId>> getLabelsToNodesMap() { - Map<String, Set<NodeId>> map = new HashMap<String, Set<NodeId>>(); + public Map<NodeLabel, Set<NodeId>> getLabelsToNodesMap() { + Map<NodeLabel, Set<NodeId>> map = new HashMap<NodeLabel, Set<NodeId>>(); Set<NodeId> setNodeIds = new HashSet<NodeId>(Arrays.asList( NodeId.newInstance("host1", 0), NodeId.newInstance("host2", 0))); - map.put("x", setNodeIds); - map.put("y", setNodeIds); - map.put("z", setNodeIds); + map.put(NodeLabel.newInstance("x"), setNodeIds); + map.put(NodeLabel.newInstance("y"), setNodeIds); + map.put(NodeLabel.newInstance("z"), setNodeIds); return map; } - public Map<String, Set<NodeId>> getLabelsToNodesMap(Set<String> labels) { - Map<String, Set<NodeId>> map = new HashMap<String, Set<NodeId>>(); + public Map<NodeLabel, Set<NodeId>> getLabelsToNodesMap(Set<String> labels) { + Map<NodeLabel, 
Set<NodeId>> map = new HashMap<NodeLabel, Set<NodeId>>(); Set<NodeId> setNodeIds = new HashSet<NodeId>(Arrays.asList( NodeId.newInstance("host1", 0), NodeId.newInstance("host2", 0))); for(String label : labels) { - map.put(label, setNodeIds); + map.put(NodeLabel.newInstance(label), setNodeIds); } return map; } + @Override + public Map<NodeId, Set<NodeLabel>> getNodeToLabels() throws YarnException, + IOException { + when(mockNodeToLabelsResponse.getNodeToLabels()).thenReturn( + getNodeToLabelsMap()); + return super.getNodeToLabels(); + } + + public Map<NodeId, Set<NodeLabel>> getNodeToLabelsMap() { + Map<NodeId, Set<NodeLabel>> map = new HashMap<NodeId, Set<NodeLabel>>(); + Set<NodeLabel> setNodeLabels = new HashSet<NodeLabel>(Arrays.asList( + NodeLabel.newInstance("x", false), + NodeLabel.newInstance("y", false))); + map.put(NodeId.newInstance("host", 0), setNodeLabels); + return map; + } + @Override public List<ApplicationAttemptReport> getApplicationAttempts( ApplicationId appId) throws YarnException, IOException { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetLabelsToNodesResponsePBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetLabelsToNodesResponsePBImpl.java index e1979973320c4..418fcbd4cc6f8 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetLabelsToNodesResponsePBImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetLabelsToNodesResponsePBImpl.java @@ -29,11 +29,13 @@ import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability.Evolving; import org.apache.hadoop.yarn.api.records.NodeId; +import org.apache.hadoop.yarn.api.records.NodeLabel; import org.apache.hadoop.yarn.api.records.impl.pb.NodeIdPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.NodeLabelPBImpl; import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto; import org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesResponse; - import org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto; +import org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProtoOrBuilder; @@ -44,7 +46,7 @@ public class GetLabelsToNodesResponsePBImpl extends GetLabelsToNodesResponseProto.Builder builder = null; boolean viaProto = false; - private Map<String, Set<NodeId>> labelsToNodes; + private Map<NodeLabel, Set<NodeId>> labelsToNodes; public GetLabelsToNodesResponsePBImpl() { this.builder = GetLabelsToNodesResponseProto.newBuilder(); @@ -61,7 +63,7 @@ private void initLabelsToNodes() { } GetLabelsToNodesResponseProtoOrBuilder p = viaProto ? 
proto : builder; List<LabelsToNodeIdsProto> list = p.getLabelsToNodesList(); - this.labelsToNodes = new HashMap<String, Set<NodeId>>(); + this.labelsToNodes = new HashMap<NodeLabel, Set<NodeId>>(); for (LabelsToNodeIdsProto c : list) { Set<NodeId> setNodes = new HashSet<NodeId>(); @@ -69,8 +71,9 @@ private void initLabelsToNodes() { NodeId node = new NodeIdPBImpl(n); setNodes.add(node); } - if(!setNodes.isEmpty()) { - this.labelsToNodes.put(c.getNodeLabels(), setNodes); + if (!setNodes.isEmpty()) { + this.labelsToNodes + .put(new NodeLabelPBImpl(c.getNodeLabels()), setNodes); } } } @@ -94,7 +97,7 @@ private void addLabelsToNodesToProto() { public Iterator<LabelsToNodeIdsProto> iterator() { return new Iterator<LabelsToNodeIdsProto>() { - Iterator<Entry<String, Set<NodeId>>> iter = + Iterator<Entry<NodeLabel, Set<NodeId>>> iter = labelsToNodes.entrySet().iterator(); @Override @@ -104,13 +107,14 @@ public void remove() { @Override public LabelsToNodeIdsProto next() { - Entry<String, Set<NodeId>> now = iter.next(); + Entry<NodeLabel, Set<NodeId>> now = iter.next(); Set<NodeIdProto> nodeProtoSet = new HashSet<NodeIdProto>(); for(NodeId n : now.getValue()) { nodeProtoSet.add(convertToProtoFormat(n)); } return LabelsToNodeIdsProto.newBuilder() - .setNodeLabels(now.getKey()).addAllNodeId(nodeProtoSet) + .setNodeLabels(convertToProtoFormat(now.getKey())) + .addAllNodeId(nodeProtoSet) .build(); } @@ -149,6 +153,10 @@ private NodeIdProto convertToProtoFormat(NodeId t) { return ((NodeIdPBImpl)t).getProto(); } + private NodeLabelProto convertToProtoFormat(NodeLabel l) { + return ((NodeLabelPBImpl)l).getProto(); + } + @Override public int hashCode() { assert false : "hashCode not designed"; @@ -168,7 +176,7 @@ public boolean equals(Object other) { @Override @Public @Evolving - public void setLabelsToNodes(Map<String, Set<NodeId>> map) { + public void setLabelsToNodes(Map<NodeLabel, Set<NodeId>> map) { initLabelsToNodes(); labelsToNodes.clear(); labelsToNodes.putAll(map); @@ -177,7 +185,7 @@ public void setLabelsToNodes(Map<String, Set<NodeId>> map) { @Override @Public @Evolving - public Map<String, Set<NodeId>> getLabelsToNodes() { + public Map<NodeLabel, Set<NodeId>> getLabelsToNodes() { initLabelsToNodes(); return this.labelsToNodes; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetNodesToLabelsResponsePBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetNodesToLabelsResponsePBImpl.java index 340483008032c..52be73f6a4b9e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetNodesToLabelsResponsePBImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetNodesToLabelsResponsePBImpl.java @@ -19,6 +19,7 @@ package org.apache.hadoop.yarn.api.protocolrecords.impl.pb; import java.util.HashMap; +import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -26,12 +27,13 @@ import java.util.Set; import org.apache.hadoop.yarn.api.records.NodeId; +import org.apache.hadoop.yarn.api.records.NodeLabel; import org.apache.hadoop.yarn.api.records.impl.pb.NodeIdPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.NodeLabelPBImpl; import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto; import 
org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsResponse; - -import com.google.common.collect.Sets; -import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto; +import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsInfoProto; +import org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProtoOrBuilder; @@ -42,8 +44,8 @@ public class GetNodesToLabelsResponsePBImpl extends GetNodesToLabelsResponseProto.Builder builder = null; boolean viaProto = false; - private Map<NodeId, Set<String>> nodeToLabels; - + private Map<NodeId, Set<NodeLabel>> nodeToLabels; + public GetNodesToLabelsResponsePBImpl() { this.builder = GetNodesToLabelsResponseProto.newBuilder(); } @@ -58,12 +60,15 @@ private void initNodeToLabels() { return; } GetNodesToLabelsResponseProtoOrBuilder p = viaProto ? proto : builder; - List<NodeIdToLabelsProto> list = p.getNodeToLabelsList(); - this.nodeToLabels = new HashMap<NodeId, Set<String>>(); - - for (NodeIdToLabelsProto c : list) { - this.nodeToLabels.put(new NodeIdPBImpl(c.getNodeId()), - Sets.newHashSet(c.getNodeLabelsList())); + List<NodeIdToLabelsInfoProto> list = p.getNodeToLabelsList(); + this.nodeToLabels = new HashMap<NodeId, Set<NodeLabel>>(); + + for (NodeIdToLabelsInfoProto c : list) { + Set<NodeLabel> labels = new HashSet<NodeLabel>(); + for (NodeLabelProto l : c.getNodeLabelsList()) { + labels.add(new NodeLabelPBImpl(l)); + } + this.nodeToLabels.put(new NodeIdPBImpl(c.getNodeId()), labels); } } @@ -80,13 +85,13 @@ private void addNodeToLabelsToProto() { if (nodeToLabels == null) { return; } - Iterable<NodeIdToLabelsProto> iterable = - new Iterable<NodeIdToLabelsProto>() { + Iterable<NodeIdToLabelsInfoProto> iterable = + new Iterable<NodeIdToLabelsInfoProto>() { @Override - public Iterator<NodeIdToLabelsProto> iterator() { - return new Iterator<NodeIdToLabelsProto>() { + public Iterator<NodeIdToLabelsInfoProto> iterator() { + return new Iterator<NodeIdToLabelsInfoProto>() { - Iterator<Entry<NodeId, Set<String>>> iter = nodeToLabels + Iterator<Entry<NodeId, Set<NodeLabel>>> iter = nodeToLabels .entrySet().iterator(); @Override @@ -95,11 +100,16 @@ public void remove() { } @Override - public NodeIdToLabelsProto next() { - Entry<NodeId, Set<String>> now = iter.next(); - return NodeIdToLabelsProto.newBuilder() + public NodeIdToLabelsInfoProto next() { + Entry<NodeId, Set<NodeLabel>> now = iter.next(); + Set<NodeLabelProto> labelProtoList = + new HashSet<NodeLabelProto>(); + for (NodeLabel l : now.getValue()) { + labelProtoList.add(convertToProtoFormat(l)); + } + return NodeIdToLabelsInfoProto.newBuilder() .setNodeId(convertToProtoFormat(now.getKey())) - .addAllNodeLabels(now.getValue()).build(); + .addAllNodeLabels(labelProtoList).build(); } @Override @@ -134,13 +144,13 @@ public GetNodesToLabelsResponseProto getProto() { } @Override - public Map<NodeId, Set<String>> getNodeToLabels() { + public Map<NodeId, Set<NodeLabel>> getNodeToLabels() { initNodeToLabels(); return this.nodeToLabels; } @Override - public void setNodeToLabels(Map<NodeId, Set<String>> map) { + public void setNodeToLabels(Map<NodeId, Set<NodeLabel>> map) { initNodeToLabels(); nodeToLabels.clear(); nodeToLabels.putAll(map); @@ -150,6 +160,10 @@ private NodeIdProto convertToProtoFormat(NodeId t) { return ((NodeIdPBImpl)t).getProto(); } + private NodeLabelProto convertToProtoFormat(NodeLabel t) { + return 
((NodeLabelPBImpl)t).getProto(); + } + @Override public int hashCode() { assert false : "hashCode not designed"; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReplaceLabelsOnNodeRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReplaceLabelsOnNodeRequestPBImpl.java index e296aaf13b8b0..22e561cd94a7a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReplaceLabelsOnNodeRequestPBImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReplaceLabelsOnNodeRequestPBImpl.java @@ -28,7 +28,7 @@ import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.impl.pb.NodeIdPBImpl; import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto; -import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsNameProto; import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto; import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProtoOrBuilder; import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeRequest; @@ -58,10 +58,10 @@ private void initNodeToLabels() { return; } ReplaceLabelsOnNodeRequestProtoOrBuilder p = viaProto ? proto : builder; - List<NodeIdToLabelsProto> list = p.getNodeToLabelsList(); + List<NodeIdToLabelsNameProto> list = p.getNodeToLabelsList(); this.nodeIdToLabels = new HashMap<NodeId, Set<String>>(); - for (NodeIdToLabelsProto c : list) { + for (NodeIdToLabelsNameProto c : list) { this.nodeIdToLabels.put(new NodeIdPBImpl(c.getNodeId()), Sets.newHashSet(c.getNodeLabelsList())); } @@ -80,11 +80,11 @@ private void addNodeToLabelsToProto() { if (nodeIdToLabels == null) { return; } - Iterable<NodeIdToLabelsProto> iterable = - new Iterable<NodeIdToLabelsProto>() { + Iterable<NodeIdToLabelsNameProto> iterable = + new Iterable<NodeIdToLabelsNameProto>() { @Override - public Iterator<NodeIdToLabelsProto> iterator() { - return new Iterator<NodeIdToLabelsProto>() { + public Iterator<NodeIdToLabelsNameProto> iterator() { + return new Iterator<NodeIdToLabelsNameProto>() { Iterator<Entry<NodeId, Set<String>>> iter = nodeIdToLabels .entrySet().iterator(); @@ -95,9 +95,9 @@ public void remove() { } @Override - public NodeIdToLabelsProto next() { + public NodeIdToLabelsNameProto next() { Entry<NodeId, Set<String>> now = iter.next(); - return NodeIdToLabelsProto.newBuilder() + return NodeIdToLabelsNameProto.newBuilder() .setNodeId(convertToProtoFormat(now.getKey())).clearNodeLabels() .addAllNodeLabels(now.getValue()).build(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java index b927b47fb70f7..0ef43888dabf2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java @@ -1226,7 +1226,7 @@ public GetNodesToLabelsResponse getNodeToLabels( GetNodesToLabelsRequest request) throws YarnException, IOException { RMNodeLabelsManager labelsMgr = rmContext.getNodeLabelManager(); GetNodesToLabelsResponse response = - GetNodesToLabelsResponse.newInstance(labelsMgr.getNodeLabels()); + GetNodesToLabelsResponse.newInstance(labelsMgr.getNodeLabelsInfo()); return response; } @@ -1236,10 +1236,10 @@ public GetLabelsToNodesResponse getLabelsToNodes( RMNodeLabelsManager labelsMgr = rmContext.getNodeLabelManager(); if (request.getNodeLabels() == null || request.getNodeLabels().isEmpty()) { return GetLabelsToNodesResponse.newInstance( - labelsMgr.getLabelsToNodes()); + labelsMgr.getLabelsInfoToNodes()); } else { return GetLabelsToNodesResponse.newInstance( - labelsMgr.getLabelsToNodes(request.getNodeLabels())); + labelsMgr.getLabelsInfoToNodes(request.getNodeLabels())); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java index a39f94f07a243..20343a51856cd 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java @@ -1407,8 +1407,10 @@ protected ClientRMService createClientRMService() { }; }; rm.start(); + NodeLabel labelX = NodeLabel.newInstance("x", false); + NodeLabel labelY = NodeLabel.newInstance("y"); RMNodeLabelsManager labelsMgr = rm.getRMContext().getNodeLabelManager(); - labelsMgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x", "y")); + labelsMgr.addToCluserNodeLabels(ImmutableSet.of(labelX, labelY)); NodeId node1 = NodeId.newInstance("host1", 1234); NodeId node2 = NodeId.newInstance("host2", 1234); @@ -1422,25 +1424,37 @@ protected ClientRMService createClientRMService() { YarnRPC rpc = YarnRPC.create(conf); InetSocketAddress rmAddress = rm.getClientRMService().getBindAddress(); LOG.info("Connecting to ResourceManager at " + rmAddress); - ApplicationClientProtocol client = - (ApplicationClientProtocol) rpc.getProxy( - ApplicationClientProtocol.class, rmAddress, conf); + ApplicationClientProtocol client = (ApplicationClientProtocol) rpc + .getProxy(ApplicationClientProtocol.class, rmAddress, conf); // Get node labels collection - GetClusterNodeLabelsResponse response = - client.getClusterNodeLabels(GetClusterNodeLabelsRequest.newInstance()); + GetClusterNodeLabelsResponse response = client + .getClusterNodeLabels(GetClusterNodeLabelsRequest.newInstance()); Assert.assertTrue(response.getNodeLabels().containsAll( - Arrays.asList(NodeLabel.newInstance("x"), NodeLabel.newInstance("y")))); + Arrays.asList(labelX, labelY))); // Get node labels mapping - GetNodesToLabelsResponse response1 = - client.getNodeToLabels(GetNodesToLabelsRequest.newInstance()); - Map<NodeId, Set<String>> nodeToLabels = response1.getNodeToLabels(); + GetNodesToLabelsResponse response1 = client + 
.getNodeToLabels(GetNodesToLabelsRequest.newInstance()); + Map<NodeId, Set<NodeLabel>> nodeToLabels = response1.getNodeToLabels(); Assert.assertTrue(nodeToLabels.keySet().containsAll( Arrays.asList(node1, node2))); - Assert.assertTrue(nodeToLabels.get(node1).containsAll(Arrays.asList("x"))); - Assert.assertTrue(nodeToLabels.get(node2).containsAll(Arrays.asList("y"))); - + Assert.assertTrue(nodeToLabels.get(node1) + .containsAll(Arrays.asList(labelX))); + Assert.assertTrue(nodeToLabels.get(node2) + .containsAll(Arrays.asList(labelY))); + // Verify whether labelX's exclusivity is false + for (NodeLabel x : nodeToLabels.get(node1)) { + Assert.assertFalse(x.isExclusive()); + } + // Verify whether labelY's exclusivity is true + for (NodeLabel y : nodeToLabels.get(node2)) { + Assert.assertTrue(y.isExclusive()); + } + // Below label "x" is not present in the response as exclusivity is true + Assert.assertFalse(nodeToLabels.get(node1).containsAll( + Arrays.asList(NodeLabel.newInstance("x")))); + rpc.stopProxy(client, conf); rm.close(); } @@ -1456,8 +1470,12 @@ protected ClientRMService createClientRMService() { }; }; rm.start(); + + NodeLabel labelX = NodeLabel.newInstance("x", false); + NodeLabel labelY = NodeLabel.newInstance("y", false); + NodeLabel labelZ = NodeLabel.newInstance("z", false); RMNodeLabelsManager labelsMgr = rm.getRMContext().getNodeLabelManager(); - labelsMgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x", "y", "z")); + labelsMgr.addToCluserNodeLabels(ImmutableSet.of(labelX, labelY, labelZ)); NodeId node1A = NodeId.newInstance("host1", 1234); NodeId node1B = NodeId.newInstance("host1", 5678); @@ -1477,43 +1495,49 @@ protected ClientRMService createClientRMService() { YarnRPC rpc = YarnRPC.create(conf); InetSocketAddress rmAddress = rm.getClientRMService().getBindAddress(); LOG.info("Connecting to ResourceManager at " + rmAddress); - ApplicationClientProtocol client = - (ApplicationClientProtocol) rpc.getProxy( - ApplicationClientProtocol.class, rmAddress, conf); + ApplicationClientProtocol client = (ApplicationClientProtocol) rpc + .getProxy(ApplicationClientProtocol.class, rmAddress, conf); // Get node labels collection - GetClusterNodeLabelsResponse response = - client.getClusterNodeLabels(GetClusterNodeLabelsRequest.newInstance()); + GetClusterNodeLabelsResponse response = client + .getClusterNodeLabels(GetClusterNodeLabelsRequest.newInstance()); Assert.assertTrue(response.getNodeLabels().containsAll( - Arrays.asList(NodeLabel.newInstance("x"), NodeLabel.newInstance("y"), - NodeLabel.newInstance("z")))); + Arrays.asList(labelX, labelY, labelZ))); // Get labels to nodes mapping - GetLabelsToNodesResponse response1 = - client.getLabelsToNodes(GetLabelsToNodesRequest.newInstance()); - Map<String, Set<NodeId>> labelsToNodes = response1.getLabelsToNodes(); - Assert.assertTrue( - labelsToNodes.keySet().containsAll(Arrays.asList("x", "y", "z"))); - Assert.assertTrue( - labelsToNodes.get("x").containsAll(Arrays.asList(node1A))); - Assert.assertTrue( - labelsToNodes.get("y").containsAll(Arrays.asList(node2A, node3A))); - Assert.assertTrue( - labelsToNodes.get("z").containsAll(Arrays.asList(node1B, node3B))); + GetLabelsToNodesResponse response1 = client + .getLabelsToNodes(GetLabelsToNodesRequest.newInstance()); + Map<NodeLabel, Set<NodeId>> labelsToNodes = response1.getLabelsToNodes(); + // Verify whether all NodeLabel's exclusivity are false + for (Map.Entry<NodeLabel, Set<NodeId>> nltn : labelsToNodes.entrySet()) { + 
Assert.assertFalse(nltn.getKey().isExclusive()); + } + Assert.assertTrue(labelsToNodes.keySet().containsAll( + Arrays.asList(labelX, labelY, labelZ))); + Assert.assertTrue(labelsToNodes.get(labelX).containsAll( + Arrays.asList(node1A))); + Assert.assertTrue(labelsToNodes.get(labelY).containsAll( + Arrays.asList(node2A, node3A))); + Assert.assertTrue(labelsToNodes.get(labelZ).containsAll( + Arrays.asList(node1B, node3B))); // Get labels to nodes mapping for specific labels - Set<String> setlabels = - new HashSet<String>(Arrays.asList(new String[]{"x", "z"})); - GetLabelsToNodesResponse response2 = - client.getLabelsToNodes(GetLabelsToNodesRequest.newInstance(setlabels)); + Set<String> setlabels = new HashSet<String>(Arrays.asList(new String[]{"x", + "z"})); + GetLabelsToNodesResponse response2 = client + .getLabelsToNodes(GetLabelsToNodesRequest.newInstance(setlabels)); labelsToNodes = response2.getLabelsToNodes(); - Assert.assertTrue( - labelsToNodes.keySet().containsAll(Arrays.asList("x", "z"))); - Assert.assertTrue( - labelsToNodes.get("x").containsAll(Arrays.asList(node1A))); - Assert.assertTrue( - labelsToNodes.get("z").containsAll(Arrays.asList(node1B, node3B))); - Assert.assertEquals(labelsToNodes.get("y"), null); + // Verify whether all NodeLabel's exclusivity are false + for (Map.Entry<NodeLabel, Set<NodeId>> nltn : labelsToNodes.entrySet()) { + Assert.assertFalse(nltn.getKey().isExclusive()); + } + Assert.assertTrue(labelsToNodes.keySet().containsAll( + Arrays.asList(labelX, labelZ))); + Assert.assertTrue(labelsToNodes.get(labelX).containsAll( + Arrays.asList(node1A))); + Assert.assertTrue(labelsToNodes.get(labelZ).containsAll( + Arrays.asList(node1B, node3B))); + Assert.assertEquals(labelsToNodes.get(labelY), null); rpc.stopProxy(client, conf); rm.close();
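A minimal sketch of how a client might consume the NodeLabel-typed mapping this change returns from getNodeToLabels (assuming a reachable ResourceManager; the localhost address and class name are illustrative, not part of the patch — in practice the RM address comes from yarn.resourcemanager.address):

import java.net.InetSocketAddress;
import java.util.Map;
import java.util.Set;

import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsRequest;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.NodeLabel;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.ipc.YarnRPC;

public class NodeLabelsClientSketch {
  public static void main(String[] args) throws Exception {
    YarnConfiguration conf = new YarnConfiguration();
    YarnRPC rpc = YarnRPC.create(conf);
    // Illustrative address; real code should read it from the configuration.
    InetSocketAddress rmAddress = new InetSocketAddress("localhost", 8032);
    ApplicationClientProtocol client = (ApplicationClientProtocol) rpc
        .getProxy(ApplicationClientProtocol.class, rmAddress, conf);
    try {
      // After this change the mapping carries full NodeLabel objects, so
      // clients can see each label's exclusivity, not just its name.
      Map<NodeId, Set<NodeLabel>> nodeToLabels = client
          .getNodeToLabels(GetNodesToLabelsRequest.newInstance())
          .getNodeToLabels();
      for (Map.Entry<NodeId, Set<NodeLabel>> e : nodeToLabels.entrySet()) {
        for (NodeLabel label : e.getValue()) {
          System.out.println(e.getKey() + " -> " + label.getName()
              + " (exclusive=" + label.isExclusive() + ")");
        }
      }
    } finally {
      rpc.stopProxy(client, conf);
    }
  }
}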
f9c5c308e4f713daf8a6e57dbd9f9b3bf2e81d03
hadoop
YARN-1913. With Fair Scheduler, cluster can logjam when all resources are consumed by AMs (Wei Yan via Sandy Ryza). git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1599401 13f79535-47bb-0310-9956-ffa450edef68
c
https://github.com/apache/hadoop
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt index 751b083319c2f..f8c4af37dcc1b 100644 --- a/hadoop-yarn-project/CHANGES.txt +++ b/hadoop-yarn-project/CHANGES.txt @@ -117,6 +117,9 @@ Release 2.5.0 - UNRELEASED YARN-1474. Make sechedulers services. (Tsuyoshi Ozawa via kasha) + YARN-1913. With Fair Scheduler, cluster can logjam when all resources are + consumed by AMs (Wei Yan via Sandy Ryza) + OPTIMIZATIONS BUG FIXES diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/SchedulerApplicationAttempt.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/SchedulerApplicationAttempt.java index fc7e04762d9e9..cce2e46cadd81 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/SchedulerApplicationAttempt.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/SchedulerApplicationAttempt.java @@ -32,6 +32,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport; +import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.NMToken; @@ -76,6 +77,8 @@ public class SchedulerApplicationAttempt { protected final Resource currentReservation = Resource.newInstance(0, 0); private Resource resourceLimit = Resource.newInstance(0, 0); protected Resource currentConsumption = Resource.newInstance(0, 0); + private Resource amResource; + private boolean unmanagedAM = true; protected List<RMContainer> newlyAllocatedContainers = new ArrayList<RMContainer>(); @@ -106,6 +109,19 @@ public SchedulerApplicationAttempt(ApplicationAttemptId applicationAttemptId, new AppSchedulingInfo(applicationAttemptId, user, queue, activeUsersManager); this.queue = queue; + + + if (rmContext != null && rmContext.getRMApps() != null && + rmContext.getRMApps() + .containsKey(applicationAttemptId.getApplicationId())) { + ApplicationSubmissionContext appSubmissionContext = + rmContext.getRMApps().get(applicationAttemptId.getApplicationId()) + .getApplicationSubmissionContext(); + if (appSubmissionContext != null) { + amResource = appSubmissionContext.getResource(); + unmanagedAM = appSubmissionContext.getUnmanagedAM(); + } + } } /** @@ -168,6 +184,14 @@ public String getQueueName() { return appSchedulingInfo.getQueueName(); } + public Resource getAMResource() { + return amResource; + } + + public boolean getUnmanagedAM() { + return unmanagedAM; + } + public synchronized RMContainer getRMContainer(ContainerId id) { return liveContainers.get(id); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationConfiguration.java index 0f9d906920472..237cad29c1914 
100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationConfiguration.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationConfiguration.java @@ -53,6 +53,10 @@ public class AllocationConfiguration { private final int userMaxAppsDefault; private final int queueMaxAppsDefault; + // Maximum resource share for each leaf queue that can be used to run AMs + final Map<String, Float> queueMaxAMShares; + private final float queueMaxAMShareDefault; + // ACL's for each queue. Only specifies non-default ACL's from configuration. private final Map<String, Map<QueueACL, AccessControlList>> queueAcls; @@ -84,8 +88,9 @@ public class AllocationConfiguration { public AllocationConfiguration(Map<String, Resource> minQueueResources, Map<String, Resource> maxQueueResources, Map<String, Integer> queueMaxApps, Map<String, Integer> userMaxApps, - Map<String, ResourceWeights> queueWeights, int userMaxAppsDefault, - int queueMaxAppsDefault, + Map<String, ResourceWeights> queueWeights, + Map<String, Float> queueMaxAMShares, int userMaxAppsDefault, + int queueMaxAppsDefault, float queueMaxAMShareDefault, Map<String, SchedulingPolicy> schedulingPolicies, SchedulingPolicy defaultSchedulingPolicy, Map<String, Long> minSharePreemptionTimeouts, @@ -97,9 +102,11 @@ public AllocationConfiguration(Map<String, Resource> minQueueResources, this.maxQueueResources = maxQueueResources; this.queueMaxApps = queueMaxApps; this.userMaxApps = userMaxApps; + this.queueMaxAMShares = queueMaxAMShares; this.queueWeights = queueWeights; this.userMaxAppsDefault = userMaxAppsDefault; this.queueMaxAppsDefault = queueMaxAppsDefault; + this.queueMaxAMShareDefault = queueMaxAMShareDefault; this.defaultSchedulingPolicy = defaultSchedulingPolicy; this.schedulingPolicies = schedulingPolicies; this.minSharePreemptionTimeouts = minSharePreemptionTimeouts; @@ -116,8 +123,10 @@ public AllocationConfiguration(Configuration conf) { queueWeights = new HashMap<String, ResourceWeights>(); queueMaxApps = new HashMap<String, Integer>(); userMaxApps = new HashMap<String, Integer>(); + queueMaxAMShares = new HashMap<String, Float>(); userMaxAppsDefault = Integer.MAX_VALUE; queueMaxAppsDefault = Integer.MAX_VALUE; + queueMaxAMShareDefault = 1.0f; queueAcls = new HashMap<String, Map<QueueACL, AccessControlList>>(); minSharePreemptionTimeouts = new HashMap<String, Long>(); defaultMinSharePreemptionTimeout = Long.MAX_VALUE; @@ -184,6 +193,11 @@ public int getQueueMaxApps(String queue) { return (maxApps == null) ? queueMaxAppsDefault : maxApps; } + public float getQueueMaxAMShare(String queue) { + Float maxAMShare = queueMaxAMShares.get(queue); + return (maxAMShare == null) ? queueMaxAMShareDefault : maxAMShare; + } + /** * Get the minimum resource allocation for the given queue. * @return the cap set on this queue, or 0 if not set. 
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationFileLoaderService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationFileLoaderService.java index 6c3563089183e..3a962a8ce52a8 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationFileLoaderService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationFileLoaderService.java @@ -209,6 +209,7 @@ public synchronized void reloadAllocations() throws IOException, Map<String, Resource> maxQueueResources = new HashMap<String, Resource>(); Map<String, Integer> queueMaxApps = new HashMap<String, Integer>(); Map<String, Integer> userMaxApps = new HashMap<String, Integer>(); + Map<String, Float> queueMaxAMShares = new HashMap<String, Float>(); Map<String, ResourceWeights> queueWeights = new HashMap<String, ResourceWeights>(); Map<String, SchedulingPolicy> queuePolicies = new HashMap<String, SchedulingPolicy>(); Map<String, Long> minSharePreemptionTimeouts = new HashMap<String, Long>(); @@ -216,6 +217,7 @@ public synchronized void reloadAllocations() throws IOException, new HashMap<String, Map<QueueACL, AccessControlList>>(); int userMaxAppsDefault = Integer.MAX_VALUE; int queueMaxAppsDefault = Integer.MAX_VALUE; + float queueMaxAMShareDefault = 1.0f; long fairSharePreemptionTimeout = Long.MAX_VALUE; long defaultMinSharePreemptionTimeout = Long.MAX_VALUE; SchedulingPolicy defaultSchedPolicy = SchedulingPolicy.DEFAULT_POLICY; @@ -282,6 +284,11 @@ public synchronized void reloadAllocations() throws IOException, String text = ((Text)element.getFirstChild()).getData().trim(); int val = Integer.parseInt(text); queueMaxAppsDefault = val; + } else if ("queueMaxAMShareDefault".equals(element.getTagName())) { + String text = ((Text)element.getFirstChild()).getData().trim(); + float val = Float.parseFloat(text); + val = Math.min(val, 1.0f); + queueMaxAMShareDefault = val; } else if ("defaultQueueSchedulingPolicy".equals(element.getTagName()) || "defaultQueueSchedulingMode".equals(element.getTagName())) { String text = ((Text)element.getFirstChild()).getData().trim(); @@ -306,8 +313,8 @@ public synchronized void reloadAllocations() throws IOException, parent = null; } loadQueue(parent, element, minQueueResources, maxQueueResources, - queueMaxApps, userMaxApps, queueWeights, queuePolicies, - minSharePreemptionTimeouts, queueAcls, + queueMaxApps, userMaxApps, queueMaxAMShares, queueWeights, + queuePolicies, minSharePreemptionTimeouts, queueAcls, configuredQueues); } @@ -322,8 +329,8 @@ public synchronized void reloadAllocations() throws IOException, } AllocationConfiguration info = new AllocationConfiguration(minQueueResources, maxQueueResources, - queueMaxApps, userMaxApps, queueWeights, userMaxAppsDefault, - queueMaxAppsDefault, queuePolicies, defaultSchedPolicy, minSharePreemptionTimeouts, + queueMaxApps, userMaxApps, queueWeights, queueMaxAMShares, userMaxAppsDefault, + queueMaxAppsDefault, queueMaxAMShareDefault, queuePolicies, defaultSchedPolicy, minSharePreemptionTimeouts, queueAcls, fairSharePreemptionTimeout, defaultMinSharePreemptionTimeout, 
newPlacementPolicy, configuredQueues); @@ -338,7 +345,8 @@ public synchronized void reloadAllocations() throws IOException, */ private void loadQueue(String parentName, Element element, Map<String, Resource> minQueueResources, Map<String, Resource> maxQueueResources, Map<String, Integer> queueMaxApps, - Map<String, Integer> userMaxApps, Map<String, ResourceWeights> queueWeights, + Map<String, Integer> userMaxApps, Map<String, Float> queueMaxAMShares, + Map<String, ResourceWeights> queueWeights, Map<String, SchedulingPolicy> queuePolicies, Map<String, Long> minSharePreemptionTimeouts, Map<String, Map<QueueACL, AccessControlList>> queueAcls, @@ -370,6 +378,11 @@ private void loadQueue(String parentName, Element element, Map<String, Resource> String text = ((Text)field.getFirstChild()).getData().trim(); int val = Integer.parseInt(text); queueMaxApps.put(queueName, val); + } else if ("maxAMShare".equals(field.getTagName())) { + String text = ((Text)field.getFirstChild()).getData().trim(); + float val = Float.parseFloat(text); + val = Math.min(val, 1.0f); + queueMaxAMShares.put(queueName, val); } else if ("weight".equals(field.getTagName())) { String text = ((Text)field.getFirstChild()).getData().trim(); double val = Double.parseDouble(text); @@ -392,8 +405,9 @@ private void loadQueue(String parentName, Element element, Map<String, Resource> } else if ("queue".endsWith(field.getTagName()) || "pool".equals(field.getTagName())) { loadQueue(queueName, field, minQueueResources, maxQueueResources, - queueMaxApps, userMaxApps, queueWeights, queuePolicies, - minSharePreemptionTimeouts, queueAcls, configuredQueues); + queueMaxApps, userMaxApps, queueMaxAMShares, queueWeights, + queuePolicies, minSharePreemptionTimeouts, queueAcls, + configuredQueues); configuredQueues.get(FSQueueType.PARENT).add(queueName); isLeaf = false; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AppSchedulable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AppSchedulable.java index 4dc0bf4ceb870..32edc8a264144 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AppSchedulable.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AppSchedulable.java @@ -267,6 +267,12 @@ private Resource assignContainer(FSSchedulerNode node, node.allocateContainer(app.getApplicationId(), allocatedContainer); + // If this container is used to run AM, update the leaf queue's AM usage + if (app.getLiveContainers().size() == 1 && + !app.getUnmanagedAM()) { + queue.addAMResourceUsage(container.getResource()); + } + return container.getResource(); } else { // The desired container won't fit here, so reserve @@ -297,6 +303,14 @@ private Resource assignContainer(FSSchedulerNode node, boolean reserved) { app.addSchedulingOpportunity(priority); + // Check the AM resource usage for the leaf queue + if (app.getLiveContainers().size() == 0 + && !app.getUnmanagedAM()) { + if (!queue.canRunAppAM(app.getAMResource())) { + return Resources.none(); + } + } + ResourceRequest rackLocalRequest = app.getResourceRequest(priority, node.getRackName()); ResourceRequest localRequest = 
app.getResourceRequest(priority, diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSLeafQueue.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSLeafQueue.java index fe738da7d4611..cecfbfc8e1de4 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSLeafQueue.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSLeafQueue.java @@ -55,6 +55,9 @@ public class FSLeafQueue extends FSQueue { private long lastTimeAtMinShare; private long lastTimeAtHalfFairShare; + // Track the AM resource usage for this queue + private Resource amResourceUsage; + private final ActiveUsersManager activeUsersManager; public FSLeafQueue(String name, FairScheduler scheduler, @@ -63,6 +66,7 @@ public FSLeafQueue(String name, FairScheduler scheduler, this.lastTimeAtMinShare = scheduler.getClock().getTime(); this.lastTimeAtHalfFairShare = scheduler.getClock().getTime(); activeUsersManager = new ActiveUsersManager(getMetrics()); + amResourceUsage = Resource.newInstance(0, 0); } public void addApp(FSSchedulerApp app, boolean runnable) { @@ -86,6 +90,10 @@ void addAppSchedulable(AppSchedulable appSched) { */ public boolean removeApp(FSSchedulerApp app) { if (runnableAppScheds.remove(app.getAppSchedulable())) { + // Update AM resource usage + if (app.getAMResource() != null) { + Resources.subtractFrom(amResourceUsage, app.getAMResource()); + } return true; } else if (nonRunnableAppScheds.remove(app.getAppSchedulable())) { return false; @@ -284,4 +292,26 @@ public int getNumRunnableApps() { public ActiveUsersManager getActiveUsersManager() { return activeUsersManager; } + + /** + * Check whether this queue can run this application master under the + * maxAMShare limit + * + * @param amResource + * @return true if this queue can run + */ + public boolean canRunAppAM(Resource amResource) { + float maxAMShare = + scheduler.getAllocationConfiguration().getQueueMaxAMShare(getName()); + Resource maxAMResource = Resources.multiply(getFairShare(), maxAMShare); + Resource ifRunAMResource = Resources.add(amResourceUsage, amResource); + return !policy + .checkIfAMResourceUsageOverLimit(ifRunAMResource, maxAMResource); + } + + public void addAMResourceUsage(Resource amResource) { + if (amResource != null) { + Resources.addTo(amResourceUsage, amResource); + } + } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/SchedulingPolicy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/SchedulingPolicy.java index 1d77a43ce7588..1087c73aa19a7 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/SchedulingPolicy.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/SchedulingPolicy.java @@ -149,4 
+149,15 @@ public abstract void computeShares( */ public abstract boolean checkIfUsageOverFairShare( Resource usage, Resource fairShare); + + /** + * Check if a leaf queue's AM resource usage over its limit under this policy + * + * @param usage {@link Resource} the resource used by application masters + * @param maxAMResource {@link Resource} the maximum allowed resource for + * application masters + * @return true if AM resource usage is over the limit + */ + public abstract boolean checkIfAMResourceUsageOverLimit( + Resource usage, Resource maxAMResource); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/policies/DominantResourceFairnessPolicy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/policies/DominantResourceFairnessPolicy.java index 4b663d95de8b9..af674b96056c2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/policies/DominantResourceFairnessPolicy.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/policies/DominantResourceFairnessPolicy.java @@ -74,6 +74,11 @@ public boolean checkIfUsageOverFairShare(Resource usage, Resource fairShare) { return !Resources.fitsIn(usage, fairShare); } + @Override + public boolean checkIfAMResourceUsageOverLimit(Resource usage, Resource maxAMResource) { + return !Resources.fitsIn(usage, maxAMResource); + } + @Override public void initialize(Resource clusterCapacity) { comparator.setClusterCapacity(clusterCapacity); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/policies/FairSharePolicy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/policies/FairSharePolicy.java index ca7297ff46c38..5976cea52302f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/policies/FairSharePolicy.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/policies/FairSharePolicy.java @@ -124,6 +124,11 @@ public boolean checkIfUsageOverFairShare(Resource usage, Resource fairShare) { return Resources.greaterThan(RESOURCE_CALCULATOR, null, usage, fairShare); } + @Override + public boolean checkIfAMResourceUsageOverLimit(Resource usage, Resource maxAMResource) { + return usage.getMemory() > maxAMResource.getMemory(); + } + @Override public byte getApplicableDepth() { return SchedulingPolicy.DEPTH_ANY; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/policies/FifoPolicy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/policies/FifoPolicy.java index d996944681157..0f4309759d42c 
100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/policies/FifoPolicy.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/policies/FifoPolicy.java @@ -94,6 +94,11 @@ public boolean checkIfUsageOverFairShare(Resource usage, Resource fairShare) { "as FifoPolicy only works for FSLeafQueue."); } + @Override + public boolean checkIfAMResourceUsageOverLimit(Resource usage, Resource maxAMResource) { + return usage.getMemory() > maxAMResource.getMemory(); + } + @Override public byte getApplicableDepth() { return SchedulingPolicy.DEPTH_LEAF; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairSchedulerTestBase.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairSchedulerTestBase.java index 5f926763d9b0a..fb864a2ac708a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairSchedulerTestBase.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairSchedulerTestBase.java @@ -20,14 +20,21 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; +import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.Priority; +import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; +import org.apache.hadoop.yarn.server.resourcemanager.RMContext; import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager; +import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; +import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppImpl; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler; +import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAddedSchedulerEvent; +import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAttemptAddedSchedulerEvent; import org.apache.hadoop.yarn.server.utils.BuilderUtils; import org.apache.hadoop.yarn.util.Clock; @@ -169,4 +176,20 @@ protected void createSchedulingRequestExistingApplication( ask.add(request); scheduler.allocate(attId, ask, new ArrayList<ContainerId>(), null, null); } + + protected void createApplicationWithAMResource(ApplicationAttemptId attId, + String queue, String user, Resource amResource) { + RMContext rmContext = resourceManager.getRMContext(); + RMApp rmApp = new RMAppImpl(attId.getApplicationId(), rmContext, conf, + null, null, null, ApplicationSubmissionContext.newInstance(null, null, + null, null, null, false, false, 0, amResource, null), null, null, + 0, null, null); + 
rmContext.getRMApps().put(attId.getApplicationId(), rmApp); + AppAddedSchedulerEvent appAddedEvent = new AppAddedSchedulerEvent( + attId.getApplicationId(), queue, user); + scheduler.handle(appAddedEvent); + AppAttemptAddedSchedulerEvent attempAddedEvent = + new AppAttemptAddedSchedulerEvent(attId, false); + scheduler.handle(attempAddedEvent); + } } \ No newline at end of file diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestAllocationFileLoaderService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestAllocationFileLoaderService.java index 2a725d8bbf44d..2a4992c32ab90 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestAllocationFileLoaderService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestAllocationFileLoaderService.java @@ -174,9 +174,10 @@ public void testAllocationFileParsing() throws Exception { out.println("<queue name=\"queueC\">"); out.println("<aclSubmitApps>alice,bob admins</aclSubmitApps>"); out.println("</queue>"); - // Give queue D a limit of 3 running apps + // Give queue D a limit of 3 running apps and 0.4f maxAMShare out.println("<queue name=\"queueD\">"); out.println("<maxRunningApps>3</maxRunningApps>"); + out.println("<maxAMShare>0.4</maxAMShare>"); out.println("</queue>"); // Give queue E a preemption timeout of one minute out.println("<queue name=\"queueE\">"); @@ -194,6 +195,8 @@ public void testAllocationFileParsing() throws Exception { out.println("<queueMaxAppsDefault>15</queueMaxAppsDefault>"); // Set default limit of apps per user to 5 out.println("<userMaxAppsDefault>5</userMaxAppsDefault>"); + // Set default limit of AMResourceShare to 0.5f + out.println("<queueMaxAMShareDefault>0.5f</queueMaxAMShareDefault>"); // Give user1 a limit of 10 jobs out.println("<user name=\"user1\">"); out.println("<maxRunningApps>10</maxRunningApps>"); @@ -240,6 +243,13 @@ public void testAllocationFileParsing() throws Exception { assertEquals(10, queueConf.getUserMaxApps("user1")); assertEquals(5, queueConf.getUserMaxApps("user2")); + assertEquals(.5f, queueConf.getQueueMaxAMShare("root." 
+ YarnConfiguration.DEFAULT_QUEUE_NAME), 0.01); + assertEquals(.5f, queueConf.getQueueMaxAMShare("root.queueA"), 0.01); + assertEquals(.5f, queueConf.getQueueMaxAMShare("root.queueB"), 0.01); + assertEquals(.5f, queueConf.getQueueMaxAMShare("root.queueC"), 0.01); + assertEquals(.4f, queueConf.getQueueMaxAMShare("root.queueD"), 0.01); + assertEquals(.5f, queueConf.getQueueMaxAMShare("root.queueE"), 0.01); + // Root should get * ACL assertEquals("*", queueConf.getQueueAcl("root", QueueACL.ADMINISTER_QUEUE).getAclString()); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairScheduler.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairScheduler.java index b9f40b3854b21..c7141b1d90a15 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairScheduler.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairScheduler.java @@ -64,7 +64,6 @@ import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.server.resourcemanager.ApplicationMasterService; import org.apache.hadoop.yarn.server.resourcemanager.MockNodes; -import org.apache.hadoop.yarn.server.resourcemanager.RMContext; import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager; import org.apache.hadoop.yarn.server.resourcemanager.resource.ResourceType; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.MockRMApp; @@ -73,12 +72,12 @@ import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppEventType; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppImpl; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState; +import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptImpl; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer; import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.AbstractYarnScheduler; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics; -import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerApplicationAttempt; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerNode; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.TestSchedulerUtils; @@ -510,26 +509,14 @@ public void testUserAsDefaultQueue() throws Exception { scheduler.init(conf); scheduler.start(); scheduler.reinitialize(conf, resourceManager.getRMContext()); - RMContext rmContext = resourceManager.getRMContext(); - Map<ApplicationId, RMApp> appsMap = rmContext.getRMApps(); ApplicationAttemptId appAttemptId = createAppAttemptId(1, 1); - RMApp rmApp = new RMAppImpl(appAttemptId.getApplicationId(), rmContext, conf, - null, null, null, ApplicationSubmissionContext.newInstance(null, null, - null, null, null, false, false, 0, null, null), null, null, 0, null, null); - appsMap.put(appAttemptId.getApplicationId(), rmApp); - - 
AppAddedSchedulerEvent appAddedEvent = - new AppAddedSchedulerEvent(appAttemptId.getApplicationId(), "default", - "user1"); - scheduler.handle(appAddedEvent); - AppAttemptAddedSchedulerEvent attempAddedEvent = - new AppAttemptAddedSchedulerEvent(appAttemptId, false); - scheduler.handle(attempAddedEvent); + createApplicationWithAMResource(appAttemptId, "default", "user1", null); assertEquals(1, scheduler.getQueueManager().getLeafQueue("user1", true) .getRunnableAppSchedulables().size()); assertEquals(0, scheduler.getQueueManager().getLeafQueue("default", true) .getRunnableAppSchedulables().size()); - assertEquals("root.user1", rmApp.getQueue()); + assertEquals("root.user1", resourceManager.getRMContext().getRMApps() + .get(appAttemptId.getApplicationId()).getQueue()); } @Test @@ -538,21 +525,8 @@ public void testNotUserAsDefaultQueue() throws Exception { scheduler.init(conf); scheduler.start(); scheduler.reinitialize(conf, resourceManager.getRMContext()); - RMContext rmContext = resourceManager.getRMContext(); - Map<ApplicationId, RMApp> appsMap = rmContext.getRMApps(); ApplicationAttemptId appAttemptId = createAppAttemptId(1, 1); - RMApp rmApp = new RMAppImpl(appAttemptId.getApplicationId(), rmContext, conf, - null, null, null, ApplicationSubmissionContext.newInstance(null, null, - null, null, null, false, false, 0, null, null), null, null, 0, null, null); - appsMap.put(appAttemptId.getApplicationId(), rmApp); - - AppAddedSchedulerEvent appAddedEvent = - new AppAddedSchedulerEvent(appAttemptId.getApplicationId(), "default", - "user2"); - scheduler.handle(appAddedEvent); - AppAttemptAddedSchedulerEvent attempAddedEvent = - new AppAttemptAddedSchedulerEvent(appAttemptId, false); - scheduler.handle(attempAddedEvent); + createApplicationWithAMResource(appAttemptId, "default", "user2", null); assertEquals(0, scheduler.getQueueManager().getLeafQueue("user1", true) .getRunnableAppSchedulables().size()); assertEquals(1, scheduler.getQueueManager().getLeafQueue("default", true) @@ -2329,6 +2303,121 @@ public void testUserAndQueueMaxRunningApps() throws Exception { verifyQueueNumRunnable("queue1", 2, 1); } + @Test + public void testQueueMaxAMShare() throws Exception { + conf.set(FairSchedulerConfiguration.ALLOCATION_FILE, ALLOC_FILE); + + PrintWriter out = new PrintWriter(new FileWriter(ALLOC_FILE)); + out.println("<?xml version=\"1.0\"?>"); + out.println("<allocations>"); + out.println("<queue name=\"queue1\">"); + out.println("<maxAMShare>0.2</maxAMShare>"); + out.println("</queue>"); + out.println("</allocations>"); + out.close(); + + scheduler.init(conf); + scheduler.start(); + scheduler.reinitialize(conf, resourceManager.getRMContext()); + + RMNode node = + MockNodes.newNodeInfo(1, Resources.createResource(20480, 20), + 0, "127.0.0.1"); + NodeAddedSchedulerEvent nodeEvent = new NodeAddedSchedulerEvent(node); + NodeUpdateSchedulerEvent updateEvent = new NodeUpdateSchedulerEvent(node); + scheduler.handle(nodeEvent); + scheduler.update(); + + assertEquals("Queue queue1's fair share should be 10240", + 10240, scheduler.getQueueManager().getLeafQueue("queue1", true) + .getFairShare().getMemory()); + + Resource amResource1 = Resource.newInstance(1024, 1); + Resource amResource2 = Resource.newInstance(2048, 2); + int amPriority = RMAppAttemptImpl.AM_CONTAINER_PRIORITY.getPriority(); + // Exceeds no limits + ApplicationAttemptId attId1 = createAppAttemptId(1, 1); + createApplicationWithAMResource(attId1, "queue1", "user1", amResource1); + createSchedulingRequestExistingApplication(1024, 1, amPriority, 
attId1); + FSSchedulerApp app1 = scheduler.getSchedulerApp(attId1); + scheduler.update(); + scheduler.handle(updateEvent); + assertEquals("Application1's AM requests 1024 MB memory", + 1024, app1.getAMResource().getMemory()); + assertEquals("Application1's AM should be running", + 1, app1.getLiveContainers().size()); + + // Exceeds no limits + ApplicationAttemptId attId2 = createAppAttemptId(2, 1); + createApplicationWithAMResource(attId2, "queue1", "user1", amResource1); + createSchedulingRequestExistingApplication(1024, 1, amPriority, attId2); + FSSchedulerApp app2 = scheduler.getSchedulerApp(attId2); + scheduler.update(); + scheduler.handle(updateEvent); + assertEquals("Application2's AM requests 1024 MB memory", + 1024, app2.getAMResource().getMemory()); + assertEquals("Application2's AM should be running", + 1, app2.getLiveContainers().size()); + + // Exceeds queue limit + ApplicationAttemptId attId3 = createAppAttemptId(3, 1); + createApplicationWithAMResource(attId3, "queue1", "user1", amResource1); + createSchedulingRequestExistingApplication(1024, 1, amPriority, attId3); + FSSchedulerApp app3 = scheduler.getSchedulerApp(attId3); + scheduler.update(); + scheduler.handle(updateEvent); + assertEquals("Application3's AM requests 1024 MB memory", + 1024, app3.getAMResource().getMemory()); + assertEquals("Application3's AM should not be running", + 0, app3.getLiveContainers().size()); + + // Still can run non-AM container + createSchedulingRequestExistingApplication(1024, 1, attId1); + scheduler.update(); + scheduler.handle(updateEvent); + assertEquals("Application1 should have two running containers", + 2, app1.getLiveContainers().size()); + + // Remove app1, app3's AM should become running + AppAttemptRemovedSchedulerEvent appRemovedEvent1 = + new AppAttemptRemovedSchedulerEvent(attId1, RMAppAttemptState.FINISHED, false); + scheduler.update(); + scheduler.handle(appRemovedEvent1); + scheduler.handle(updateEvent); + assertEquals("Application1's AM should be finished", + 0, app1.getLiveContainers().size()); + assertEquals("Application3's AM should be running", + 1, app3.getLiveContainers().size()); + + // Exceeds queue limit + ApplicationAttemptId attId4 = createAppAttemptId(4, 1); + createApplicationWithAMResource(attId4, "queue1", "user1", amResource2); + createSchedulingRequestExistingApplication(2048, 2, amPriority, attId4); + FSSchedulerApp app4 = scheduler.getSchedulerApp(attId4); + scheduler.update(); + scheduler.handle(updateEvent); + assertEquals("Application4's AM requests 2048 MB memory", + 2048, app4.getAMResource().getMemory()); + assertEquals("Application4's AM should not be running", + 0, app4.getLiveContainers().size()); + + // Remove app2 and app3, app4's AM should become running + AppAttemptRemovedSchedulerEvent appRemovedEvent2 = + new AppAttemptRemovedSchedulerEvent(attId2, RMAppAttemptState.FINISHED, false); + AppAttemptRemovedSchedulerEvent appRemovedEvent3 = + new AppAttemptRemovedSchedulerEvent(attId3, RMAppAttemptState.FINISHED, false); + scheduler.handle(appRemovedEvent2); + scheduler.handle(appRemovedEvent3); + scheduler.update(); + scheduler.handle(updateEvent); + assertEquals("Application2's AM should be finished", + 0, app2.getLiveContainers().size()); + assertEquals("Application3's AM should be finished", + 0, app3.getLiveContainers().size()); + assertEquals("Application4's AM should be running", + 1, app4.getLiveContainers().size()); + } + @Test public void testMaxRunningAppsHierarchicalQueues() throws Exception { 
conf.set(FairSchedulerConfiguration.ALLOCATION_FILE, ALLOC_FILE); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/FairScheduler.apt.vm b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/FairScheduler.apt.vm index 54daf2da85911..23faf27bf266d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/FairScheduler.apt.vm +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/FairScheduler.apt.vm @@ -237,6 +237,11 @@ Allocation file format * maxRunningApps: limit the number of apps from the queue to run at once + * maxAMShare: limit the fraction of the queue's fair share that can be used + to run application masters. This property can only be used for leaf queues. + Default value is 1.0f, which means AMs in the leaf queue can take up to 100% + of both the memory and CPU fair share. + * weight: to share the cluster non-proportionally with other queues. Weights default to 1, and a queue with weight 2 should receive approximately twice as many resources as a queue with the default weight. @@ -279,6 +284,9 @@ Allocation file format * <<A queueMaxAppsDefault element>>, which sets the default running app limit for queues; overriden by maxRunningApps element in each queue. + * <<A queueMaxAMShareDefault element>>, which sets the default AM resource + limit for queue; overriden by maxAMShare element in each queue. + * <<A defaultQueueSchedulingPolicy element>>, which sets the default scheduling policy for queues; overriden by the schedulingPolicy element in each queue if specified. Defaults to "fair". @@ -328,6 +336,7 @@ Allocation file format <minResources>10000 mb,0vcores</minResources> <maxResources>90000 mb,0vcores</maxResources> <maxRunningApps>50</maxRunningApps> + <maxAMShare>0.1</maxAMShare> <weight>2.0</weight> <schedulingPolicy>fair</schedulingPolicy> <queue name="sample_sub_queue"> @@ -336,6 +345,8 @@ Allocation file format </queue> </queue> + <queueMaxAMShareDefault>0.5</queueMaxAMShareDefault> + <!—- Queue ‘secondary_group_queue’ is a parent queue and may have user queues under it -—> <queue name=“secondary_group_queue” type=“parent”>
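A minimal sketch of the admission check this patch adds, restated as a standalone method (the Resources utility is assumed to live at org.apache.hadoop.yarn.util.resource.Resources; the method mirrors FSLeafQueue.canRunAppAM under FairSharePolicy, which compares memory only, and the numbers reproduce the testQueueMaxAMShare scenario above):

import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.util.resource.Resources;

public class AmShareCheckSketch {
  // An AM may start only if the queue's current AM usage plus the new AM's
  // resource stays within fairShare * maxAMShare.
  static boolean canRunAppAM(Resource queueFairShare, float maxAMShare,
      Resource amResourceUsage, Resource amResource) {
    Resource maxAMResource = Resources.multiply(queueFairShare, maxAMShare);
    Resource ifRunAMResource = Resources.add(amResourceUsage, amResource);
    // FairSharePolicy's checkIfAMResourceUsageOverLimit looks at memory only.
    return ifRunAMResource.getMemory() <= maxAMResource.getMemory();
  }

  public static void main(String[] args) {
    Resource fairShare = Resource.newInstance(10240, 10); // queue1's fair share
    Resource amUsage = Resource.newInstance(2048, 2);     // two 1 GB AMs running
    Resource newAm = Resource.newInstance(1024, 1);
    // With maxAMShare = 0.2 the cap is 2048 MB, so a third AM must wait.
    System.out.println(canRunAppAM(fairShare, 0.2f, amUsage, newAm)); // false
  }
}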
a29e41b9fa93162b7731ed2d45a2ac368384decd
spring-framework
Fix Jackson @JsonView when using XML serialization. Issue: SPR-12149
c
https://github.com/spring-projects/spring-framework
diff --git a/spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/method/annotation/AbstractMappingJacksonResponseBodyAdvice.java b/spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/method/annotation/AbstractMappingJacksonResponseBodyAdvice.java index dd87024c59f6..185a151d9999 100644 --- a/spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/method/annotation/AbstractMappingJacksonResponseBodyAdvice.java +++ b/spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/method/annotation/AbstractMappingJacksonResponseBodyAdvice.java @@ -19,6 +19,7 @@ import org.springframework.core.MethodParameter; import org.springframework.http.MediaType; import org.springframework.http.converter.HttpMessageConverter; +import org.springframework.http.converter.json.AbstractJackson2HttpMessageConverter; import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter; import org.springframework.http.converter.json.MappingJacksonValue; import org.springframework.http.server.ServerHttpRequest; @@ -36,7 +37,7 @@ public abstract class AbstractMappingJacksonResponseBodyAdvice implements Respon @Override public boolean supports(MethodParameter returnType, Class<? extends HttpMessageConverter<?>> converterType) { - return MappingJackson2HttpMessageConverter.class.equals(converterType); + return AbstractJackson2HttpMessageConverter.class.isAssignableFrom(converterType); } @Override
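A minimal sketch of the scenario this fix enables (the controller, view interface, and field names are hypothetical): because MappingJackson2XmlHttpMessageConverter also extends AbstractJackson2HttpMessageConverter, the broadened supports() check means the advice now applies @JsonView filtering to XML responses, not only JSON ones.

import com.fasterxml.jackson.annotation.JsonView;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;

@Controller
public class UserController {

  // Hypothetical marker interface used as a serialization view.
  public interface PublicView {}

  public static class User {
    @JsonView(PublicView.class)
    public String username = "jdoe";
    public String password = "secret"; // no view: omitted when a view is active
  }

  // Before the fix, the advice only matched the JSON converter, so an XML
  // response rendered by MappingJackson2XmlHttpMessageConverter ignored
  // @JsonView and serialized the password field too.
  @RequestMapping(value = "/user", produces = "application/xml")
  @ResponseBody
  @JsonView(PublicView.class)
  public User user() {
    return new User();
  }
}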
37a9214ecf57b5c85c866a90a5e1a52cc3092e8e
intellij-community
symlink support in vfs & SOE protection
a
https://github.com/JetBrains/intellij-community
diff --git a/platform/platform-api/src/com/intellij/openapi/roots/impl/FileIndexImplUtil.java b/platform/platform-api/src/com/intellij/openapi/roots/impl/FileIndexImplUtil.java index 94c2485a5f38f..c922e2388dfb6 100644 --- a/platform/platform-api/src/com/intellij/openapi/roots/impl/FileIndexImplUtil.java +++ b/platform/platform-api/src/com/intellij/openapi/roots/impl/FileIndexImplUtil.java @@ -33,7 +33,7 @@ public static boolean iterateRecursively(@NotNull final VirtualFile root, @NotNu VfsUtilCore.visitChildrenRecursively(root, new VirtualFileVisitor() { @Override public boolean visitFile(VirtualFile file) { - if (!file.isValid() || !filter.accept(file)) return true; + if (!file.isValid() || !filter.accept(file)) return false; if (!iterator.processFile(file)) throw new StopItException(); return true;
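A minimal sketch of why the one-line change matters, assuming VirtualFileVisitor's contract that returning false from visitFile prunes that file's subtree (the dot-file filter below is illustrative, not the real VirtualFileFilter):

import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileVisitor;

public class PruningIterationSketch {
  static void iterate(VirtualFile root) {
    VfsUtilCore.visitChildrenRecursively(root, new VirtualFileVisitor() {
      @Override
      public boolean visitFile(VirtualFile file) {
        // Returning false stops the visitor from descending into this
        // file's children; the old code returned true, so the walk kept
        // recursing into directories the filter had already rejected.
        if (file.getName().startsWith(".")) return false;
        System.out.println(file.getPath());
        return true;
      }
    });
  }
}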
05ef7eb774bd373c627ec0a5601ffb4bb245a45c
intellij-community
Major changes in data structures. Reimplementation & beautification.
p
https://github.com/JetBrains/intellij-community
diff --git a/jps/antLayout/antlayout.iml b/jps/antLayout/antlayout.iml index 1c77fe401738c..63b6c029c41ca 100644 --- a/jps/antLayout/antlayout.iml +++ b/jps/antLayout/antlayout.iml @@ -5,7 +5,7 @@ <content url="file://$MODULE_DIR$"> <sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" /> </content> - <orderEntry type="inheritedJdk" /> + <orderEntry type="jdk" jdkName="IDEA jdk" jdkType="JavaSDK" /> <orderEntry type="sourceFolder" forTests="false" /> <orderEntry type="library" name="Ant" level="project" /> </component> diff --git a/jps/jps.iml b/jps/jps.iml index 725fd836a13f0..6bc3adbc818f0 100644 --- a/jps/jps.iml +++ b/jps/jps.iml @@ -7,22 +7,15 @@ <sourceFolder url="file://$MODULE_DIR$/testSrc" isTestSource="true" /> <excludeFolder url="file://$MODULE_DIR$/build" /> </content> - <orderEntry type="inheritedJdk" /> + <orderEntry type="jdk" jdkName="IDEA jdk" jdkType="JavaSDK" /> <orderEntry type="sourceFolder" forTests="false" /> <orderEntry type="library" name="Groovy" level="project" /> <orderEntry type="library" name="Ant" level="project" /> <orderEntry type="library" name="Javac2" level="project" /> <orderEntry type="module" module-name="antlayout" /> <orderEntry type="library" scope="TEST" name="JUnit" level="project" /> - <orderEntry type="module-library"> - <library name="Gant"> - <CLASSES> - <root url="jar:///usr/share/groovy/lib/gant-1.8.1.jar!/" /> - </CLASSES> - <JAVADOC /> - <SOURCES /> - </library> - </orderEntry> + <orderEntry type="library" exported="" name="Gant" level="application" /> + <orderEntry type="library" name="Gant" level="application" /> </component> </module> diff --git a/jps/plugins/gwt/gwt.iml b/jps/plugins/gwt/gwt.iml index ced8806f42f00..8a514adfdbda9 100644 --- a/jps/plugins/gwt/gwt.iml +++ b/jps/plugins/gwt/gwt.iml @@ -5,7 +5,7 @@ <content url="file://$MODULE_DIR$"> <sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" /> </content> - <orderEntry type="inheritedJdk" /> + <orderEntry type="jdk" jdkName="IDEA jdk" jdkType="JavaSDK" /> <orderEntry type="sourceFolder" forTests="false" /> <orderEntry type="module" module-name="jps" /> <orderEntry type="library" name="Groovy" level="project" /> diff --git a/jps/plugins/javaee/javaee.iml b/jps/plugins/javaee/javaee.iml index f8fe61eb4d9e4..b21e687f59bb2 100644 --- a/jps/plugins/javaee/javaee.iml +++ b/jps/plugins/javaee/javaee.iml @@ -5,7 +5,7 @@ <content url="file://$MODULE_DIR$"> <sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" /> </content> - <orderEntry type="inheritedJdk" /> + <orderEntry type="jdk" jdkName="IDEA jdk" jdkType="JavaSDK" /> <orderEntry type="sourceFolder" forTests="false" /> <orderEntry type="library" name="Groovy" level="project" /> <orderEntry type="module" module-name="jps" /> diff --git a/jps/plugins/jpa/jpa.iml b/jps/plugins/jpa/jpa.iml index f8fe61eb4d9e4..b21e687f59bb2 100644 --- a/jps/plugins/jpa/jpa.iml +++ b/jps/plugins/jpa/jpa.iml @@ -5,7 +5,7 @@ <content url="file://$MODULE_DIR$"> <sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" /> </content> - <orderEntry type="inheritedJdk" /> + <orderEntry type="jdk" jdkName="IDEA jdk" jdkType="JavaSDK" /> <orderEntry type="sourceFolder" forTests="false" /> <orderEntry type="library" name="Groovy" level="project" /> <orderEntry type="module" module-name="jps" /> diff --git a/jps/src/org/jetbrains/ether/DirectoryScanner.java b/jps/src/org/jetbrains/ether/DirectoryScanner.java index bc318ae1a0c74..ca50b38b626a6 100644 --- a/jps/src/org/jetbrains/ether/DirectoryScanner.java +++ 
b/jps/src/org/jetbrains/ether/DirectoryScanner.java @@ -3,8 +3,7 @@ import javax.xml.transform.Result; import java.io.File; import java.io.FileFilter; -import java.util.ArrayList; -import java.util.List; +import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -15,43 +14,92 @@ * Time: 2:01 * To change this template use File | Settings | File Templates. */ + public class DirectoryScanner { - private static FileFilter myDirectoryFilter = new FileFilter() { - public boolean accept (File f) { - final String name = f.getName(); - return f.isDirectory() && !name.equals(".") && !name.equals("..") ; + public static class Result { + final Set<ProjectWrapper.FileWrapper> myFiles; + long myLatest; + long myEarliest; + + public Result () { + myFiles = new HashSet<ProjectWrapper.FileWrapper> (); + myLatest = 0; + myEarliest = Long.MAX_VALUE; + } + + public void update (final ProjectWrapper.FileWrapper w) { + final long t = w.getStamp (); + + if (t > myLatest) + myLatest = t; + + if (t< myEarliest) + myEarliest = t; + + myFiles.add(w); + } + + public long getEarliest () { + return myEarliest; + } + + public long getLatest () { + return myLatest; + } + + public Set<ProjectWrapper.FileWrapper> getFiles () { + return myFiles; + } + } + + private static class Crawler { + final Result myResult; + final FileFilter myFilter; + final ProjectWrapper myProjectWrapper; + + public Crawler (final FileFilter ff, final ProjectWrapper pw) { + myResult = new Result(); + myFilter = ff; + myProjectWrapper = pw; + } + + public Result getResult () { + return myResult; } - }; - private static FileFilter filterByExtensions (final String[] exts) { - return new FileFilter (){ - public boolean accept (File path) { - final String filename = path.getName(); + public void run(File root) { + if (root.exists()) { + final File[] files = root.listFiles(myFilter); - for (int i = 0; i<exts.length; i++) { - if (filename.endsWith(exts[i])) - return true; + for (int i = 0; i < files.length; i++) { + myResult.update (myProjectWrapper.new FileWrapper(files[i])); } - return false; + final File[] subdirs = root.listFiles(myDirectoryFilter); + + for (int i = 0; i < subdirs.length; i++) { + run(subdirs[i]); + } } - }; + } } - public static class Result { - public List<String> myFiles = new ArrayList<String> (); - public long myEarliest = Long.MAX_VALUE; - public long myLatest = 0; - } + private static FileFilter myDirectoryFilter = new FileFilter() { + public boolean accept(File f) { + final String name = f.getName(); + + return f.isDirectory() && !name.equals(".") && !name.equals(".."); + } + }; private static FileFilter myTrueFilter = new FileFilter() { - public boolean accept (File s) { - return true; - } - }; + public boolean accept(File s) { + return s.isFile(); + } + }; - private static FileFilter createFilter (final List<String> excludes) { + private static FileFilter createFilter(final Collection<String> excludes) { if (excludes == null) { return myTrueFilter; } @@ -62,7 +110,7 @@ private static FileFilter createFilter (final List<String> excludes) { StringBuffer alternative = new StringBuffer(); if (exclude != null) { - for (int i = 0; i<exclude.length(); i++) { + for (int i = 0; i < exclude.length(); i++) { final char c = exclude.charAt(i); switch (c) { @@ -104,11 +152,11 @@ private static FileFilter createFilter (final List<String> excludes) { final Pattern patt = Pattern.compile(buf.toString()); return new FileFilter() { - public boolean accept (File f) { + public boolean accept(File f) { final Matcher m 
= patt.matcher(f.getAbsolutePath()); final boolean ok = !m.matches(); - return ok; + return ok && f.isFile(); } }; } @@ -116,36 +164,21 @@ public boolean accept (File f) { return myTrueFilter; } - public static Result getFiles (final String root, final List<String> excludes) { - final Result result = new Result (); - final FileFilter ff = createFilter(excludes); - - new Object(){ - public void run (File root) { - if (root.exists()) { - final File[] files = root.listFiles(ff); + public static Result getFiles(final String root, final Set<String> excludes, final ProjectWrapper pw) { + final Crawler cw = new Crawler(createFilter(excludes), pw); - for (int i = 0; i<files.length; i++) { - long t = files[i].lastModified(); + if (root != null) + cw.run(new File(pw.getAbsolutePath(root))); - if (t > result.myLatest) - result.myLatest = t; - - if (t < result.myEarliest) - result.myEarliest = t; - - result.myFiles.add(files[i].getAbsolutePath()); - } + return cw.getResult(); + } - final File[] subdirs = root.listFiles(myDirectoryFilter); + public static Result getFiles(final Set<String> roots, final Set<String> excludes, final ProjectWrapper pw) { + final Crawler cw = new Crawler(createFilter(excludes), pw); - for (int i=0; i<subdirs.length; i++) { - run (subdirs [i]); - } - } - } - }.run(new File (root)); + for (String root : roots) + cw.run(new File(pw.getAbsolutePath(root))); - return result; + return cw.getResult(); } } diff --git a/jps/src/org/jetbrains/ether/Main.java b/jps/src/org/jetbrains/ether/Main.java index 2809ad79383b9..816e657b6e76d 100644 --- a/jps/src/org/jetbrains/ether/Main.java +++ b/jps/src/org/jetbrains/ether/Main.java @@ -134,14 +134,14 @@ public static void main(String[] args) { } for (String prj : projects) { - final ProjectWrapper project = new ProjectWrapper(prj); boolean saved = false; + ProjectWrapper project = null; switch (getAction()) { case CLEAN: - System.out.println("Cleaning project \"" + prj + "\""); - project.load(); + project = ProjectWrapper.load(prj); + project.clean(); project.save(); saved = true; @@ -149,7 +149,7 @@ public static void main(String[] args) { case REBUILD: System.out.println("Rebuilding project \"" + prj + "\""); - project.load(); + project = ProjectWrapper.load(prj); project.rebuild(); project.save(); saved = true; @@ -162,13 +162,13 @@ public static void main(String[] args) { final String module = ((Options.Value) make).get(); System.out.println("Making module \"" + module + "\" in project \"" + prj + "\""); - project.load(); + project = ProjectWrapper.load(prj); project.makeModule(module, doForce(), doTests()); project.save(); saved = true; } else if (make instanceof Options.Switch) { System.out.println("Making project \"" + prj + "\""); - project.load(); + project = ProjectWrapper.load(prj); project.make(doForce(), doTests()); project.save(); saved = true; @@ -179,14 +179,14 @@ public static void main(String[] args) { final Options.Argument inspect = doInspect(); if (inspect instanceof Options.Switch) { - project.load(); + project = ProjectWrapper.load(prj); project.report(); if (doSave()) { project.save(); saved = true; } } else if (inspect instanceof Options.Value) { - project.load(); + project = ProjectWrapper.load(prj); project.report(((Options.Value) inspect).get()); if (doSave()) { project.save(); @@ -195,7 +195,7 @@ public static void main(String[] args) { } if (doSave() && !saved) { - project.load(); + project = ProjectWrapper.load(prj); project.save(); } } diff --git a/jps/src/org/jetbrains/ether/ModuleStatus.java 
b/jps/src/org/jetbrains/ether/ModuleStatus.java deleted file mode 100644 index 5435bada96398..0000000000000 --- a/jps/src/org/jetbrains/ether/ModuleStatus.java +++ /dev/null @@ -1,62 +0,0 @@ -package org.jetbrains.ether; - -import java.io.Serializable; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -/** - * Created by IntelliJ IDEA. - * User: db - * Date: 19.11.10 - * Time: 3:40 - * To change this template use File | Settings | File Templates. - */ -public class ModuleStatus { - private static Pattern myPattern = Pattern.compile("([^ ]+) ([0-9]+) ([0-9]+) ([0-9]+) ([0-9]+)"); - - String myName; - long mySourceStamp; - long myOutputStamp; - long myTestSourceStamp; - long myTestOutputStamp; - - public ModuleStatus(String name, long ss, long os, long tss, long tos) { - myName = name; - mySourceStamp = ss; - myOutputStamp = os; - myTestSourceStamp = tss; - myTestOutputStamp = tos; - } - - public String getName () { - return myName; - } - - public String toString () { - return myName + " " + mySourceStamp + " " + myOutputStamp + " " + myTestSourceStamp + " " + myTestOutputStamp; - } - - public ModuleStatus(final String s) { - final Matcher m = myPattern.matcher(s); - - if (m.matches()) { - myName = m.group(1); - mySourceStamp = Long.parseLong(m.group(2)); - myOutputStamp = Long.parseLong(m.group(3)); - myTestSourceStamp = Long.parseLong(m.group(4)); - myTestOutputStamp = Long.parseLong(m.group(5)); - } - else - System.err.println("Error converting string \"" + s + "\" to ModuleStatus"); - } - - private static boolean wiseCompare (long input, long output) { - final boolean result = (input > 0 && output == Long.MAX_VALUE) || (output <= input); - return result; - } - - public boolean isOutdated(boolean tests) { - final boolean result = wiseCompare(mySourceStamp, myOutputStamp) || (tests && wiseCompare(myTestSourceStamp, myTestOutputStamp)); - return result; - } -} diff --git a/jps/src/org/jetbrains/ether/ProjectSnapshot.java b/jps/src/org/jetbrains/ether/ProjectSnapshot.java deleted file mode 100644 index 421f298272bea..0000000000000 --- a/jps/src/org/jetbrains/ether/ProjectSnapshot.java +++ /dev/null @@ -1,73 +0,0 @@ -package org.jetbrains.ether; - -import java.io.*; -import java.util.HashMap; -import java.util.Map; - -/** - * Created by IntelliJ IDEA. - * User: db - * Date: 19.11.10 - * Time: 3:05 - * To change this template use File | Settings | File Templates. 
- */ -public class ProjectSnapshot { - String myProjectStructure; - Map<String, ModuleStatus> myModuleHistories; - - public ProjectSnapshot(final String prjStruct, final Map<String, ModuleStatus> moduleHistories) { - myProjectStructure = prjStruct; - myModuleHistories = moduleHistories; - } - - public String toString () { - StringBuffer buf = new StringBuffer(); - - buf.append(myModuleHistories.size() + "\n"); - - for (ModuleStatus h : myModuleHistories.values()) { - buf.append(h.toString() + "\n"); - } - - buf.append(myProjectStructure); - - return buf.toString(); - } - - public ProjectSnapshot(final String s) { - BufferedReader rd = new BufferedReader(new StringReader(s)); - - try { - final int n = Integer.parseInt(rd.readLine()); - - myModuleHistories = new HashMap<String, ModuleStatus>(); - - for (int i = 0; i<n; i++) { - ModuleStatus h = new ModuleStatus(rd.readLine()); - myModuleHistories.put(h.getName(), h); - } - - StringBuffer buf = new StringBuffer(); - - while (true) { - final String str = rd.readLine(); - - if (str == null) - break; - - buf.append(str); - buf.append("\n"); - } - - - myProjectStructure = buf.toString(); - } - catch (IOException e) { - e.printStackTrace(); - } - } - - public boolean structureChanged (final ProjectSnapshot p) { - return ! p.myProjectStructure.equals(myProjectStructure); - } -} diff --git a/jps/src/org/jetbrains/ether/ProjectWrapper.java b/jps/src/org/jetbrains/ether/ProjectWrapper.java index eeeaaff01707c..cf31cadfb1e43 100644 --- a/jps/src/org/jetbrains/ether/ProjectWrapper.java +++ b/jps/src/org/jetbrains/ether/ProjectWrapper.java @@ -1,11 +1,9 @@ package org.jetbrains.ether; -import com.sun.org.apache.xpath.internal.operations.Mod; import org.codehaus.gant.GantBinding; -import org.jetbrains.jps.ClasspathItem; -import org.jetbrains.jps.Module; -import org.jetbrains.jps.Project; +import org.jetbrains.jps.*; import org.jetbrains.jps.idea.IdeaProjectLoader; +import org.jetbrains.jps.resolvers.PathEntry; import java.io.*; import java.util.*; @@ -17,6 +15,7 @@ * Time: 2:58 * To change this template use File | Settings | File Templates. */ + public class ProjectWrapper { // Home directory private static final String myHomeDir = System.getProperty("user.home"); @@ -24,16 +23,182 @@ public class ProjectWrapper { // JPS directory private static final String myJPSDir = ".jps"; + // IDEA project structure directory name + private static final String myIDEADir = ".idea"; + // JPS directory initialization - private static void initJPSDirectory () { + private static void initJPSDirectory() { final File f = new File(myHomeDir + File.separator + myJPSDir); - if (! f.exists()) + if (!f.exists()) f.mkdir(); } + private static <T> List<T> sort(final Collection<T> coll, final Comparator<? super T> comp) { + List<T> list = new ArrayList<T>(); + + for (T elem : coll) { + if (elem != null) { + list.add(elem); + } + } + + Collections.sort(list, comp); + + return list; + } + + private static <T extends Comparable<? 
super T>> List<T> sort(final Collection<T> coll) { + return sort(coll, new Comparator<T>() { + public int compare(T a, T b) { + return a.compareTo(b); + } + }); + } + + private interface Writable extends Comparable { + public void write(BufferedWriter w); + } + + private static void writeln(final BufferedWriter w, final Collection<String> c, final String desc) { + writeln(w, Integer.toString(c.size())); + + if (c instanceof List) { + for (String e : c) { + writeln(w, e); + } + } else { + final List<String> sorted = sort(c); + + for (String e : sorted) { + writeln(w, e); + } + } + } + + private static void writeln(final BufferedWriter w, final Collection<? extends Writable> c) { + writeln(w, Integer.toString(c.size())); + + if (c instanceof List) { + for (Writable e : c) { + e.write(w); + } + } else { + final List<? extends Writable> sorted = sort(c); + + for (Writable e : sorted) { + e.write(w); + } + } + } + + private static void writeln(final BufferedWriter w, final String s) { + try { + w.write(s); + w.newLine(); + } catch (IOException e) { + e.printStackTrace(); + } + } + + private interface Constructor<T> { + public T read(BufferedReader r); + } + + private static Constructor<String> myStringConstructor = new Constructor<String>() { + public String read(final BufferedReader r) { + try { + return r.readLine(); + } catch (IOException e) { + e.printStackTrace(); + return null; + } + } + }; + + private static <T> Collection<T> readMany(final BufferedReader r, final Constructor<T> c, final Collection<T> acc) { + final int size = readInt(r); + + for (int i = 0; i < size; i++) { + acc.add(c.read(r)); + } + + return acc; + } + + private static String lookString(final BufferedReader r) { + try { + r.mark(256); + final String s = r.readLine(); + r.reset(); + + return s; + } catch (IOException e) { + e.printStackTrace(); + return null; + } + } + + private static void readTag(final BufferedReader r, final String tag) { + try { + final String s = r.readLine(); + + if (!s.equals(tag)) + System.err.println("Parsing error: expected \"" + tag + "\", but found \"" + s + "\""); + } catch (IOException e) { + e.printStackTrace(); + } + } + + private static String readString(final BufferedReader r) { + try { + return r.readLine(); + } catch (IOException e) { + e.printStackTrace(); + return null; + } + } + + private static long readLong(final BufferedReader r) { + final String s = readString(r); + + try { + return Long.parseLong(s); + } catch (Exception n) { + System.err.println("Parsing error: expected long, but found \"" + s + "\""); + return 0; + } + } + + private static int readInt(final BufferedReader r) { + final String s = readString(r); + + try { + return Integer.parseInt(s); + } catch (Exception n) { + System.err.println("Parsing error: expected integer, but found \"" + s + "\""); + return 0; + } + } + + private static String readStringAttribute(final BufferedReader r, final String tag) { + try { + final String s = r.readLine(); + + if (s.startsWith(tag)) + return s.substring(tag.length()); + + System.err.println("Parsing error: expected \"" + tag + "\", but found \"" + s + "\""); + + return null; + } catch (IOException e) { + e.printStackTrace(); + return null; + } + } + // File separator replacement - private static final char myFileSeparatorReplacement = '-'; + private static final char myFileSeparatorReplacement = '.'; // Original JPS Project private final Project myProject; @@ -41,60 +206,596 @@ private static void initJPSDirectory () { // Project directory private final String myRoot; - // 
Project snapshot file + // Project snapshot file name private final String myProjectSnapshot; - // Project history - private ProjectSnapshot mySnapshot; - private ProjectSnapshot myPresent; + public interface ClasspathItemWrapper extends Writable { + public List<String> getClassPath(ClasspathKind kind); + } + + public final Constructor<LibraryWrapper> myLibraryWrapperConstructor = + new Constructor<LibraryWrapper>() { + public LibraryWrapper read(final BufferedReader r) { + return new LibraryWrapper(r); + } + }; + + public class LibraryWrapper implements ClasspathItemWrapper { + final String myName; + final List<String> myClassPath; + + public void write(final BufferedWriter w) { + writeln(w, "Library:" + myName); + writeln(w, "Classpath:"); + writeln(w, myClassPath, null); + } + + public LibraryWrapper(final BufferedReader r) { + myName = readStringAttribute(r, "Library:"); + + readTag(r, "Classpath:"); + myClassPath = (List<String>) readMany(r, myStringConstructor, new ArrayList<String>()); + } + + public LibraryWrapper(final Library lib) { + lib.forceInit(); + myName = lib.getName(); + myClassPath = (List<String>) getRelativePaths(lib.getClasspath(), new ArrayList<String>()); + } + + public String getName() { + return myName; + } + + public List<String> getClassPath(final ClasspathKind kind) { + return myClassPath; + } + + public int compareTo(Object o) { + return getName().compareTo(((LibraryWrapper) o).getName()); + } + } + + public final Constructor<ClasspathItemWrapper> myClasspathItemWrapperConstructor = + new Constructor<ClasspathItemWrapper>() { + public ClasspathItemWrapper read(final BufferedReader r) { + final String s = lookString(r); + if (s.startsWith("Library:")) { + return new LibraryWrapper(r); + } + if (s.startsWith("Module:")) { + return new ModuleWrapper(r); + } else { + return new GenericClasspathItemWrapper(r); + } + } + }; + + public class GenericClasspathItemWrapper implements ClasspathItemWrapper { + final List<String> myClassPath; + final String myType; + + public GenericClasspathItemWrapper(final ClasspathItem item) { + if (item instanceof PathEntry) + myType = "PathEntry"; + else if (item instanceof JavaSdk) + myType = "JavaSdk"; + else if (item instanceof Sdk) + myType = "Sdk"; + else + myType = null; + + myClassPath = (List<String>) getRelativePaths(item.getClasspathRoots(null), new ArrayList<String>()); + } + + public GenericClasspathItemWrapper(final BufferedReader r) { + myType = readString(r); + + readTag(r, "Classpath:"); + myClassPath = (List<String>) readMany(r, myStringConstructor, new ArrayList<String>()); + } + + public String getType() { + return myType; + } + + public List<String> getClassPath(final ClasspathKind kind) { + return myClassPath; + } + + public void write(final BufferedWriter w) { + writeln(w, myType); + writeln(w, "Classpath:"); + writeln(w, myClassPath, ""); + } + + public int compareTo(Object o) { + final GenericClasspathItemWrapper w = (GenericClasspathItemWrapper) o; + final int c = getType().compareTo(w.getType()); + return + c == 0 ? + (new Object() { + public int compare(Iterator<String> x, Iterator<String> y) { + if (x.hasNext()) { + if (y.hasNext()) { + final int c = x.next().compareTo(y.next()); + + return c == 0 ? 
compare(x, y) : c; + } + + return 1; + } else if (y.hasNext()) { + return -1; + } + + return 0; + } + } + ).compare(getClassPath(null).iterator(), w.getClassPath(null).iterator()) + : c; + } + } + + public final Constructor<FileWrapper> myFileWrapperConstructor = + new Constructor<FileWrapper>() { + public FileWrapper read(final BufferedReader r) { + return new FileWrapper(r); + } + }; + + public class FileWrapper implements Writable { + final String myName; + final long myModificationTime; + + FileWrapper(final File f) { + myName = getRelativePath(f.getAbsolutePath()); + myModificationTime = f.lastModified(); + } + + FileWrapper(final BufferedReader r) { + myName = readString(r); + myModificationTime = 0; // readLong(r); + } + + public String getName() { + return myName; + } + + public long getStamp() { + return myModificationTime; + } + + public void write(final BufferedWriter w) { + writeln(w, getName()); + // writeln(w, Long.toString(getStamp())); + } + + public int compareTo(Object o) { + return getName().compareTo(((FileWrapper) o).getName()); + } + } + + public final Constructor<ModuleWrapper> myModuleWrapperConstructor = + new Constructor<ModuleWrapper>() { + public ModuleWrapper read(final BufferedReader r) { + return new ModuleWrapper(r); + } + }; + + public class ModuleWrapper implements ClasspathItemWrapper { + + private class Properties implements Writable { + + final Set<String> myRoots; + final Set<FileWrapper> mySources; + + final String myOutput; + final Set<FileWrapper> myOutputs; + + final long myLatestSource; + final long myEarliestSource; + + final long myLatestOutput; + final long myEarliestOutput; + + public void write(final BufferedWriter w) { + writeln(w, "Roots:"); + writeln(w, myRoots, null); + + writeln(w, "Sources:"); + writeln(w, mySources); + + writeln(w, "Output:"); + writeln(w, myOutput == null ? "" : myOutput); + + writeln(w, "Outputs:"); + writeln(w, myOutputs); + + //writeln(w, "EarliestSource:"); + //writeln(w, Long.toString(myEarliestSource)); + + //writeln(w, "LatestSource:"); + //writeln(w, Long.toString(myLatestSource)); + + //writeln(w, "EarliestOutput:"); + //writeln(w, Long.toString(myEarliestOutput)); + + //writeln(w, "LatestOutput:"); + //writeln(w, Long.toString(myLatestOutput)); + } + + public Properties(final BufferedReader r) { + readTag(r, "Roots:"); + myRoots = (Set<String>) readMany(r, myStringConstructor, new HashSet<String>()); + + readTag(r, "Sources:"); + mySources = (Set<FileWrapper>) readMany(r, myFileWrapperConstructor, new HashSet<FileWrapper>()); + + readTag(r, "Output:"); + final String s = readString(r); + myOutput = s.equals("") ? 
null : s; + + readTag(r, "Outputs:"); + myOutputs = (Set<FileWrapper>) readMany(r, myFileWrapperConstructor, new HashSet<FileWrapper>()); + + //readTag(r, "EarliestSource:"); + myEarliestSource = 0;//readLong(r); - public ProjectWrapper(final String prjDir) { + //readTag(r, "LatestSource:"); + myLatestSource = 0;//readLong(r); + + //readTag(r, "EarliestOutput:"); + myEarliestOutput = 0;//readLong(r); + + //readTag(r, "LatestOutput:"); + myLatestOutput = 0;//readLong(r); + } + + public Properties(final List<String> sources, final String output, final Set<String> excludes) { + myRoots = (Set<String>) getRelativePaths(sources, new HashSet<String>()); + + { + final DirectoryScanner.Result result = DirectoryScanner.getFiles(myRoots, excludes, ProjectWrapper.this); + mySources = result.getFiles(); + myEarliestSource = result.getEarliest(); + myLatestSource = result.getLatest(); + } + + { + myOutput = getRelativePath(output); + final DirectoryScanner.Result result = DirectoryScanner.getFiles(myOutput, excludes, ProjectWrapper.this); + myOutputs = result.getFiles(); + myEarliestOutput = result.getEarliest(); + myLatestOutput = result.getLatest(); + } + } + + public Set<String> getRoots() { + return myRoots; + } + + public Set<FileWrapper> getSources() { + return mySources; + } + + public String getOutputPath() { + return myOutput; + } + + public Set<FileWrapper> getOutputs() { + return myOutputs; + } + + public long getEarliestOutput() { + return myEarliestOutput; + } + + public long getLatestOutput() { + return myLatestOutput; + } + + public long getEarliestSource() { + return myEarliestSource; + } + + public long getLatestSource() { + return myLatestSource; + } + + public boolean emptySource() { + return mySources.isEmpty(); + } + + public boolean emptyOutput() { + return myOutputs.isEmpty(); + } + + public boolean isOutdated() { + return (!emptySource() && emptyOutput()) || (getLatestSource() > getEarliestOutput()); + } + + public int compareTo(Object o) { + return 0; + } + } + + final String myName; + final Properties mySource; + final Properties myTest; + + final Set<String> myExcludes; + + final Module myModule; + List<ClasspathItemWrapper> myDependsOn; + + final Set<LibraryWrapper> myLibraries; + + public void write(final BufferedWriter w) { + writeln(w, "Module:" + myName); + + writeln(w, "SourceProperties:"); + mySource.write(w); + + writeln(w, "TestProperties:"); + myTest.write(w); + + writeln(w, "Excludes:"); + writeln(w, myExcludes, null); + + writeln(w, "Libraries:"); + writeln(w, myLibraries); + + writeln(w, "Dependencies:"); + writeln(w, dependsOn()); + } + + public ModuleWrapper(final BufferedReader r) { + myModule = null; + myName = readStringAttribute(r, "Module:"); + + readTag(r, "SourceProperties:"); + mySource = new Properties(r); + + readTag(r, "TestProperties:"); + myTest = new Properties(r); + + readTag(r, "Excludes:"); + myExcludes = (Set<String>) readMany(r, myStringConstructor, new HashSet<String>()); + + readTag(r, "Libraries:"); + myLibraries = (Set<LibraryWrapper>) readMany(r, myLibraryWrapperConstructor, new HashSet<LibraryWrapper>()); + + readTag(r, "Dependencies:"); + myDependsOn = (List<ClasspathItemWrapper>) readMany(r, myClasspathItemWrapperConstructor, new ArrayList<ClasspathItemWrapper>()); + } + + public ModuleWrapper(final Module m) { + m.forceInit(); + myModule = m; + myDependsOn = null; + myName = m.getName(); + myExcludes = (Set<String>) getRelativePaths(m.getExcludes(), new HashSet<String>()); + mySource = new Properties(m.getSourceRoots(), 
m.getOutputPath(), myExcludes); + myTest = new Properties(m.getTestRoots(), m.getTestOutputPath(), myExcludes); + + myLibraries = new HashSet<LibraryWrapper>(); + + for (Library lib : m.getLibraries().values()) { + myLibraries.add(new LibraryWrapper(lib)); + } + } + + public String getName() { + return myName; + } + + public Set<String> getSourceRoots() { + return mySource.getRoots(); + } + + public Set<FileWrapper> getSourceFiles() { + return mySource.getSources(); + } + + public String getOutputPath() { + return mySource.getOutputPath(); + } + + public Set<String> getTestSourceRoots() { + return myTest.getRoots(); + } + + public Set<FileWrapper> getTestSourceFiles() { + return myTest.getSources(); + } + + public String getTestOutputPath() { + return myTest.getOutputPath(); + } + + public List<ClasspathItemWrapper> dependsOn() { + if (myDependsOn != null) + return myDependsOn; + + myDependsOn = new ArrayList<ClasspathItemWrapper>(); + + for (Module.ModuleDependency dep : myModule.getDependencies()) { + final ClasspathItem cpi = dep.getItem(); + + if (cpi instanceof Module) { + myDependsOn.add(getModule(((Module) cpi).getName())); + } else if (cpi instanceof Library) { + myDependsOn.add(new LibraryWrapper((Library) cpi)); + } else { + myDependsOn.add(new GenericClasspathItemWrapper(cpi)); + } + } + + return myDependsOn; + } + + public List<String> getClassPath(final ClasspathKind kind) { + final List<String> result = new ArrayList<String>(); + + result.add(getOutputPath()); + + if (kind.isTestsIncluded()) { + result.add(getTestOutputPath()); + } + + return result; + } + + public boolean isOutdated(final boolean tests) { + return mySource.isOutdated() || (tests && myTest.isOutdated()); + } + + public int compareTo(Object o) { + return getName().compareTo(((ModuleWrapper) o).getName()); + } + } + + final Map<String, ModuleWrapper> myModules = new HashMap<String, ModuleWrapper>(); + final Map<String, LibraryWrapper> myLibraries = new HashMap<String, LibraryWrapper>(); + final ProjectWrapper myHistory; + + public ModuleWrapper getModule(final String name) { + return myModules.get(name); + } + + public LibraryWrapper getLibrary(final String name) { + return myLibraries.get(name); + } + + public Collection<LibraryWrapper> getLibraries() { + return myLibraries.values(); + } + + public Collection<ModuleWrapper> getModules() { + return myModules.values(); + } + + private ProjectWrapper(final String prjDir) { myProject = new Project(new GantBinding()); - myRoot = new File (prjDir).getAbsolutePath(); + myRoot = new File(prjDir).getAbsolutePath(); myProjectSnapshot = myHomeDir + File.separator + myJPSDir + File.separator + myRoot.replace(File.separatorChar, myFileSeparatorReplacement); + + IdeaProjectLoader.loadFromPath(myProject, getAbsolutePath(myIDEADir)); + + for (Module m : myProject.getModules().values()) { + myModules.put(m.getName(), new ModuleWrapper(m)); + } + + for (Library l : myProject.getLibraries().values()) { + myLibraries.put(l.getName(), new LibraryWrapper(l)); + } + + myHistory = loadSnapshot(); } - private String getProjectSnapshotFileName() { - return myProjectSnapshot; + public String getAbsolutePath(final String relative) { + if (relative == null) + return relative; + + if (new File(relative).isAbsolute()) + return relative; + + return myRoot + File.separator + relative; } - private ProjectSnapshot loadSnapshot() { - initJPSDirectory(); + public String getRelativePath(final String absolute) { + if (absolute == null) + return absolute; - ProjectSnapshot result = null; + if 
(absolute.startsWith(myRoot)) { + return absolute.substring(myRoot.length() + 1); + } - try { - final String path = getProjectSnapshotFileName(); + return absolute; + } - byte[] buffer = new byte[(int) new File(path).length()]; + public Collection<String> getAbsolutePaths(final Collection<String> paths, final Collection<String> result) { + for (String path : paths) { + if (path != null) + result.add(getAbsolutePath(path)); + } - BufferedInputStream f = new BufferedInputStream(new FileInputStream(path)); + return result; + } - f.read(buffer); + public Collection<String> getRelativePaths(final Collection<String> paths, final Collection<String> result) { + for (String path : paths) { + if (path != null) + result.add(getRelativePath(path)); + } + + return result; + } + + private boolean isHistory() { + return myProject == null; + } + + private ProjectWrapper(final BufferedReader r) { + myProject = null; + myHistory = null; - f.close(); + myRoot = readStringAttribute(r, "Root:"); + myProjectSnapshot = myHomeDir + File.separator + myJPSDir + File.separator + myRoot.replace(File.separatorChar, myFileSeparatorReplacement); - result = new ProjectSnapshot(new String(buffer)); + readTag(r, "Libraries:"); + final Set<LibraryWrapper> libs = (Set<LibraryWrapper>) readMany(r, myLibraryWrapperConstructor, new HashSet<LibraryWrapper>()); + + for (LibraryWrapper l : libs) { + myLibraries.put(l.getName(), l); } - catch (FileNotFoundException e) { + readTag(r, "Modules:"); + final Set<ModuleWrapper> mods = (Set<ModuleWrapper>) readMany(r, myModuleWrapperConstructor, new HashSet<ModuleWrapper>()); + + for (ModuleWrapper m : mods) { + myModules.put(m.getName(), m); } - catch (IOException e) { + } + + public void write(final BufferedWriter w) { + writeln(w, "Root:" + myRoot); + + writeln(w, "Libraries:"); + writeln(w, getLibraries()); + + writeln(w, "Modules:"); + writeln(w, getModules()); + } + + private String getProjectSnapshotFileName() { + return myProjectSnapshot; + } + + private ProjectWrapper loadSnapshot() { + initJPSDirectory(); + + try { + final BufferedReader r = new BufferedReader(new FileReader(getProjectSnapshotFileName())); + final ProjectWrapper w = new ProjectWrapper(r); + r.close(); + + return w; + } catch (FileNotFoundException e) { + } catch (IOException e) { e.printStackTrace(); } - return result; + return null; } private void saveSnapshot() { initJPSDirectory(); - final ProjectSnapshot snapshot = StatusCollector.collectHistory(myProject); - try { BufferedWriter bw = new BufferedWriter(new FileWriter(getProjectSnapshotFileName())); - bw.write(snapshot.toString()); + write(bw); bw.close(); } catch (IOException e) { @@ -102,14 +803,12 @@ private void saveSnapshot() { } } - public void load() { - IdeaProjectLoader.loadFromPath(myProject, myRoot); - mySnapshot = loadSnapshot(); - myPresent = StatusCollector.collectHistory(myProject); + public static ProjectWrapper load(final String path) { + return new ProjectWrapper(path); } public void report(final String module) { - final ModuleStatus m = myPresent.myModuleHistories.get(module); + final ModuleWrapper m = getModule(module); if (m == null) { System.out.println("No module \"" + module + "\" found in project \""); @@ -120,10 +819,36 @@ public void report(final String module) { } private boolean structureChanged() { - if (mySnapshot == null) + if (myHistory == null) return true; - return myPresent.structureChanged(mySnapshot); + try { + final StringWriter my = new StringWriter(); + final StringWriter history = new StringWriter(); + + 
myHistory.write(new BufferedWriter(my)); + write(new BufferedWriter(history)); + + my.close(); + history.close(); + + final String myString = my.getBuffer().toString(); + final String hisString = history.getBuffer().toString(); + + FileWriter f1 = new FileWriter("/home/db/tmp/1.jps"); + FileWriter f2 = new FileWriter("/home/db/tmp/2.jps"); + + f1.write(myString); + f2.write(hisString); + + f1.close(); + f2.close(); + + return !myString.equals(hisString); + } catch (IOException e) { + e.printStackTrace(); + return true; + } } public void report() { @@ -131,7 +856,7 @@ public void report() { System.out.println("Project \"" + myRoot + "\" report:"); - if (mySnapshot == null) { + if (myHistory == null) { System.out.println(" no project history found"); } else { if (structureChanged()) { @@ -141,9 +866,9 @@ public void report() { } if (moduleReport) { - for (ModuleStatus mh : myPresent.myModuleHistories.values()) { - System.out.println(" module " + mh.myName + " " + (mh.isOutdated(false) ? "is outdated" : "is up-to-date")); - System.out.println(" module " + mh.myName + " tests " + (mh.isOutdated(true) ? "are outdated" : "are up-to-date")); + for (ModuleWrapper m : myModules.values()) { + System.out.println(" module " + m.getName() + " " + (m.isOutdated(false) ? "is outdated" : "is up-to-date")); + System.out.println(" module " + m.getName() + " tests " + (m.isOutdated(true) ? "are outdated" : "are up-to-date")); } } } @@ -169,7 +894,7 @@ public void make(final boolean force, final boolean tests) { final List<Module> modules = new ArrayList<Module>(); - for (Map.Entry<String, ModuleStatus> entry : myPresent.myModuleHistories.entrySet()) { + for (Map.Entry<String, ModuleWrapper> entry : myModules.entrySet()) { if (entry.getValue().isOutdated(tests)) modules.add(myProject.getModules().get(entry.getKey())); } @@ -189,7 +914,7 @@ public void make(final boolean force, final boolean tests) { private void makeModules(final List<Module> initial, final boolean tests) { final Set<Module> modules = new HashSet<Module>(); - final Map<Module, Set<Module>> reversedDependencies = new HashMap<Module, Set<Module>> (); + final Map<Module, Set<Module>> reversedDependencies = new HashMap<Module, Set<Module>>(); for (Module m : myProject.getModules().values()) { for (Module.ModuleDependency mdep : m.getDependencies()) { @@ -199,7 +924,7 @@ private void makeModules(final List<Module> initial, final boolean tests) { Set<Module> sm = reversedDependencies.get(cpi); if (sm == null) { - sm = new HashSet<Module> (); + sm = new HashSet<Module>(); reversedDependencies.put((Module) cpi, sm); } @@ -244,7 +969,7 @@ public void makeModule(final String modName, final boolean force, final boolean return; } - final ModuleStatus h = myPresent.myModuleHistories.get(modName); + final ModuleWrapper h = getModule(modName); if (h != null && !h.isOutdated(tests) && !force) { System.out.println("Module \"" + modName + "\" in project \"" + myRoot + "\" is up-to-date."); return; diff --git a/jps/src/org/jetbrains/ether/StatusCollector.java b/jps/src/org/jetbrains/ether/StatusCollector.java deleted file mode 100644 index 9d231313ba539..0000000000000 --- a/jps/src/org/jetbrains/ether/StatusCollector.java +++ /dev/null @@ -1,186 +0,0 @@ -package org.jetbrains.ether; - -import com.sun.tools.javac.util.Pair; -import org.jetbrains.jps.*; -import org.jetbrains.jps.resolvers.PathEntry; - -import java.util.*; - -/** - * Created by IntelliJ IDEA. 
- * User: db - * Date: 18.11.10 - * Time: 19:57 - * To change this template use File | Settings | File Templates. - */ -public class StatusCollector { - private static Pair<Long, Long> myDefaultPair = new Pair<Long, Long> (Long.MAX_VALUE, 0l); - - private static Pair<Long, Long> join (final Pair<Long, Long> a, final Pair<Long, Long> b) { - if (a == null) - return b; - - if (b == null) - return a; - - return new Pair<Long, Long> (Math.min(a.fst, b.fst), Math.max(a.snd, b.snd)); - } - - private static Comparator<Library> myLibraryComparator = new Comparator<Library>() { - public int compare (Library a, Library b) { - return a.getName().compareTo(b.getName()); - } - }; - - private static Comparator<Module> myModuleComparator = new Comparator<Module>() { - public int compare (Module a, Module b) { - return a.getName().compareTo(b.getName()); - } - }; - - private static <T> List<T> prepare (final Collection<T> coll, final Comparator<T> comp) { - List<T> list = new ArrayList<T> (); - - for (T elem : coll) { - if (elem != null) { - list.add(elem); - } - } - - Collections.sort(list, comp); - - return list; - } - - private static <T extends Comparable<? super T>> List<T> prepare (final Collection<T> coll) { - return prepare(coll, new Comparator<T> () { - public int compare (T a, T b) { - return a.compareTo(b); - } - }); - } - - private static void listToBuffer (StringBuffer buf, final List list) { - for (Object o : prepare (list)) { - if (o instanceof String) { - buf.append(o + "\n"); - } - else { - buf.append("*** <" + o.getClass().getName() + "> is not String ***\n"); - } - } - } - - private static void namedListToBuffer (StringBuffer buf, final String name, final List list) { - buf.append(name + ":\n"); - listToBuffer(buf, list); - } - - private static Pair<Long, Long> directoryToBuffer (StringBuffer buf, final String dir, final List<String> excludes) { - if (dir != null) { - final DirectoryScanner.Result result = DirectoryScanner.getFiles(dir, excludes); - - for (String name : prepare (result.myFiles)) { - buf.append(name + "\n"); - } - - return new Pair<Long, Long> (result.myEarliest, result.myLatest); - } - - return myDefaultPair; - } - - private static Pair<Long, Long> sourceRootToBuffer (StringBuffer buf, final String name, final List<String> dir, final List<String> excludes) { - Pair<Long, Long> result = myDefaultPair; - - buf.append(name + ":\n"); - - for (String d : prepare (dir)) { - if (dir != null) { - buf.append(d + ":\n"); - result = join (result, directoryToBuffer(buf, d, excludes)); - } - } - - return result; - } - - private static void classPathItemToBuffer (StringBuffer buf, final ClasspathItem cpi, boolean all) { - final ClasspathKind[] allKinds = {ClasspathKind.PRODUCTION_COMPILE, ClasspathKind.PRODUCTION_RUNTIME, ClasspathKind.TEST_COMPILE, ClasspathKind.TEST_RUNTIME}; - final ClasspathKind[] oneKind = {ClasspathKind.PRODUCTION_COMPILE}; - final ClasspathKind[] kinds = all ? allKinds : oneKind; - - for (int i=0; i<kinds.length; i++) { - final ClasspathKind kind = kinds[i]; - final String name = kind.name(); - - namedListToBuffer(buf, "classpath" + (all ? 
" (" + name + ")" : ""), cpi.getClasspathRoots(kind)); - } - } - - public static void libraryToBuffer (StringBuffer buf, final Library library) { - library.forceInit(); - buf.append ("Library: " + library.getName() + "\n"); - classPathItemToBuffer(buf, library, false); - } - - public static ModuleStatus moduleToBuffer (StringBuffer buf, final Module module) { - buf.append("Module: " + module.getName() + "\n"); - - classPathItemToBuffer(buf, module, true); - namedListToBuffer(buf, "Excludes", module.getExcludes()); - - buf.append("Libraries:\n"); - for (Library lib : prepare (module.getLibraries().values(), myLibraryComparator)) { - libraryToBuffer(buf, lib); - } - - long ss = sourceRootToBuffer(buf, "SourceRoots", module.getSourceRoots(), module.getExcludes()).snd; - buf.append("OutputPath: " + module.getOutputPath() + "\n"); - long os = directoryToBuffer(buf, module.getOutputPath(), null).fst; - - long tss = sourceRootToBuffer(buf, "TestRoots", module.getTestRoots(), module.getExcludes()).snd; - buf.append("TestOutputPath: " + module.getTestOutputPath() + "\n"); - long tos = directoryToBuffer(buf, module.getTestOutputPath(), null).fst; - - buf.append("Dependencies:\n"); - for (Module.ModuleDependency dep : module.getDependencies()){ - final ClasspathItem item = dep.getItem(); - if (item instanceof Module) { - buf.append("module " + ((Module) item).getName() + "\n"); - } - else if (item instanceof Library) { - buf.append("library " + ((Library) item).getName() + "\n"); - } - else if (item instanceof JavaSdk) { - buf.append("javaSdk " + ((JavaSdk) item).getName() + "\n"); - } - else if (item instanceof Sdk) { - buf.append("Sdk " + ((Sdk) item).getName() + "\n"); - } - else if (item instanceof PathEntry) { - buf.append("pathEntry " + ((PathEntry) item).getPath() + "\n"); - } - else { - buf.append("unknown ClasspathItem implementation in dependencies: <" + item.getClass().getName() + ">\n"); - } - } - - return new ModuleStatus(module.getName(), ss, os, tss, tos); - } - - public static ProjectSnapshot collectHistory (final Project prj) { - StringBuffer buf = new StringBuffer(); - Map<String, ModuleStatus> moduleHistories = new HashMap<String, ModuleStatus> (); - - for (Library lib : prepare (prj.getLibraries().values(), myLibraryComparator)) { - libraryToBuffer(buf, lib); - } - - for (Module mod : prepare (prj.getModules().values(), myModuleComparator)) { - moduleHistories.put(mod.getName(), moduleToBuffer(buf, mod)); - } - - return new ProjectSnapshot(buf.toString(), moduleHistories); - } -}
603451ed0b64de2aa5c4251d669056701ba788b9
drools
JBRULES-130

git-svn-id: https://svn.jboss.org/repos/labs/trunk/labs/jbossrules@3283 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70
c
https://github.com/kiegroup/drools
diff --git a/drools-compiler/src/main/java/org/drools/lang/RuleParser.java b/drools-compiler/src/main/java/org/drools/lang/RuleParser.java index 424a8b81d9c..4ace9c9518c 100644 --- a/drools-compiler/src/main/java/org/drools/lang/RuleParser.java +++ b/drools-compiler/src/main/java/org/drools/lang/RuleParser.java @@ -1,4 +1,4 @@ -// $ANTLR 3.0ea8 C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g 2006-03-28 11:19:23 +// $ANTLR 3.0ea8 C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g 2006-03-28 14:38:45 package org.drools.lang; import java.util.List; @@ -15,7 +15,7 @@ public class RuleParser extends Parser { public static final String[] tokenNames = new String[] { - "<invalid>", "<EOR>", "<DOWN>", "<UP>", "EOL", "ID", "INT", "BOOL", "STRING", "FLOAT", "MISC", "WS", "SH_STYLE_SINGLE_LINE_COMMENT", "C_STYLE_SINGLE_LINE_COMMENT", "MULTI_LINE_COMMENT", "\'package\'", "\';\'", "\'import\'", "\'expander\'", "\'global\'", "\'function\'", "\'(\'", "\',\'", "\')\'", "\'{\'", "\'}\'", "\'query\'", "\'end\'", "\'rule\'", "\'when\'", "\':\'", "\'then\'", "\'attributes\'", "\'salience\'", "\'no-loop\'", "\'xor-group\'", "\'agenda-group\'", "\'duration\'", "\'or\'", "\'==\'", "\'>\'", "\'>=\'", "\'<\'", "\'<=\'", "\'!=\'", "\'contains\'", "\'matches\'", "\'->\'", "\'||\'", "\'and\'", "\'&&\'", "\'exists\'", "\'not\'", "\'eval\'", "\'.\'", "\'use\'" + "<invalid>", "<EOR>", "<DOWN>", "<UP>", "EOL", "ID", "INT", "BOOL", "STRING", "FLOAT", "MISC", "WS", "SH_STYLE_SINGLE_LINE_COMMENT", "C_STYLE_SINGLE_LINE_COMMENT", "MULTI_LINE_COMMENT", "\'package\'", "\';\'", "\'import\'", "\'expander\'", "\'global\'", "\'function\'", "\'(\'", "\',\'", "\')\'", "\'{\'", "\'}\'", "\'query\'", "\'end\'", "\'rule\'", "\'when\'", "\':\'", "\'then\'", "\'attributes\'", "\'salience\'", "\'no-loop\'", "\'auto-focus\'", "\'xor-group\'", "\'agenda-group\'", "\'duration\'", "\'or\'", "\'==\'", "\'>\'", "\'>=\'", "\'<\'", "\'<=\'", "\'!=\'", "\'contains\'", "\'matches\'", "\'->\'", "\'||\'", "\'and\'", "\'&&\'", "\'exists\'", "\'not\'", "\'eval\'", "\'.\'", "\'use\'" }; public static final int BOOL=7; public static final int INT=6; @@ -1006,8 +1006,9 @@ else if ( true ) { case 48: case 49: case 50: - case 54: + case 51: case 55: + case 56: alt18=1; break; case 27: @@ -1025,7 +1026,7 @@ else if ( true ) { throw nvae; } break; - case 51: + case 52: int LA18_4 = input.LA(2); if ( expander != null ) { alt18=1; @@ -1040,7 +1041,7 @@ else if ( true ) { throw nvae; } break; - case 52: + case 53: int LA18_5 = input.LA(2); if ( expander != null ) { alt18=1; @@ -1055,7 +1056,7 @@ else if ( true ) { throw nvae; } break; - case 53: + case 54: int LA18_6 = input.LA(2); if ( expander != null ) { alt18=1; @@ -1188,6 +1189,7 @@ public RuleDescr rule() throws RecognitionException { case 35: case 36: case 37: + case 38: alt19=1; break; case 29: @@ -1260,7 +1262,7 @@ else if ( expander != null ) { throw nvae; } } - else if ( (LA20_0>=EOL && LA20_0<=29)||(LA20_0>=31 && LA20_0<=55) ) { + else if ( (LA20_0>=EOL && LA20_0<=29)||(LA20_0>=31 && LA20_0<=56) ) { alt20=2; } else { @@ -1350,8 +1352,9 @@ else if ( true ) { case 48: case 49: case 50: - case 54: + case 51: case 55: + case 56: alt21=1; break; case 31: @@ -1369,7 +1372,7 @@ else if ( true ) { throw nvae; } break; - case 51: + case 52: int LA21_4 = input.LA(2); if ( expander != null ) { alt21=1; @@ -1384,7 +1387,7 @@ else if ( true ) { throw nvae; } break; - case 52: + case 53: int LA21_5 = input.LA(2); if ( expander != null ) { alt21=1; @@ 
-1399,7 +1402,7 @@ else if ( true ) { throw nvae; } break; - case 53: + case 54: int LA21_6 = input.LA(2); if ( expander != null ) { alt21=1; @@ -1476,7 +1479,7 @@ else if ( true ) { if ( LA23_0==30 ) { alt23=1; } - else if ( (LA23_0>=EOL && LA23_0<=29)||(LA23_0>=31 && LA23_0<=55) ) { + else if ( (LA23_0>=EOL && LA23_0<=29)||(LA23_0>=31 && LA23_0<=56) ) { alt23=2; } else { @@ -1508,7 +1511,7 @@ else if ( (LA23_0>=EOL && LA23_0<=29)||(LA23_0>=31 && LA23_0<=55) ) { if ( LA24_0==27 ) { alt24=2; } - else if ( (LA24_0>=EOL && LA24_0<=26)||(LA24_0>=28 && LA24_0<=55) ) { + else if ( (LA24_0>=EOL && LA24_0<=26)||(LA24_0>=28 && LA24_0<=56) ) { alt24=1; } @@ -1575,7 +1578,7 @@ public void rule_attributes(RuleDescr rule) throws RecognitionException { if ( LA25_0==32 ) { alt25=1; } - else if ( LA25_0==EOL||LA25_0==22||(LA25_0>=29 && LA25_0<=31)||(LA25_0>=33 && LA25_0<=37) ) { + else if ( LA25_0==EOL||LA25_0==22||(LA25_0>=29 && LA25_0<=31)||(LA25_0>=33 && LA25_0<=38) ) { alt25=2; } else { @@ -1601,7 +1604,7 @@ else if ( LA25_0==EOL||LA25_0==22||(LA25_0>=29 && LA25_0<=31)||(LA25_0>=33 && LA if ( LA26_0==30 ) { alt26=1; } - else if ( LA26_0==EOL||LA26_0==22||LA26_0==29||LA26_0==31||(LA26_0>=33 && LA26_0<=37) ) { + else if ( LA26_0==EOL||LA26_0==22||LA26_0==29||LA26_0==31||(LA26_0>=33 && LA26_0<=38) ) { alt26=2; } else { @@ -1630,7 +1633,7 @@ else if ( LA26_0==EOL||LA26_0==22||LA26_0==29||LA26_0==31||(LA26_0>=33 && LA26_0 do { int alt28=2; int LA28_0 = input.LA(1); - if ( LA28_0==22||(LA28_0>=33 && LA28_0<=37) ) { + if ( LA28_0==22||(LA28_0>=33 && LA28_0<=38) ) { alt28=1; } @@ -1645,7 +1648,7 @@ else if ( LA26_0==EOL||LA26_0==22||LA26_0==29||LA26_0==31||(LA26_0>=33 && LA26_0 if ( LA27_0==22 ) { alt27=1; } - else if ( (LA27_0>=33 && LA27_0<=37) ) { + else if ( (LA27_0>=33 && LA27_0<=38) ) { alt27=2; } else { @@ -1701,7 +1704,7 @@ else if ( (LA27_0>=33 && LA27_0<=37) ) { // $ANTLR start rule_attribute - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:265:1: rule_attribute returns [AttributeDescr d] : (a= salience | a= no_loop | a= agenda_group | a= duration | a= xor_group ); + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:265:1: rule_attribute returns [AttributeDescr d] : (a= salience | a= no_loop | a= agenda_group | a= duration | a= xor_group | a= auto_focus ); public AttributeDescr rule_attribute() throws RecognitionException { AttributeDescr d; AttributeDescr a = null; @@ -1711,8 +1714,8 @@ public AttributeDescr rule_attribute() throws RecognitionException { d = null; try { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:270:25: (a= salience | a= no_loop | a= agenda_group | a= duration | a= xor_group ) - int alt29=5; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:270:25: (a= salience | a= no_loop | a= agenda_group | a= duration | a= xor_group | a= auto_focus ) + int alt29=6; switch ( input.LA(1) ) { case 33: alt29=1; @@ -1720,18 +1723,21 @@ public AttributeDescr rule_attribute() throws RecognitionException { case 34: alt29=2; break; - case 36: + case 37: alt29=3; break; - case 37: + case 38: alt29=4; break; - case 35: + case 36: alt29=5; break; + case 35: + alt29=6; + break; default: NoViableAltException nvae = - new NoViableAltException("265:1: rule_attribute returns [AttributeDescr d] : (a= salience | a= no_loop | a= agenda_group | a= duration | a= xor_group );", 29, 0, input); + new NoViableAltException("265:1: rule_attribute returns [AttributeDescr d] 
: (a= salience | a= no_loop | a= agenda_group | a= duration | a= xor_group | a= auto_focus );", 29, 0, input); throw nvae; } @@ -1792,6 +1798,17 @@ public AttributeDescr rule_attribute() throws RecognitionException { } break; + case 6 : + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:275:25: a= auto_focus + { + following.push(FOLLOW_auto_focus_in_rule_attribute812); + a=auto_focus(); + following.pop(); + + d = a; + + } + break; } } @@ -1807,7 +1824,7 @@ public AttributeDescr rule_attribute() throws RecognitionException { // $ANTLR start salience - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:278:1: salience returns [AttributeDescr d ] : loc= 'salience' opt_eol i= INT ( ';' )? opt_eol ; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:279:1: salience returns [AttributeDescr d ] : loc= 'salience' opt_eol i= INT ( ';' )? opt_eol ; public AttributeDescr salience() throws RecognitionException { AttributeDescr d; Token loc=null; @@ -1817,44 +1834,44 @@ public AttributeDescr salience() throws RecognitionException { d = null; try { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:283:17: (loc= 'salience' opt_eol i= INT ( ';' )? opt_eol ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:283:17: loc= 'salience' opt_eol i= INT ( ';' )? opt_eol + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:284:17: (loc= 'salience' opt_eol i= INT ( ';' )? opt_eol ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:284:17: loc= 'salience' opt_eol i= INT ( ';' )? opt_eol { loc=(Token)input.LT(1); - match(input,33,FOLLOW_33_in_salience834); - following.push(FOLLOW_opt_eol_in_salience836); + match(input,33,FOLLOW_33_in_salience845); + following.push(FOLLOW_opt_eol_in_salience847); opt_eol(); following.pop(); i=(Token)input.LT(1); - match(input,INT,FOLLOW_INT_in_salience840); - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:283:46: ( ';' )? + match(input,INT,FOLLOW_INT_in_salience851); + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:284:46: ( ';' )? int alt30=2; int LA30_0 = input.LA(1); if ( LA30_0==16 ) { alt30=1; } - else if ( LA30_0==EOL||LA30_0==22||LA30_0==29||LA30_0==31||(LA30_0>=33 && LA30_0<=37) ) { + else if ( LA30_0==EOL||LA30_0==22||LA30_0==29||LA30_0==31||(LA30_0>=33 && LA30_0<=38) ) { alt30=2; } else { NoViableAltException nvae = - new NoViableAltException("283:46: ( \';\' )?", 30, 0, input); + new NoViableAltException("284:46: ( \';\' )?", 30, 0, input); throw nvae; } switch (alt30) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:283:46: ';' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:284:46: ';' { - match(input,16,FOLLOW_16_in_salience842); + match(input,16,FOLLOW_16_in_salience853); } break; } - following.push(FOLLOW_opt_eol_in_salience845); + following.push(FOLLOW_opt_eol_in_salience856); opt_eol(); following.pop(); @@ -1878,7 +1895,7 @@ else if ( LA30_0==EOL||LA30_0==22||LA30_0==29||LA30_0==31||(LA30_0>=33 && LA30_0 // $ANTLR start no_loop - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:290:1: no_loop returns [AttributeDescr d] : ( (loc= 'no-loop' opt_eol ( ';' )? opt_eol ) | (loc= 'no-loop' t= BOOL opt_eol ( ';' )? 
opt_eol ) ); + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:291:1: no_loop returns [AttributeDescr d] : ( (loc= 'no-loop' opt_eol ( ';' )? opt_eol ) | (loc= 'no-loop' t= BOOL opt_eol ( ';' )? opt_eol ) ); public AttributeDescr no_loop() throws RecognitionException { AttributeDescr d; Token loc=null; @@ -1888,7 +1905,7 @@ public AttributeDescr no_loop() throws RecognitionException { d = null; try { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:295:17: ( (loc= 'no-loop' opt_eol ( ';' )? opt_eol ) | (loc= 'no-loop' t= BOOL opt_eol ( ';' )? opt_eol ) ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:296:17: ( (loc= 'no-loop' opt_eol ( ';' )? opt_eol ) | (loc= 'no-loop' t= BOOL opt_eol ( ';' )? opt_eol ) ) int alt33=2; int LA33_0 = input.LA(1); if ( LA33_0==34 ) { @@ -1896,62 +1913,62 @@ public AttributeDescr no_loop() throws RecognitionException { if ( LA33_1==BOOL ) { alt33=2; } - else if ( LA33_1==EOL||LA33_1==16||LA33_1==22||LA33_1==29||LA33_1==31||(LA33_1>=33 && LA33_1<=37) ) { + else if ( LA33_1==EOL||LA33_1==16||LA33_1==22||LA33_1==29||LA33_1==31||(LA33_1>=33 && LA33_1<=38) ) { alt33=1; } else { NoViableAltException nvae = - new NoViableAltException("290:1: no_loop returns [AttributeDescr d] : ( (loc= \'no-loop\' opt_eol ( \';\' )? opt_eol ) | (loc= \'no-loop\' t= BOOL opt_eol ( \';\' )? opt_eol ) );", 33, 1, input); + new NoViableAltException("291:1: no_loop returns [AttributeDescr d] : ( (loc= \'no-loop\' opt_eol ( \';\' )? opt_eol ) | (loc= \'no-loop\' t= BOOL opt_eol ( \';\' )? opt_eol ) );", 33, 1, input); throw nvae; } } else { NoViableAltException nvae = - new NoViableAltException("290:1: no_loop returns [AttributeDescr d] : ( (loc= \'no-loop\' opt_eol ( \';\' )? opt_eol ) | (loc= \'no-loop\' t= BOOL opt_eol ( \';\' )? opt_eol ) );", 33, 0, input); + new NoViableAltException("291:1: no_loop returns [AttributeDescr d] : ( (loc= \'no-loop\' opt_eol ( \';\' )? opt_eol ) | (loc= \'no-loop\' t= BOOL opt_eol ( \';\' )? opt_eol ) );", 33, 0, input); throw nvae; } switch (alt33) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:295:17: (loc= 'no-loop' opt_eol ( ';' )? opt_eol ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:296:17: (loc= 'no-loop' opt_eol ( ';' )? opt_eol ) { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:295:17: (loc= 'no-loop' opt_eol ( ';' )? opt_eol ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:296:25: loc= 'no-loop' opt_eol ( ';' )? opt_eol + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:296:17: (loc= 'no-loop' opt_eol ( ';' )? opt_eol ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:297:25: loc= 'no-loop' opt_eol ( ';' )? opt_eol { loc=(Token)input.LT(1); - match(input,34,FOLLOW_34_in_no_loop880); - following.push(FOLLOW_opt_eol_in_no_loop882); + match(input,34,FOLLOW_34_in_no_loop891); + following.push(FOLLOW_opt_eol_in_no_loop893); opt_eol(); following.pop(); - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:296:47: ( ';' )? + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:297:47: ( ';' )? 
int alt31=2; int LA31_0 = input.LA(1); if ( LA31_0==16 ) { alt31=1; } - else if ( LA31_0==EOL||LA31_0==22||LA31_0==29||LA31_0==31||(LA31_0>=33 && LA31_0<=37) ) { + else if ( LA31_0==EOL||LA31_0==22||LA31_0==29||LA31_0==31||(LA31_0>=33 && LA31_0<=38) ) { alt31=2; } else { NoViableAltException nvae = - new NoViableAltException("296:47: ( \';\' )?", 31, 0, input); + new NoViableAltException("297:47: ( \';\' )?", 31, 0, input); throw nvae; } switch (alt31) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:296:47: ';' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:297:47: ';' { - match(input,16,FOLLOW_16_in_no_loop884); + match(input,16,FOLLOW_16_in_no_loop895); } break; } - following.push(FOLLOW_opt_eol_in_no_loop887); + following.push(FOLLOW_opt_eol_in_no_loop898); opt_eol(); following.pop(); @@ -1966,46 +1983,46 @@ else if ( LA31_0==EOL||LA31_0==22||LA31_0==29||LA31_0==31||(LA31_0>=33 && LA31_0 } break; case 2 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:303:17: (loc= 'no-loop' t= BOOL opt_eol ( ';' )? opt_eol ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:304:17: (loc= 'no-loop' t= BOOL opt_eol ( ';' )? opt_eol ) { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:303:17: (loc= 'no-loop' t= BOOL opt_eol ( ';' )? opt_eol ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:304:25: loc= 'no-loop' t= BOOL opt_eol ( ';' )? opt_eol + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:304:17: (loc= 'no-loop' t= BOOL opt_eol ( ';' )? opt_eol ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:305:25: loc= 'no-loop' t= BOOL opt_eol ( ';' )? opt_eol { loc=(Token)input.LT(1); - match(input,34,FOLLOW_34_in_no_loop912); + match(input,34,FOLLOW_34_in_no_loop923); t=(Token)input.LT(1); - match(input,BOOL,FOLLOW_BOOL_in_no_loop916); - following.push(FOLLOW_opt_eol_in_no_loop918); + match(input,BOOL,FOLLOW_BOOL_in_no_loop927); + following.push(FOLLOW_opt_eol_in_no_loop929); opt_eol(); following.pop(); - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:304:54: ( ';' )? + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:305:54: ( ';' )? 
int alt32=2; int LA32_0 = input.LA(1); if ( LA32_0==16 ) { alt32=1; } - else if ( LA32_0==EOL||LA32_0==22||LA32_0==29||LA32_0==31||(LA32_0>=33 && LA32_0<=37) ) { + else if ( LA32_0==EOL||LA32_0==22||LA32_0==29||LA32_0==31||(LA32_0>=33 && LA32_0<=38) ) { alt32=2; } else { NoViableAltException nvae = - new NoViableAltException("304:54: ( \';\' )?", 32, 0, input); + new NoViableAltException("305:54: ( \';\' )?", 32, 0, input); throw nvae; } switch (alt32) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:304:54: ';' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:305:54: ';' { - match(input,16,FOLLOW_16_in_no_loop920); + match(input,16,FOLLOW_16_in_no_loop931); } break; } - following.push(FOLLOW_opt_eol_in_no_loop923); + following.push(FOLLOW_opt_eol_in_no_loop934); opt_eol(); following.pop(); @@ -2033,8 +2050,164 @@ else if ( LA32_0==EOL||LA32_0==22||LA32_0==29||LA32_0==31||(LA32_0>=33 && LA32_0 // $ANTLR end no_loop + // $ANTLR start auto_focus + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:315:1: auto_focus returns [AttributeDescr d] : ( (loc= 'auto-focus' opt_eol ( ';' )? opt_eol ) | (loc= 'auto-focus' t= BOOL opt_eol ( ';' )? opt_eol ) ); + public AttributeDescr auto_focus() throws RecognitionException { + AttributeDescr d; + Token loc=null; + Token t=null; + + + d = null; + + try { + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:320:17: ( (loc= 'auto-focus' opt_eol ( ';' )? opt_eol ) | (loc= 'auto-focus' t= BOOL opt_eol ( ';' )? opt_eol ) ) + int alt36=2; + int LA36_0 = input.LA(1); + if ( LA36_0==35 ) { + int LA36_1 = input.LA(2); + if ( LA36_1==BOOL ) { + alt36=2; + } + else if ( LA36_1==EOL||LA36_1==16||LA36_1==22||LA36_1==29||LA36_1==31||(LA36_1>=33 && LA36_1<=38) ) { + alt36=1; + } + else { + NoViableAltException nvae = + new NoViableAltException("315:1: auto_focus returns [AttributeDescr d] : ( (loc= \'auto-focus\' opt_eol ( \';\' )? opt_eol ) | (loc= \'auto-focus\' t= BOOL opt_eol ( \';\' )? opt_eol ) );", 36, 1, input); + + throw nvae; + } + } + else { + NoViableAltException nvae = + new NoViableAltException("315:1: auto_focus returns [AttributeDescr d] : ( (loc= \'auto-focus\' opt_eol ( \';\' )? opt_eol ) | (loc= \'auto-focus\' t= BOOL opt_eol ( \';\' )? opt_eol ) );", 36, 0, input); + + throw nvae; + } + switch (alt36) { + case 1 : + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:320:17: (loc= 'auto-focus' opt_eol ( ';' )? opt_eol ) + { + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:320:17: (loc= 'auto-focus' opt_eol ( ';' )? opt_eol ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:321:25: loc= 'auto-focus' opt_eol ( ';' )? opt_eol + { + loc=(Token)input.LT(1); + match(input,35,FOLLOW_35_in_auto_focus980); + following.push(FOLLOW_opt_eol_in_auto_focus982); + opt_eol(); + following.pop(); + + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:321:50: ( ';' )? 
+ int alt34=2; + int LA34_0 = input.LA(1); + if ( LA34_0==16 ) { + alt34=1; + } + else if ( LA34_0==EOL||LA34_0==22||LA34_0==29||LA34_0==31||(LA34_0>=33 && LA34_0<=38) ) { + alt34=2; + } + else { + NoViableAltException nvae = + new NoViableAltException("321:50: ( \';\' )?", 34, 0, input); + + throw nvae; + } + switch (alt34) { + case 1 : + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:321:50: ';' + { + match(input,16,FOLLOW_16_in_auto_focus984); + + } + break; + + } + + following.push(FOLLOW_opt_eol_in_auto_focus987); + opt_eol(); + following.pop(); + + + d = new AttributeDescr( "auto-focus", "true" ); + d.setLocation( loc.getLine(), loc.getCharPositionInLine() ); + + + } + + + } + break; + case 2 : + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:328:17: (loc= 'auto-focus' t= BOOL opt_eol ( ';' )? opt_eol ) + { + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:328:17: (loc= 'auto-focus' t= BOOL opt_eol ( ';' )? opt_eol ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:329:25: loc= 'auto-focus' t= BOOL opt_eol ( ';' )? opt_eol + { + loc=(Token)input.LT(1); + match(input,35,FOLLOW_35_in_auto_focus1012); + t=(Token)input.LT(1); + match(input,BOOL,FOLLOW_BOOL_in_auto_focus1016); + following.push(FOLLOW_opt_eol_in_auto_focus1018); + opt_eol(); + following.pop(); + + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:329:57: ( ';' )? + int alt35=2; + int LA35_0 = input.LA(1); + if ( LA35_0==16 ) { + alt35=1; + } + else if ( LA35_0==EOL||LA35_0==22||LA35_0==29||LA35_0==31||(LA35_0>=33 && LA35_0<=38) ) { + alt35=2; + } + else { + NoViableAltException nvae = + new NoViableAltException("329:57: ( \';\' )?", 35, 0, input); + + throw nvae; + } + switch (alt35) { + case 1 : + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:329:57: ';' + { + match(input,16,FOLLOW_16_in_auto_focus1020); + + } + break; + + } + + following.push(FOLLOW_opt_eol_in_auto_focus1023); + opt_eol(); + following.pop(); + + + d = new AttributeDescr( "auto-focus", t.getText() ); + d.setLocation( loc.getLine(), loc.getCharPositionInLine() ); + + + } + + + } + break; + + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + } + finally { + } + return d; + } + // $ANTLR end auto_focus + + // $ANTLR start xor_group - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:314:1: xor_group returns [AttributeDescr d] : loc= 'xor-group' opt_eol name= STRING ( ';' )? opt_eol ; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:339:1: xor_group returns [AttributeDescr d] : loc= 'xor-group' opt_eol name= STRING ( ';' )? opt_eol ; public AttributeDescr xor_group() throws RecognitionException { AttributeDescr d; Token loc=null; @@ -2044,44 +2217,44 @@ public AttributeDescr xor_group() throws RecognitionException { d = null; try { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:319:17: (loc= 'xor-group' opt_eol name= STRING ( ';' )? opt_eol ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:319:17: loc= 'xor-group' opt_eol name= STRING ( ';' )? opt_eol + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:344:17: (loc= 'xor-group' opt_eol name= STRING ( ';' )? 
opt_eol ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:344:17: loc= 'xor-group' opt_eol name= STRING ( ';' )? opt_eol { loc=(Token)input.LT(1); - match(input,35,FOLLOW_35_in_xor_group964); - following.push(FOLLOW_opt_eol_in_xor_group966); + match(input,36,FOLLOW_36_in_xor_group1065); + following.push(FOLLOW_opt_eol_in_xor_group1067); opt_eol(); following.pop(); name=(Token)input.LT(1); - match(input,STRING,FOLLOW_STRING_in_xor_group970); - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:319:53: ( ';' )? - int alt34=2; - int LA34_0 = input.LA(1); - if ( LA34_0==16 ) { - alt34=1; + match(input,STRING,FOLLOW_STRING_in_xor_group1071); + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:344:53: ( ';' )? + int alt37=2; + int LA37_0 = input.LA(1); + if ( LA37_0==16 ) { + alt37=1; } - else if ( LA34_0==EOL||LA34_0==22||LA34_0==29||LA34_0==31||(LA34_0>=33 && LA34_0<=37) ) { - alt34=2; + else if ( LA37_0==EOL||LA37_0==22||LA37_0==29||LA37_0==31||(LA37_0>=33 && LA37_0<=38) ) { + alt37=2; } else { NoViableAltException nvae = - new NoViableAltException("319:53: ( \';\' )?", 34, 0, input); + new NoViableAltException("344:53: ( \';\' )?", 37, 0, input); throw nvae; } - switch (alt34) { + switch (alt37) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:319:53: ';' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:344:53: ';' { - match(input,16,FOLLOW_16_in_xor_group972); + match(input,16,FOLLOW_16_in_xor_group1073); } break; } - following.push(FOLLOW_opt_eol_in_xor_group975); + following.push(FOLLOW_opt_eol_in_xor_group1076); opt_eol(); following.pop(); @@ -2105,7 +2278,7 @@ else if ( LA34_0==EOL||LA34_0==22||LA34_0==29||LA34_0==31||(LA34_0>=33 && LA34_0 // $ANTLR start agenda_group - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:326:1: agenda_group returns [AttributeDescr d] : loc= 'agenda-group' opt_eol name= STRING ( ';' )? opt_eol ; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:351:1: agenda_group returns [AttributeDescr d] : loc= 'agenda-group' opt_eol name= STRING ( ';' )? opt_eol ; public AttributeDescr agenda_group() throws RecognitionException { AttributeDescr d; Token loc=null; @@ -2115,44 +2288,44 @@ public AttributeDescr agenda_group() throws RecognitionException { d = null; try { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:331:17: (loc= 'agenda-group' opt_eol name= STRING ( ';' )? opt_eol ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:331:17: loc= 'agenda-group' opt_eol name= STRING ( ';' )? opt_eol + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:356:17: (loc= 'agenda-group' opt_eol name= STRING ( ';' )? opt_eol ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:356:17: loc= 'agenda-group' opt_eol name= STRING ( ';' )? opt_eol { loc=(Token)input.LT(1); - match(input,36,FOLLOW_36_in_agenda_group1004); - following.push(FOLLOW_opt_eol_in_agenda_group1006); + match(input,37,FOLLOW_37_in_agenda_group1105); + following.push(FOLLOW_opt_eol_in_agenda_group1107); opt_eol(); following.pop(); name=(Token)input.LT(1); - match(input,STRING,FOLLOW_STRING_in_agenda_group1010); - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:331:56: ( ';' )? 
- int alt35=2; - int LA35_0 = input.LA(1); - if ( LA35_0==16 ) { - alt35=1; + match(input,STRING,FOLLOW_STRING_in_agenda_group1111); + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:356:56: ( ';' )? + int alt38=2; + int LA38_0 = input.LA(1); + if ( LA38_0==16 ) { + alt38=1; } - else if ( LA35_0==EOL||LA35_0==22||LA35_0==29||LA35_0==31||(LA35_0>=33 && LA35_0<=37) ) { - alt35=2; + else if ( LA38_0==EOL||LA38_0==22||LA38_0==29||LA38_0==31||(LA38_0>=33 && LA38_0<=38) ) { + alt38=2; } else { NoViableAltException nvae = - new NoViableAltException("331:56: ( \';\' )?", 35, 0, input); + new NoViableAltException("356:56: ( \';\' )?", 38, 0, input); throw nvae; } - switch (alt35) { + switch (alt38) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:331:56: ';' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:356:56: ';' { - match(input,16,FOLLOW_16_in_agenda_group1012); + match(input,16,FOLLOW_16_in_agenda_group1113); } break; } - following.push(FOLLOW_opt_eol_in_agenda_group1015); + following.push(FOLLOW_opt_eol_in_agenda_group1116); opt_eol(); following.pop(); @@ -2176,7 +2349,7 @@ else if ( LA35_0==EOL||LA35_0==22||LA35_0==29||LA35_0==31||(LA35_0>=33 && LA35_0 // $ANTLR start duration - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:339:1: duration returns [AttributeDescr d] : loc= 'duration' opt_eol i= INT ( ';' )? opt_eol ; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:364:1: duration returns [AttributeDescr d] : loc= 'duration' opt_eol i= INT ( ';' )? opt_eol ; public AttributeDescr duration() throws RecognitionException { AttributeDescr d; Token loc=null; @@ -2186,44 +2359,44 @@ public AttributeDescr duration() throws RecognitionException { d = null; try { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:344:17: (loc= 'duration' opt_eol i= INT ( ';' )? opt_eol ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:344:17: loc= 'duration' opt_eol i= INT ( ';' )? opt_eol + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:369:17: (loc= 'duration' opt_eol i= INT ( ';' )? opt_eol ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:369:17: loc= 'duration' opt_eol i= INT ( ';' )? opt_eol { loc=(Token)input.LT(1); - match(input,37,FOLLOW_37_in_duration1047); - following.push(FOLLOW_opt_eol_in_duration1049); + match(input,38,FOLLOW_38_in_duration1148); + following.push(FOLLOW_opt_eol_in_duration1150); opt_eol(); following.pop(); i=(Token)input.LT(1); - match(input,INT,FOLLOW_INT_in_duration1053); - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:344:46: ( ';' )? - int alt36=2; - int LA36_0 = input.LA(1); - if ( LA36_0==16 ) { - alt36=1; + match(input,INT,FOLLOW_INT_in_duration1154); + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:369:46: ( ';' )? 
+ int alt39=2; + int LA39_0 = input.LA(1); + if ( LA39_0==16 ) { + alt39=1; } - else if ( LA36_0==EOL||LA36_0==22||LA36_0==29||LA36_0==31||(LA36_0>=33 && LA36_0<=37) ) { - alt36=2; + else if ( LA39_0==EOL||LA39_0==22||LA39_0==29||LA39_0==31||(LA39_0>=33 && LA39_0<=38) ) { + alt39=2; } else { NoViableAltException nvae = - new NoViableAltException("344:46: ( \';\' )?", 36, 0, input); + new NoViableAltException("369:46: ( \';\' )?", 39, 0, input); throw nvae; } - switch (alt36) { + switch (alt39) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:344:46: ';' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:369:46: ';' { - match(input,16,FOLLOW_16_in_duration1055); + match(input,16,FOLLOW_16_in_duration1156); } break; } - following.push(FOLLOW_opt_eol_in_duration1058); + following.push(FOLLOW_opt_eol_in_duration1159); opt_eol(); following.pop(); @@ -2247,30 +2420,30 @@ else if ( LA36_0==EOL||LA36_0==22||LA36_0==29||LA36_0==31||(LA36_0>=33 && LA36_0 // $ANTLR start normal_lhs_block - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:352:1: normal_lhs_block[AndDescr descrs] : (d= lhs )* ; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:377:1: normal_lhs_block[AndDescr descrs] : (d= lhs )* ; public void normal_lhs_block(AndDescr descrs) throws RecognitionException { PatternDescr d = null; try { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:354:17: ( (d= lhs )* ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:354:17: (d= lhs )* + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:379:17: ( (d= lhs )* ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:379:17: (d= lhs )* { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:354:17: (d= lhs )* - loop37: + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:379:17: (d= lhs )* + loop40: do { - int alt37=2; - int LA37_0 = input.LA(1); - if ( LA37_0==ID||LA37_0==21||(LA37_0>=51 && LA37_0<=53) ) { - alt37=1; + int alt40=2; + int LA40_0 = input.LA(1); + if ( LA40_0==ID||LA40_0==21||(LA40_0>=52 && LA40_0<=54) ) { + alt40=1; } - switch (alt37) { + switch (alt40) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:354:25: d= lhs + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:379:25: d= lhs { - following.push(FOLLOW_lhs_in_normal_lhs_block1084); + following.push(FOLLOW_lhs_in_normal_lhs_block1185); d=lhs(); following.pop(); @@ -2280,7 +2453,7 @@ public void normal_lhs_block(AndDescr descrs) throws RecognitionException { break; default : - break loop37; + break loop40; } } while (true); @@ -2300,25 +2473,25 @@ public void normal_lhs_block(AndDescr descrs) throws RecognitionException { // $ANTLR start expander_lhs_block - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:362:1: expander_lhs_block[AndDescr descrs] : ( options {greedy=false; } : text= paren_chunk EOL )* ; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:387:1: expander_lhs_block[AndDescr descrs] : ( options {greedy=false; } : text= paren_chunk EOL )* ; public void expander_lhs_block(AndDescr descrs) throws RecognitionException { String text = null; try { - // 
C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:365:17: ( ( options {greedy=false; } : text= paren_chunk EOL )* ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:365:17: ( options {greedy=false; } : text= paren_chunk EOL )* + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:390:17: ( ( options {greedy=false; } : text= paren_chunk EOL )* ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:390:17: ( options {greedy=false; } : text= paren_chunk EOL )* { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:365:17: ( options {greedy=false; } : text= paren_chunk EOL )* - loop38: + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:390:17: ( options {greedy=false; } : text= paren_chunk EOL )* + loop41: do { - int alt38=2; + int alt41=2; switch ( input.LA(1) ) { case 27: - alt38=2; + alt41=2; break; case 31: - alt38=2; + alt41=2; break; case EOL: case ID: @@ -2370,20 +2543,21 @@ public void expander_lhs_block(AndDescr descrs) throws RecognitionException { case 53: case 54: case 55: - alt38=1; + case 56: + alt41=1; break; } - switch (alt38) { + switch (alt41) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:366:25: text= paren_chunk EOL + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:391:25: text= paren_chunk EOL { - following.push(FOLLOW_paren_chunk_in_expander_lhs_block1130); + following.push(FOLLOW_paren_chunk_in_expander_lhs_block1231); text=paren_chunk(); following.pop(); - match(input,EOL,FOLLOW_EOL_in_expander_lhs_block1132); + match(input,EOL,FOLLOW_EOL_in_expander_lhs_block1233); //only expand non null if (text != null) { @@ -2396,7 +2570,7 @@ public void expander_lhs_block(AndDescr descrs) throws RecognitionException { break; default : - break loop38; + break loop41; } } while (true); @@ -2416,7 +2590,7 @@ public void expander_lhs_block(AndDescr descrs) throws RecognitionException { // $ANTLR start lhs - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:381:1: lhs returns [PatternDescr d] : l= lhs_or ; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:406:1: lhs returns [PatternDescr d] : l= lhs_or ; public PatternDescr lhs() throws RecognitionException { PatternDescr d; PatternDescr l = null; @@ -2426,10 +2600,10 @@ public PatternDescr lhs() throws RecognitionException { d=null; try { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:385:17: (l= lhs_or ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:385:17: l= lhs_or + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:410:17: (l= lhs_or ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:410:17: l= lhs_or { - following.push(FOLLOW_lhs_or_in_lhs1176); + following.push(FOLLOW_lhs_or_in_lhs1277); l=lhs_or(); following.pop(); @@ -2450,7 +2624,7 @@ public PatternDescr lhs() throws RecognitionException { // $ANTLR start lhs_column - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:389:1: lhs_column returns [PatternDescr d] : (f= fact_binding | f= fact ); + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:414:1: lhs_column returns [PatternDescr d] : (f= fact_binding | f= fact 
); public PatternDescr lhs_column() throws RecognitionException { PatternDescr d; PatternDescr f = null; @@ -2460,14 +2634,14 @@ public PatternDescr lhs_column() throws RecognitionException { d=null; try { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:393:17: (f= fact_binding | f= fact ) - int alt39=2; - alt39 = dfa39.predict(input); - switch (alt39) { + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:418:17: (f= fact_binding | f= fact ) + int alt42=2; + alt42 = dfa42.predict(input); + switch (alt42) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:393:17: f= fact_binding + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:418:17: f= fact_binding { - following.push(FOLLOW_fact_binding_in_lhs_column1203); + following.push(FOLLOW_fact_binding_in_lhs_column1304); f=fact_binding(); following.pop(); @@ -2476,9 +2650,9 @@ public PatternDescr lhs_column() throws RecognitionException { } break; case 2 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:394:17: f= fact + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:419:17: f= fact { - following.push(FOLLOW_fact_in_lhs_column1212); + following.push(FOLLOW_fact_in_lhs_column1313); f=fact(); following.pop(); @@ -2501,7 +2675,7 @@ public PatternDescr lhs_column() throws RecognitionException { // $ANTLR start fact_binding - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:397:1: fact_binding returns [PatternDescr d] : id= ID opt_eol ':' opt_eol f= fact opt_eol ( 'or' f= fact )* ; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:422:1: fact_binding returns [PatternDescr d] : id= ID opt_eol ':' opt_eol f= fact opt_eol ( 'or' f= fact )* ; public PatternDescr fact_binding() throws RecognitionException { PatternDescr d; Token id=null; @@ -2513,25 +2687,25 @@ public PatternDescr fact_binding() throws RecognitionException { boolean multi=false; try { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:403:17: (id= ID opt_eol ':' opt_eol f= fact opt_eol ( 'or' f= fact )* ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:403:17: id= ID opt_eol ':' opt_eol f= fact opt_eol ( 'or' f= fact )* + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:428:17: (id= ID opt_eol ':' opt_eol f= fact opt_eol ( 'or' f= fact )* ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:428:17: id= ID opt_eol ':' opt_eol f= fact opt_eol ( 'or' f= fact )* { id=(Token)input.LT(1); - match(input,ID,FOLLOW_ID_in_fact_binding1244); - following.push(FOLLOW_opt_eol_in_fact_binding1254); + match(input,ID,FOLLOW_ID_in_fact_binding1345); + following.push(FOLLOW_opt_eol_in_fact_binding1355); opt_eol(); following.pop(); - match(input,30,FOLLOW_30_in_fact_binding1256); - following.push(FOLLOW_opt_eol_in_fact_binding1258); + match(input,30,FOLLOW_30_in_fact_binding1357); + following.push(FOLLOW_opt_eol_in_fact_binding1359); opt_eol(); following.pop(); - following.push(FOLLOW_fact_in_fact_binding1266); + following.push(FOLLOW_fact_in_fact_binding1367); f=fact(); following.pop(); - following.push(FOLLOW_opt_eol_in_fact_binding1268); + following.push(FOLLOW_opt_eol_in_fact_binding1369); opt_eol(); following.pop(); @@ -2539,21 +2713,21 @@ public PatternDescr 
fact_binding() throws RecognitionException { ((ColumnDescr)f).setIdentifier( id.getText() ); d = f; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:411:17: ( 'or' f= fact )* - loop40: + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:436:17: ( 'or' f= fact )* + loop43: do { - int alt40=2; - int LA40_0 = input.LA(1); - if ( LA40_0==38 ) { - alt40=1; + int alt43=2; + int LA43_0 = input.LA(1); + if ( LA43_0==39 ) { + alt43=1; } - switch (alt40) { + switch (alt43) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:411:25: 'or' f= fact + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:436:25: 'or' f= fact { - match(input,38,FOLLOW_38_in_fact_binding1280); + match(input,39,FOLLOW_39_in_fact_binding1381); if ( ! multi ) { PatternDescr first = d; d = new OrDescr(); @@ -2561,7 +2735,7 @@ public PatternDescr fact_binding() throws RecognitionException { multi=true; } - following.push(FOLLOW_fact_in_fact_binding1294); + following.push(FOLLOW_fact_in_fact_binding1395); f=fact(); following.pop(); @@ -2574,7 +2748,7 @@ public PatternDescr fact_binding() throws RecognitionException { break; default : - break loop40; + break loop43; } } while (true); @@ -2594,7 +2768,7 @@ public PatternDescr fact_binding() throws RecognitionException { // $ANTLR start fact - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:427:1: fact returns [PatternDescr d] : id= ID opt_eol '(' opt_eol (c= constraints )? opt_eol ')' opt_eol ; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:452:1: fact returns [PatternDescr d] : id= ID opt_eol '(' opt_eol (c= constraints )? opt_eol ')' opt_eol ; public PatternDescr fact() throws RecognitionException { PatternDescr d; Token id=null; @@ -2605,32 +2779,32 @@ public PatternDescr fact() throws RecognitionException { d=null; try { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:431:17: (id= ID opt_eol '(' opt_eol (c= constraints )? opt_eol ')' opt_eol ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:431:17: id= ID opt_eol '(' opt_eol (c= constraints )? opt_eol ')' opt_eol + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:456:17: (id= ID opt_eol '(' opt_eol (c= constraints )? opt_eol ')' opt_eol ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:456:17: id= ID opt_eol '(' opt_eol (c= constraints )? opt_eol ')' opt_eol { id=(Token)input.LT(1); - match(input,ID,FOLLOW_ID_in_fact1334); + match(input,ID,FOLLOW_ID_in_fact1435); d = new ColumnDescr( id.getText() ); d.setLocation( id.getLine(), id.getCharPositionInLine() ); - following.push(FOLLOW_opt_eol_in_fact1342); + following.push(FOLLOW_opt_eol_in_fact1443); opt_eol(); following.pop(); - match(input,21,FOLLOW_21_in_fact1348); - following.push(FOLLOW_opt_eol_in_fact1350); + match(input,21,FOLLOW_21_in_fact1449); + following.push(FOLLOW_opt_eol_in_fact1451); opt_eol(); following.pop(); - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:436:29: (c= constraints )? - int alt41=2; - alt41 = dfa41.predict(input); - switch (alt41) { + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:461:29: (c= constraints )? 
+ int alt44=2; + alt44 = dfa44.predict(input); + switch (alt44) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:436:33: c= constraints + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:461:33: c= constraints { - following.push(FOLLOW_constraints_in_fact1356); + following.push(FOLLOW_constraints_in_fact1457); c=constraints(); following.pop(); @@ -2645,12 +2819,12 @@ public PatternDescr fact() throws RecognitionException { } - following.push(FOLLOW_opt_eol_in_fact1375); + following.push(FOLLOW_opt_eol_in_fact1476); opt_eol(); following.pop(); - match(input,23,FOLLOW_23_in_fact1377); - following.push(FOLLOW_opt_eol_in_fact1379); + match(input,23,FOLLOW_23_in_fact1478); + following.push(FOLLOW_opt_eol_in_fact1480); opt_eol(); following.pop(); @@ -2670,76 +2844,76 @@ public PatternDescr fact() throws RecognitionException { // $ANTLR start constraints - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:447:1: constraints returns [List constraints] : opt_eol ( constraint[constraints] | predicate[constraints] ) ( opt_eol ',' opt_eol ( constraint[constraints] | predicate[constraints] ) )* opt_eol ; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:472:1: constraints returns [List constraints] : opt_eol ( constraint[constraints] | predicate[constraints] ) ( opt_eol ',' opt_eol ( constraint[constraints] | predicate[constraints] ) )* opt_eol ; public List constraints() throws RecognitionException { List constraints; constraints = new ArrayList(); try { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:451:17: ( opt_eol ( constraint[constraints] | predicate[constraints] ) ( opt_eol ',' opt_eol ( constraint[constraints] | predicate[constraints] ) )* opt_eol ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:451:17: opt_eol ( constraint[constraints] | predicate[constraints] ) ( opt_eol ',' opt_eol ( constraint[constraints] | predicate[constraints] ) )* opt_eol + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:476:17: ( opt_eol ( constraint[constraints] | predicate[constraints] ) ( opt_eol ',' opt_eol ( constraint[constraints] | predicate[constraints] ) )* opt_eol ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:476:17: opt_eol ( constraint[constraints] | predicate[constraints] ) ( opt_eol ',' opt_eol ( constraint[constraints] | predicate[constraints] ) )* opt_eol { - following.push(FOLLOW_opt_eol_in_constraints1404); + following.push(FOLLOW_opt_eol_in_constraints1505); opt_eol(); following.pop(); - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:452:17: ( constraint[constraints] | predicate[constraints] ) - int alt42=2; - int LA42_0 = input.LA(1); - if ( LA42_0==EOL ) { - alt42=1; - } - else if ( LA42_0==ID ) { - int LA42_2 = input.LA(2); - if ( LA42_2==30 ) { - int LA42_3 = input.LA(3); - if ( LA42_3==ID ) { - int LA42_8 = input.LA(4); - if ( LA42_8==47 ) { - alt42=2; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:477:17: ( constraint[constraints] | predicate[constraints] ) + int alt45=2; + int LA45_0 = input.LA(1); + if ( LA45_0==EOL ) { + alt45=1; + } + else if ( LA45_0==ID ) { + int LA45_2 = input.LA(2); + if ( LA45_2==30 ) { + int LA45_3 = input.LA(3); + if ( LA45_3==ID ) { + int LA45_8 = input.LA(4); + if ( LA45_8==48 ) { + 
alt45=2; } - else if ( LA42_8==EOL||(LA42_8>=22 && LA42_8<=23)||(LA42_8>=39 && LA42_8<=46) ) { - alt42=1; + else if ( LA45_8==EOL||(LA45_8>=22 && LA45_8<=23)||(LA45_8>=40 && LA45_8<=47) ) { + alt45=1; } else { NoViableAltException nvae = - new NoViableAltException("452:17: ( constraint[constraints] | predicate[constraints] )", 42, 8, input); + new NoViableAltException("477:17: ( constraint[constraints] | predicate[constraints] )", 45, 8, input); throw nvae; } } - else if ( LA42_3==EOL ) { - alt42=1; + else if ( LA45_3==EOL ) { + alt45=1; } else { NoViableAltException nvae = - new NoViableAltException("452:17: ( constraint[constraints] | predicate[constraints] )", 42, 3, input); + new NoViableAltException("477:17: ( constraint[constraints] | predicate[constraints] )", 45, 3, input); throw nvae; } } - else if ( LA42_2==EOL||(LA42_2>=22 && LA42_2<=23)||(LA42_2>=39 && LA42_2<=46) ) { - alt42=1; + else if ( LA45_2==EOL||(LA45_2>=22 && LA45_2<=23)||(LA45_2>=40 && LA45_2<=47) ) { + alt45=1; } else { NoViableAltException nvae = - new NoViableAltException("452:17: ( constraint[constraints] | predicate[constraints] )", 42, 2, input); + new NoViableAltException("477:17: ( constraint[constraints] | predicate[constraints] )", 45, 2, input); throw nvae; } } else { NoViableAltException nvae = - new NoViableAltException("452:17: ( constraint[constraints] | predicate[constraints] )", 42, 0, input); + new NoViableAltException("477:17: ( constraint[constraints] | predicate[constraints] )", 45, 0, input); throw nvae; } - switch (alt42) { + switch (alt45) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:452:18: constraint[constraints] + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:477:18: constraint[constraints] { - following.push(FOLLOW_constraint_in_constraints1409); + following.push(FOLLOW_constraint_in_constraints1510); constraint(constraints); following.pop(); @@ -2747,9 +2921,9 @@ else if ( LA42_2==EOL||(LA42_2>=22 && LA42_2<=23)||(LA42_2>=39 && LA42_2<=46) ) } break; case 2 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:452:42: predicate[constraints] + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:477:42: predicate[constraints] { - following.push(FOLLOW_predicate_in_constraints1412); + following.push(FOLLOW_predicate_in_constraints1513); predicate(constraints); following.pop(); @@ -2759,80 +2933,80 @@ else if ( LA42_2==EOL||(LA42_2>=22 && LA42_2<=23)||(LA42_2>=39 && LA42_2<=46) ) } - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:453:17: ( opt_eol ',' opt_eol ( constraint[constraints] | predicate[constraints] ) )* - loop44: + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:478:17: ( opt_eol ',' opt_eol ( constraint[constraints] | predicate[constraints] ) )* + loop47: do { - int alt44=2; - alt44 = dfa44.predict(input); - switch (alt44) { + int alt47=2; + alt47 = dfa47.predict(input); + switch (alt47) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:453:19: opt_eol ',' opt_eol ( constraint[constraints] | predicate[constraints] ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:478:19: opt_eol ',' opt_eol ( constraint[constraints] | predicate[constraints] ) { - following.push(FOLLOW_opt_eol_in_constraints1420); + following.push(FOLLOW_opt_eol_in_constraints1521); opt_eol(); 
following.pop(); - match(input,22,FOLLOW_22_in_constraints1422); - following.push(FOLLOW_opt_eol_in_constraints1424); + match(input,22,FOLLOW_22_in_constraints1523); + following.push(FOLLOW_opt_eol_in_constraints1525); opt_eol(); following.pop(); - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:453:39: ( constraint[constraints] | predicate[constraints] ) - int alt43=2; - int LA43_0 = input.LA(1); - if ( LA43_0==EOL ) { - alt43=1; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:478:39: ( constraint[constraints] | predicate[constraints] ) + int alt46=2; + int LA46_0 = input.LA(1); + if ( LA46_0==EOL ) { + alt46=1; } - else if ( LA43_0==ID ) { - int LA43_2 = input.LA(2); - if ( LA43_2==30 ) { - int LA43_3 = input.LA(3); - if ( LA43_3==ID ) { - int LA43_8 = input.LA(4); - if ( LA43_8==47 ) { - alt43=2; + else if ( LA46_0==ID ) { + int LA46_2 = input.LA(2); + if ( LA46_2==30 ) { + int LA46_3 = input.LA(3); + if ( LA46_3==ID ) { + int LA46_8 = input.LA(4); + if ( LA46_8==48 ) { + alt46=2; } - else if ( LA43_8==EOL||(LA43_8>=22 && LA43_8<=23)||(LA43_8>=39 && LA43_8<=46) ) { - alt43=1; + else if ( LA46_8==EOL||(LA46_8>=22 && LA46_8<=23)||(LA46_8>=40 && LA46_8<=47) ) { + alt46=1; } else { NoViableAltException nvae = - new NoViableAltException("453:39: ( constraint[constraints] | predicate[constraints] )", 43, 8, input); + new NoViableAltException("478:39: ( constraint[constraints] | predicate[constraints] )", 46, 8, input); throw nvae; } } - else if ( LA43_3==EOL ) { - alt43=1; + else if ( LA46_3==EOL ) { + alt46=1; } else { NoViableAltException nvae = - new NoViableAltException("453:39: ( constraint[constraints] | predicate[constraints] )", 43, 3, input); + new NoViableAltException("478:39: ( constraint[constraints] | predicate[constraints] )", 46, 3, input); throw nvae; } } - else if ( LA43_2==EOL||(LA43_2>=22 && LA43_2<=23)||(LA43_2>=39 && LA43_2<=46) ) { - alt43=1; + else if ( LA46_2==EOL||(LA46_2>=22 && LA46_2<=23)||(LA46_2>=40 && LA46_2<=47) ) { + alt46=1; } else { NoViableAltException nvae = - new NoViableAltException("453:39: ( constraint[constraints] | predicate[constraints] )", 43, 2, input); + new NoViableAltException("478:39: ( constraint[constraints] | predicate[constraints] )", 46, 2, input); throw nvae; } } else { NoViableAltException nvae = - new NoViableAltException("453:39: ( constraint[constraints] | predicate[constraints] )", 43, 0, input); + new NoViableAltException("478:39: ( constraint[constraints] | predicate[constraints] )", 46, 0, input); throw nvae; } - switch (alt43) { + switch (alt46) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:453:40: constraint[constraints] + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:478:40: constraint[constraints] { - following.push(FOLLOW_constraint_in_constraints1427); + following.push(FOLLOW_constraint_in_constraints1528); constraint(constraints); following.pop(); @@ -2840,9 +3014,9 @@ else if ( LA43_2==EOL||(LA43_2>=22 && LA43_2<=23)||(LA43_2>=39 && LA43_2<=46) ) } break; case 2 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:453:64: predicate[constraints] + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:478:64: predicate[constraints] { - following.push(FOLLOW_predicate_in_constraints1430); + following.push(FOLLOW_predicate_in_constraints1531); predicate(constraints); following.pop(); @@ -2857,11 
+3031,11 @@ else if ( LA43_2==EOL||(LA43_2>=22 && LA43_2<=23)||(LA43_2>=39 && LA43_2<=46) ) break; default : - break loop44; + break loop47; } } while (true); - following.push(FOLLOW_opt_eol_in_constraints1438); + following.push(FOLLOW_opt_eol_in_constraints1539); opt_eol(); following.pop(); @@ -2881,7 +3055,7 @@ else if ( LA43_2==EOL||(LA43_2>=22 && LA43_2<=23)||(LA43_2>=39 && LA43_2<=46) ) // $ANTLR start constraint - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:457:1: constraint[List constraints] : opt_eol (fb= ID opt_eol ':' opt_eol )? f= ID opt_eol (op= ('=='|'>'|'>='|'<'|'<='|'!='|'contains'|'matches') opt_eol (bvc= ID | lc= literal_constraint | rvc= retval_constraint ) )? opt_eol ; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:482:1: constraint[List constraints] : opt_eol (fb= ID opt_eol ':' opt_eol )? f= ID opt_eol (op= ('=='|'>'|'>='|'<'|'<='|'!='|'contains'|'matches') opt_eol (bvc= ID | lc= literal_constraint | rvc= retval_constraint ) )? opt_eol ; public void constraint(List constraints) throws RecognitionException { Token fb=null; Token f=null; @@ -2896,28 +3070,28 @@ public void constraint(List constraints) throws RecognitionException { PatternDescr d = null; try { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:461:17: ( opt_eol (fb= ID opt_eol ':' opt_eol )? f= ID opt_eol (op= ('=='|'>'|'>='|'<'|'<='|'!='|'contains'|'matches') opt_eol (bvc= ID | lc= literal_constraint | rvc= retval_constraint ) )? opt_eol ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:461:17: opt_eol (fb= ID opt_eol ':' opt_eol )? f= ID opt_eol (op= ('=='|'>'|'>='|'<'|'<='|'!='|'contains'|'matches') opt_eol (bvc= ID | lc= literal_constraint | rvc= retval_constraint ) )? opt_eol + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:486:17: ( opt_eol (fb= ID opt_eol ':' opt_eol )? f= ID opt_eol (op= ('=='|'>'|'>='|'<'|'<='|'!='|'contains'|'matches') opt_eol (bvc= ID | lc= literal_constraint | rvc= retval_constraint ) )? opt_eol ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:486:17: opt_eol (fb= ID opt_eol ':' opt_eol )? f= ID opt_eol (op= ('=='|'>'|'>='|'<'|'<='|'!='|'contains'|'matches') opt_eol (bvc= ID | lc= literal_constraint | rvc= retval_constraint ) )? opt_eol { - following.push(FOLLOW_opt_eol_in_constraint1457); + following.push(FOLLOW_opt_eol_in_constraint1558); opt_eol(); following.pop(); - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:462:17: (fb= ID opt_eol ':' opt_eol )? - int alt45=2; - alt45 = dfa45.predict(input); - switch (alt45) { + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:487:17: (fb= ID opt_eol ':' opt_eol )? 
+ int alt48=2; + alt48 = dfa48.predict(input); + switch (alt48) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:462:19: fb= ID opt_eol ':' opt_eol + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:487:19: fb= ID opt_eol ':' opt_eol { fb=(Token)input.LT(1); - match(input,ID,FOLLOW_ID_in_constraint1465); - following.push(FOLLOW_opt_eol_in_constraint1467); + match(input,ID,FOLLOW_ID_in_constraint1566); + following.push(FOLLOW_opt_eol_in_constraint1568); opt_eol(); following.pop(); - match(input,30,FOLLOW_30_in_constraint1469); - following.push(FOLLOW_opt_eol_in_constraint1471); + match(input,30,FOLLOW_30_in_constraint1570); + following.push(FOLLOW_opt_eol_in_constraint1572); opt_eol(); following.pop(); @@ -2928,7 +3102,7 @@ public void constraint(List constraints) throws RecognitionException { } f=(Token)input.LT(1); - match(input,ID,FOLLOW_ID_in_constraint1481); + match(input,ID,FOLLOW_ID_in_constraint1582); if ( fb != null ) { //System.err.println( "fb: " + fb.getText() ); @@ -2940,72 +3114,72 @@ public void constraint(List constraints) throws RecognitionException { constraints.add( d ); } - following.push(FOLLOW_opt_eol_in_constraint1491); + following.push(FOLLOW_opt_eol_in_constraint1592); opt_eol(); following.pop(); - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:475:33: (op= ('=='|'>'|'>='|'<'|'<='|'!='|'contains'|'matches') opt_eol (bvc= ID | lc= literal_constraint | rvc= retval_constraint ) )? - int alt47=2; - int LA47_0 = input.LA(1); - if ( (LA47_0>=39 && LA47_0<=46) ) { - alt47=1; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:500:33: (op= ('=='|'>'|'>='|'<'|'<='|'!='|'contains'|'matches') opt_eol (bvc= ID | lc= literal_constraint | rvc= retval_constraint ) )? 
+ int alt50=2; + int LA50_0 = input.LA(1); + if ( (LA50_0>=40 && LA50_0<=47) ) { + alt50=1; } - else if ( LA47_0==EOL||(LA47_0>=22 && LA47_0<=23) ) { - alt47=2; + else if ( LA50_0==EOL||(LA50_0>=22 && LA50_0<=23) ) { + alt50=2; } else { NoViableAltException nvae = - new NoViableAltException("475:33: (op= (\'==\'|\'>\'|\'>=\'|\'<\'|\'<=\'|\'!=\'|\'contains\'|\'matches\') opt_eol (bvc= ID | lc= literal_constraint | rvc= retval_constraint ) )?", 47, 0, input); + new NoViableAltException("500:33: (op= (\'==\'|\'>\'|\'>=\'|\'<\'|\'<=\'|\'!=\'|\'contains\'|\'matches\') opt_eol (bvc= ID | lc= literal_constraint | rvc= retval_constraint ) )?", 50, 0, input); throw nvae; } - switch (alt47) { + switch (alt50) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:475:41: op= ('=='|'>'|'>='|'<'|'<='|'!='|'contains'|'matches') opt_eol (bvc= ID | lc= literal_constraint | rvc= retval_constraint ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:500:41: op= ('=='|'>'|'>='|'<'|'<='|'!='|'contains'|'matches') opt_eol (bvc= ID | lc= literal_constraint | rvc= retval_constraint ) { op=(Token)input.LT(1); - if ( (input.LA(1)>=39 && input.LA(1)<=46) ) { + if ( (input.LA(1)>=40 && input.LA(1)<=47) ) { input.consume(); errorRecovery=false; } else { MismatchedSetException mse = new MismatchedSetException(null,input); - recoverFromMismatchedSet(input,mse,FOLLOW_set_in_constraint1499); throw mse; + recoverFromMismatchedSet(input,mse,FOLLOW_set_in_constraint1600); throw mse; } - following.push(FOLLOW_opt_eol_in_constraint1571); + following.push(FOLLOW_opt_eol_in_constraint1672); opt_eol(); following.pop(); - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:485:41: (bvc= ID | lc= literal_constraint | rvc= retval_constraint ) - int alt46=3; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:510:41: (bvc= ID | lc= literal_constraint | rvc= retval_constraint ) + int alt49=3; switch ( input.LA(1) ) { case ID: - alt46=1; + alt49=1; break; case INT: case BOOL: case STRING: case FLOAT: - alt46=2; + alt49=2; break; case 21: - alt46=3; + alt49=3; break; default: NoViableAltException nvae = - new NoViableAltException("485:41: (bvc= ID | lc= literal_constraint | rvc= retval_constraint )", 46, 0, input); + new NoViableAltException("510:41: (bvc= ID | lc= literal_constraint | rvc= retval_constraint )", 49, 0, input); throw nvae; } - switch (alt46) { + switch (alt49) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:485:49: bvc= ID + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:510:49: bvc= ID { bvc=(Token)input.LT(1); - match(input,ID,FOLLOW_ID_in_constraint1589); + match(input,ID,FOLLOW_ID_in_constraint1690); d = new BoundVariableDescr( f.getText(), op.getText(), bvc.getText() ); d.setLocation( f.getLine(), f.getCharPositionInLine() ); @@ -3015,9 +3189,9 @@ else if ( LA47_0==EOL||(LA47_0>=22 && LA47_0<=23) ) { } break; case 2 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:492:49: lc= literal_constraint + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:517:49: lc= literal_constraint { - following.push(FOLLOW_literal_constraint_in_constraint1614); + following.push(FOLLOW_literal_constraint_in_constraint1715); lc=literal_constraint(); following.pop(); @@ -3030,9 +3204,9 @@ else if ( LA47_0==EOL||(LA47_0>=22 && LA47_0<=23) ) { 
} break; case 3 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:498:49: rvc= retval_constraint + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:523:49: rvc= retval_constraint { - following.push(FOLLOW_retval_constraint_in_constraint1634); + following.push(FOLLOW_retval_constraint_in_constraint1735); rvc=retval_constraint(); following.pop(); @@ -3053,7 +3227,7 @@ else if ( LA47_0==EOL||(LA47_0>=22 && LA47_0<=23) ) { } - following.push(FOLLOW_opt_eol_in_constraint1667); + following.push(FOLLOW_opt_eol_in_constraint1768); opt_eol(); following.pop(); @@ -3073,7 +3247,7 @@ else if ( LA47_0==EOL||(LA47_0>=22 && LA47_0<=23) ) { // $ANTLR start literal_constraint - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:509:1: literal_constraint returns [String text] : (t= STRING | t= INT | t= FLOAT | t= BOOL ) ; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:534:1: literal_constraint returns [String text] : (t= STRING | t= INT | t= FLOAT | t= BOOL ) ; public String literal_constraint() throws RecognitionException { String text; Token t=null; @@ -3082,64 +3256,64 @@ public String literal_constraint() throws RecognitionException { text = null; try { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:513:17: ( (t= STRING | t= INT | t= FLOAT | t= BOOL ) ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:513:17: (t= STRING | t= INT | t= FLOAT | t= BOOL ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:538:17: ( (t= STRING | t= INT | t= FLOAT | t= BOOL ) ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:538:17: (t= STRING | t= INT | t= FLOAT | t= BOOL ) { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:513:17: (t= STRING | t= INT | t= FLOAT | t= BOOL ) - int alt48=4; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:538:17: (t= STRING | t= INT | t= FLOAT | t= BOOL ) + int alt51=4; switch ( input.LA(1) ) { case STRING: - alt48=1; + alt51=1; break; case INT: - alt48=2; + alt51=2; break; case FLOAT: - alt48=3; + alt51=3; break; case BOOL: - alt48=4; + alt51=4; break; default: NoViableAltException nvae = - new NoViableAltException("513:17: (t= STRING | t= INT | t= FLOAT | t= BOOL )", 48, 0, input); + new NoViableAltException("538:17: (t= STRING | t= INT | t= FLOAT | t= BOOL )", 51, 0, input); throw nvae; } - switch (alt48) { + switch (alt51) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:513:25: t= STRING + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:538:25: t= STRING { t=(Token)input.LT(1); - match(input,STRING,FOLLOW_STRING_in_literal_constraint1694); + match(input,STRING,FOLLOW_STRING_in_literal_constraint1795); text = getString( t ); } break; case 2 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:514:25: t= INT + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:539:25: t= INT { t=(Token)input.LT(1); - match(input,INT,FOLLOW_INT_in_literal_constraint1705); + match(input,INT,FOLLOW_INT_in_literal_constraint1806); text = t.getText(); } break; case 3 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:515:25: t= FLOAT + // 
C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:540:25: t= FLOAT { t=(Token)input.LT(1); - match(input,FLOAT,FOLLOW_FLOAT_in_literal_constraint1718); + match(input,FLOAT,FOLLOW_FLOAT_in_literal_constraint1819); text = t.getText(); } break; case 4 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:516:25: t= BOOL + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:541:25: t= BOOL { t=(Token)input.LT(1); - match(input,BOOL,FOLLOW_BOOL_in_literal_constraint1729); + match(input,BOOL,FOLLOW_BOOL_in_literal_constraint1830); text = t.getText(); } @@ -3163,7 +3337,7 @@ public String literal_constraint() throws RecognitionException { // $ANTLR start retval_constraint - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:520:1: retval_constraint returns [String text] : '(' c= paren_chunk ')' ; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:545:1: retval_constraint returns [String text] : '(' c= paren_chunk ')' ; public String retval_constraint() throws RecognitionException { String text; String c = null; @@ -3173,15 +3347,15 @@ public String retval_constraint() throws RecognitionException { text = null; try { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:525:17: ( '(' c= paren_chunk ')' ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:525:17: '(' c= paren_chunk ')' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:550:17: ( '(' c= paren_chunk ')' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:550:17: '(' c= paren_chunk ')' { - match(input,21,FOLLOW_21_in_retval_constraint1762); - following.push(FOLLOW_paren_chunk_in_retval_constraint1766); + match(input,21,FOLLOW_21_in_retval_constraint1863); + following.push(FOLLOW_paren_chunk_in_retval_constraint1867); c=paren_chunk(); following.pop(); - match(input,23,FOLLOW_23_in_retval_constraint1768); + match(input,23,FOLLOW_23_in_retval_constraint1869); text = c; } @@ -3199,7 +3373,7 @@ public String retval_constraint() throws RecognitionException { // $ANTLR start predicate - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:528:1: predicate[List constraints] : decl= ID ':' field= ID '->' '(' text= paren_chunk ')' ; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:553:1: predicate[List constraints] : decl= ID ':' field= ID '->' '(' text= paren_chunk ')' ; public void predicate(List constraints) throws RecognitionException { Token decl=null; Token field=null; @@ -3207,21 +3381,21 @@ public void predicate(List constraints) throws RecognitionException { try { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:530:17: (decl= ID ':' field= ID '->' '(' text= paren_chunk ')' ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:530:17: decl= ID ':' field= ID '->' '(' text= paren_chunk ')' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:555:17: (decl= ID ':' field= ID '->' '(' text= paren_chunk ')' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:555:17: decl= ID ':' field= ID '->' '(' text= paren_chunk ')' { decl=(Token)input.LT(1); - match(input,ID,FOLLOW_ID_in_predicate1786); - match(input,30,FOLLOW_30_in_predicate1788); + 
match(input,ID,FOLLOW_ID_in_predicate1887); + match(input,30,FOLLOW_30_in_predicate1889); field=(Token)input.LT(1); - match(input,ID,FOLLOW_ID_in_predicate1792); - match(input,47,FOLLOW_47_in_predicate1794); - match(input,21,FOLLOW_21_in_predicate1796); - following.push(FOLLOW_paren_chunk_in_predicate1800); + match(input,ID,FOLLOW_ID_in_predicate1893); + match(input,48,FOLLOW_48_in_predicate1895); + match(input,21,FOLLOW_21_in_predicate1897); + following.push(FOLLOW_paren_chunk_in_predicate1901); text=paren_chunk(); following.pop(); - match(input,23,FOLLOW_23_in_predicate1802); + match(input,23,FOLLOW_23_in_predicate1903); PredicateDescr d = new PredicateDescr(field.getText(), decl.getText(), text ); constraints.add( d ); @@ -3242,7 +3416,7 @@ public void predicate(List constraints) throws RecognitionException { // $ANTLR start paren_chunk - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:537:1: paren_chunk returns [String text] : ( options {greedy=false; } : '(' c= paren_chunk ')' | any= . )* ; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:562:1: paren_chunk returns [String text] : ( options {greedy=false; } : '(' c= paren_chunk ')' | any= . )* ; public String paren_chunk() throws RecognitionException { String text; Token any=null; @@ -3253,22 +3427,22 @@ public String paren_chunk() throws RecognitionException { text = null; try { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:543:17: ( ( options {greedy=false; } : '(' c= paren_chunk ')' | any= . )* ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:543:17: ( options {greedy=false; } : '(' c= paren_chunk ')' | any= . )* + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:568:17: ( ( options {greedy=false; } : '(' c= paren_chunk ')' | any= . )* ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:568:17: ( options {greedy=false; } : '(' c= paren_chunk ')' | any= . )* { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:543:17: ( options {greedy=false; } : '(' c= paren_chunk ')' | any= . )* - loop49: + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:568:17: ( options {greedy=false; } : '(' c= paren_chunk ')' | any= . 
)* + loop52: do { - int alt49=3; + int alt52=3; switch ( input.LA(1) ) { case EOL: - alt49=3; + alt52=3; break; case 23: - alt49=3; + alt52=3; break; case 21: - alt49=1; + alt52=1; break; case ID: case INT: @@ -3319,21 +3493,22 @@ public String paren_chunk() throws RecognitionException { case 53: case 54: case 55: - alt49=2; + case 56: + alt52=2; break; } - switch (alt49) { + switch (alt52) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:544:25: '(' c= paren_chunk ')' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:569:25: '(' c= paren_chunk ')' { - match(input,21,FOLLOW_21_in_paren_chunk1847); - following.push(FOLLOW_paren_chunk_in_paren_chunk1851); + match(input,21,FOLLOW_21_in_paren_chunk1948); + following.push(FOLLOW_paren_chunk_in_paren_chunk1952); c=paren_chunk(); following.pop(); - match(input,23,FOLLOW_23_in_paren_chunk1853); + match(input,23,FOLLOW_23_in_paren_chunk1954); //System.err.println( "chunk [" + c + "]" ); if ( c == null ) { @@ -3349,7 +3524,7 @@ public String paren_chunk() throws RecognitionException { } break; case 2 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:556:19: any= . + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:581:19: any= . { any=(Token)input.LT(1); matchAny(input); @@ -3366,7 +3541,7 @@ public String paren_chunk() throws RecognitionException { break; default : - break loop49; + break loop52; } } while (true); @@ -3386,7 +3561,7 @@ public String paren_chunk() throws RecognitionException { // $ANTLR start curly_chunk - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:568:1: curly_chunk returns [String text] : ( options {greedy=false; } : '{' c= curly_chunk '}' | any= . )* ; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:593:1: curly_chunk returns [String text] : ( options {greedy=false; } : '{' c= curly_chunk '}' | any= . )* ; public String curly_chunk() throws RecognitionException { String text; Token any=null; @@ -3397,19 +3572,19 @@ public String curly_chunk() throws RecognitionException { text = null; try { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:574:17: ( ( options {greedy=false; } : '{' c= curly_chunk '}' | any= . )* ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:574:17: ( options {greedy=false; } : '{' c= curly_chunk '}' | any= . )* + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:599:17: ( ( options {greedy=false; } : '{' c= curly_chunk '}' | any= . )* ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:599:17: ( options {greedy=false; } : '{' c= curly_chunk '}' | any= . )* { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:574:17: ( options {greedy=false; } : '{' c= curly_chunk '}' | any= . )* - loop50: + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:599:17: ( options {greedy=false; } : '{' c= curly_chunk '}' | any= . 
)* + loop53: do { - int alt50=3; + int alt53=3; switch ( input.LA(1) ) { case 25: - alt50=3; + alt53=3; break; case 24: - alt50=1; + alt53=1; break; case EOL: case ID: @@ -3461,21 +3636,22 @@ public String curly_chunk() throws RecognitionException { case 53: case 54: case 55: - alt50=2; + case 56: + alt53=2; break; } - switch (alt50) { + switch (alt53) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:575:25: '{' c= curly_chunk '}' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:600:25: '{' c= curly_chunk '}' { - match(input,24,FOLLOW_24_in_curly_chunk1921); - following.push(FOLLOW_curly_chunk_in_curly_chunk1925); + match(input,24,FOLLOW_24_in_curly_chunk2022); + following.push(FOLLOW_curly_chunk_in_curly_chunk2026); c=curly_chunk(); following.pop(); - match(input,25,FOLLOW_25_in_curly_chunk1927); + match(input,25,FOLLOW_25_in_curly_chunk2028); //System.err.println( "chunk [" + c + "]" ); if ( c == null ) { @@ -3491,7 +3667,7 @@ public String curly_chunk() throws RecognitionException { } break; case 2 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:587:19: any= . + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:612:19: any= . { any=(Token)input.LT(1); matchAny(input); @@ -3508,7 +3684,7 @@ public String curly_chunk() throws RecognitionException { break; default : - break loop50; + break loop53; } } while (true); @@ -3528,7 +3704,7 @@ public String curly_chunk() throws RecognitionException { // $ANTLR start lhs_or - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:599:1: lhs_or returns [PatternDescr d] : left= lhs_and ( ('or'|'||')right= lhs_and )* ; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:624:1: lhs_or returns [PatternDescr d] : left= lhs_and ( ('or'|'||')right= lhs_and )* ; public PatternDescr lhs_or() throws RecognitionException { PatternDescr d; PatternDescr left = null; @@ -3540,40 +3716,40 @@ public PatternDescr lhs_or() throws RecognitionException { d = null; try { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:604:17: (left= lhs_and ( ('or'|'||')right= lhs_and )* ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:604:17: left= lhs_and ( ('or'|'||')right= lhs_and )* + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:629:17: (left= lhs_and ( ('or'|'||')right= lhs_and )* ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:629:17: left= lhs_and ( ('or'|'||')right= lhs_and )* { OrDescr or = null; - following.push(FOLLOW_lhs_and_in_lhs_or1985); + following.push(FOLLOW_lhs_and_in_lhs_or2086); left=lhs_and(); following.pop(); d = left; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:606:17: ( ('or'|'||')right= lhs_and )* - loop51: + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:631:17: ( ('or'|'||')right= lhs_and )* + loop54: do { - int alt51=2; - int LA51_0 = input.LA(1); - if ( LA51_0==38||LA51_0==48 ) { - alt51=1; + int alt54=2; + int LA54_0 = input.LA(1); + if ( LA54_0==39||LA54_0==49 ) { + alt54=1; } - switch (alt51) { + switch (alt54) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:606:25: ('or'|'||')right= lhs_and + // 
C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:631:25: ('or'|'||')right= lhs_and { - if ( input.LA(1)==38||input.LA(1)==48 ) { + if ( input.LA(1)==39||input.LA(1)==49 ) { input.consume(); errorRecovery=false; } else { MismatchedSetException mse = new MismatchedSetException(null,input); - recoverFromMismatchedSet(input,mse,FOLLOW_set_in_lhs_or1995); throw mse; + recoverFromMismatchedSet(input,mse,FOLLOW_set_in_lhs_or2096); throw mse; } - following.push(FOLLOW_lhs_and_in_lhs_or2006); + following.push(FOLLOW_lhs_and_in_lhs_or2107); right=lhs_and(); following.pop(); @@ -3591,7 +3767,7 @@ public PatternDescr lhs_or() throws RecognitionException { break; default : - break loop51; + break loop54; } } while (true); @@ -3611,7 +3787,7 @@ public PatternDescr lhs_or() throws RecognitionException { // $ANTLR start lhs_and - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:620:1: lhs_and returns [PatternDescr d] : left= lhs_unary ( ('and'|'&&')right= lhs_unary )* ; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:645:1: lhs_and returns [PatternDescr d] : left= lhs_unary ( ('and'|'&&')right= lhs_unary )* ; public PatternDescr lhs_and() throws RecognitionException { PatternDescr d; PatternDescr left = null; @@ -3623,40 +3799,40 @@ public PatternDescr lhs_and() throws RecognitionException { d = null; try { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:625:17: (left= lhs_unary ( ('and'|'&&')right= lhs_unary )* ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:625:17: left= lhs_unary ( ('and'|'&&')right= lhs_unary )* + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:650:17: (left= lhs_unary ( ('and'|'&&')right= lhs_unary )* ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:650:17: left= lhs_unary ( ('and'|'&&')right= lhs_unary )* { AndDescr and = null; - following.push(FOLLOW_lhs_unary_in_lhs_and2046); + following.push(FOLLOW_lhs_unary_in_lhs_and2147); left=lhs_unary(); following.pop(); d = left; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:627:17: ( ('and'|'&&')right= lhs_unary )* - loop52: + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:652:17: ( ('and'|'&&')right= lhs_unary )* + loop55: do { - int alt52=2; - int LA52_0 = input.LA(1); - if ( (LA52_0>=49 && LA52_0<=50) ) { - alt52=1; + int alt55=2; + int LA55_0 = input.LA(1); + if ( (LA55_0>=50 && LA55_0<=51) ) { + alt55=1; } - switch (alt52) { + switch (alt55) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:627:25: ('and'|'&&')right= lhs_unary + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:652:25: ('and'|'&&')right= lhs_unary { - if ( (input.LA(1)>=49 && input.LA(1)<=50) ) { + if ( (input.LA(1)>=50 && input.LA(1)<=51) ) { input.consume(); errorRecovery=false; } else { MismatchedSetException mse = new MismatchedSetException(null,input); - recoverFromMismatchedSet(input,mse,FOLLOW_set_in_lhs_and2055); throw mse; + recoverFromMismatchedSet(input,mse,FOLLOW_set_in_lhs_and2156); throw mse; } - following.push(FOLLOW_lhs_unary_in_lhs_and2066); + following.push(FOLLOW_lhs_unary_in_lhs_and2167); right=lhs_unary(); following.pop(); @@ -3674,7 +3850,7 @@ public PatternDescr lhs_and() throws RecognitionException { break; default : - break loop52; + 
break loop55; } } while (true); @@ -3694,7 +3870,7 @@ public PatternDescr lhs_and() throws RecognitionException { // $ANTLR start lhs_unary - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:641:1: lhs_unary returns [PatternDescr d] : (u= lhs_exist | u= lhs_not | u= lhs_eval | u= lhs_column | '(' u= lhs ')' ) ; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:666:1: lhs_unary returns [PatternDescr d] : (u= lhs_exist | u= lhs_not | u= lhs_eval | u= lhs_column | '(' u= lhs ')' ) ; public PatternDescr lhs_unary() throws RecognitionException { PatternDescr d; PatternDescr u = null; @@ -3704,39 +3880,39 @@ public PatternDescr lhs_unary() throws RecognitionException { d = null; try { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:645:17: ( (u= lhs_exist | u= lhs_not | u= lhs_eval | u= lhs_column | '(' u= lhs ')' ) ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:645:17: (u= lhs_exist | u= lhs_not | u= lhs_eval | u= lhs_column | '(' u= lhs ')' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:670:17: ( (u= lhs_exist | u= lhs_not | u= lhs_eval | u= lhs_column | '(' u= lhs ')' ) ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:670:17: (u= lhs_exist | u= lhs_not | u= lhs_eval | u= lhs_column | '(' u= lhs ')' ) { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:645:17: (u= lhs_exist | u= lhs_not | u= lhs_eval | u= lhs_column | '(' u= lhs ')' ) - int alt53=5; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:670:17: (u= lhs_exist | u= lhs_not | u= lhs_eval | u= lhs_column | '(' u= lhs ')' ) + int alt56=5; switch ( input.LA(1) ) { - case 51: - alt53=1; - break; case 52: - alt53=2; + alt56=1; break; case 53: - alt53=3; + alt56=2; + break; + case 54: + alt56=3; break; case ID: - alt53=4; + alt56=4; break; case 21: - alt53=5; + alt56=5; break; default: NoViableAltException nvae = - new NoViableAltException("645:17: (u= lhs_exist | u= lhs_not | u= lhs_eval | u= lhs_column | \'(\' u= lhs \')\' )", 53, 0, input); + new NoViableAltException("670:17: (u= lhs_exist | u= lhs_not | u= lhs_eval | u= lhs_column | \'(\' u= lhs \')\' )", 56, 0, input); throw nvae; } - switch (alt53) { + switch (alt56) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:645:25: u= lhs_exist + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:670:25: u= lhs_exist { - following.push(FOLLOW_lhs_exist_in_lhs_unary2104); + following.push(FOLLOW_lhs_exist_in_lhs_unary2205); u=lhs_exist(); following.pop(); @@ -3744,9 +3920,9 @@ public PatternDescr lhs_unary() throws RecognitionException { } break; case 2 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:646:25: u= lhs_not + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:671:25: u= lhs_not { - following.push(FOLLOW_lhs_not_in_lhs_unary2112); + following.push(FOLLOW_lhs_not_in_lhs_unary2213); u=lhs_not(); following.pop(); @@ -3754,9 +3930,9 @@ public PatternDescr lhs_unary() throws RecognitionException { } break; case 3 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:647:25: u= lhs_eval + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:672:25: u= lhs_eval { - 
following.push(FOLLOW_lhs_eval_in_lhs_unary2120); + following.push(FOLLOW_lhs_eval_in_lhs_unary2221); u=lhs_eval(); following.pop(); @@ -3764,9 +3940,9 @@ public PatternDescr lhs_unary() throws RecognitionException { } break; case 4 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:648:25: u= lhs_column + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:673:25: u= lhs_column { - following.push(FOLLOW_lhs_column_in_lhs_unary2128); + following.push(FOLLOW_lhs_column_in_lhs_unary2229); u=lhs_column(); following.pop(); @@ -3774,14 +3950,14 @@ public PatternDescr lhs_unary() throws RecognitionException { } break; case 5 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:649:25: '(' u= lhs ')' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:674:25: '(' u= lhs ')' { - match(input,21,FOLLOW_21_in_lhs_unary2134); - following.push(FOLLOW_lhs_in_lhs_unary2138); + match(input,21,FOLLOW_21_in_lhs_unary2235); + following.push(FOLLOW_lhs_in_lhs_unary2239); u=lhs(); following.pop(); - match(input,23,FOLLOW_23_in_lhs_unary2140); + match(input,23,FOLLOW_23_in_lhs_unary2241); } break; @@ -3805,7 +3981,7 @@ public PatternDescr lhs_unary() throws RecognitionException { // $ANTLR start lhs_exist - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:653:1: lhs_exist returns [PatternDescr d] : loc= 'exists' column= lhs_column ; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:678:1: lhs_exist returns [PatternDescr d] : loc= 'exists' column= lhs_column ; public PatternDescr lhs_exist() throws RecognitionException { PatternDescr d; Token loc=null; @@ -3816,12 +3992,12 @@ public PatternDescr lhs_exist() throws RecognitionException { d = null; try { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:657:17: (loc= 'exists' column= lhs_column ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:657:17: loc= 'exists' column= lhs_column + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:682:17: (loc= 'exists' column= lhs_column ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:682:17: loc= 'exists' column= lhs_column { loc=(Token)input.LT(1); - match(input,51,FOLLOW_51_in_lhs_exist2170); - following.push(FOLLOW_lhs_column_in_lhs_exist2174); + match(input,52,FOLLOW_52_in_lhs_exist2271); + following.push(FOLLOW_lhs_column_in_lhs_exist2275); column=lhs_column(); following.pop(); @@ -3845,7 +4021,7 @@ public PatternDescr lhs_exist() throws RecognitionException { // $ANTLR start lhs_not - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:664:1: lhs_not returns [NotDescr d] : loc= 'not' column= lhs_column ; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:689:1: lhs_not returns [NotDescr d] : loc= 'not' column= lhs_column ; public NotDescr lhs_not() throws RecognitionException { NotDescr d; Token loc=null; @@ -3856,12 +4032,12 @@ public NotDescr lhs_not() throws RecognitionException { d = null; try { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:668:17: (loc= 'not' column= lhs_column ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:668:17: loc= 'not' column= lhs_column + // 
C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:693:17: (loc= 'not' column= lhs_column ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:693:17: loc= 'not' column= lhs_column { loc=(Token)input.LT(1); - match(input,52,FOLLOW_52_in_lhs_not2204); - following.push(FOLLOW_lhs_column_in_lhs_not2208); + match(input,53,FOLLOW_53_in_lhs_not2305); + following.push(FOLLOW_lhs_column_in_lhs_not2309); column=lhs_column(); following.pop(); @@ -3885,7 +4061,7 @@ public NotDescr lhs_not() throws RecognitionException { // $ANTLR start lhs_eval - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:675:1: lhs_eval returns [PatternDescr d] : 'eval' '(' c= paren_chunk ')' ; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:700:1: lhs_eval returns [PatternDescr d] : 'eval' '(' c= paren_chunk ')' ; public PatternDescr lhs_eval() throws RecognitionException { PatternDescr d; String c = null; @@ -3896,16 +4072,16 @@ public PatternDescr lhs_eval() throws RecognitionException { String text = ""; try { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:680:17: ( 'eval' '(' c= paren_chunk ')' ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:680:17: 'eval' '(' c= paren_chunk ')' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:705:17: ( 'eval' '(' c= paren_chunk ')' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:705:17: 'eval' '(' c= paren_chunk ')' { - match(input,53,FOLLOW_53_in_lhs_eval2234); - match(input,21,FOLLOW_21_in_lhs_eval2236); - following.push(FOLLOW_paren_chunk_in_lhs_eval2240); + match(input,54,FOLLOW_54_in_lhs_eval2335); + match(input,21,FOLLOW_21_in_lhs_eval2337); + following.push(FOLLOW_paren_chunk_in_lhs_eval2341); c=paren_chunk(); following.pop(); - match(input,23,FOLLOW_23_in_lhs_eval2242); + match(input,23,FOLLOW_23_in_lhs_eval2343); d = new EvalDescr( c ); } @@ -3923,7 +4099,7 @@ public PatternDescr lhs_eval() throws RecognitionException { // $ANTLR start dotted_name - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:684:1: dotted_name returns [String name] : id= ID ( '.' id= ID )* ; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:709:1: dotted_name returns [String name] : id= ID ( '.' id= ID )* ; public String dotted_name() throws RecognitionException { String name; Token id=null; @@ -3932,36 +4108,36 @@ public String dotted_name() throws RecognitionException { name = null; try { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:689:17: (id= ID ( '.' id= ID )* ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:689:17: id= ID ( '.' id= ID )* + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:714:17: (id= ID ( '.' id= ID )* ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:714:17: id= ID ( '.' id= ID )* { id=(Token)input.LT(1); - match(input,ID,FOLLOW_ID_in_dotted_name2274); + match(input,ID,FOLLOW_ID_in_dotted_name2375); name=id.getText(); - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:689:46: ( '.' id= ID )* - loop54: + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:714:46: ( '.' 
id= ID )* + loop57: do { - int alt54=2; - int LA54_0 = input.LA(1); - if ( LA54_0==54 ) { - alt54=1; + int alt57=2; + int LA57_0 = input.LA(1); + if ( LA57_0==55 ) { + alt57=1; } - switch (alt54) { + switch (alt57) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:689:48: '.' id= ID + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:714:48: '.' id= ID { - match(input,54,FOLLOW_54_in_dotted_name2280); + match(input,55,FOLLOW_55_in_dotted_name2381); id=(Token)input.LT(1); - match(input,ID,FOLLOW_ID_in_dotted_name2284); + match(input,ID,FOLLOW_ID_in_dotted_name2385); name = name + "." + id.getText(); } break; default : - break loop54; + break loop57; } } while (true); @@ -3981,7 +4157,7 @@ public String dotted_name() throws RecognitionException { // $ANTLR start word - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:693:1: word returns [String word] : (id= ID | 'import' | 'use' | 'rule' | 'query' | 'salience' | 'no-loop' | 'when' | 'then' | 'end' | str= STRING ); + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:718:1: word returns [String word] : (id= ID | 'import' | 'use' | 'rule' | 'query' | 'salience' | 'no-loop' | 'when' | 'then' | 'end' | str= STRING ); public String word() throws RecognitionException { String word; Token id=null; @@ -3991,136 +4167,136 @@ public String word() throws RecognitionException { word = null; try { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:697:17: (id= ID | 'import' | 'use' | 'rule' | 'query' | 'salience' | 'no-loop' | 'when' | 'then' | 'end' | str= STRING ) - int alt55=11; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:722:17: (id= ID | 'import' | 'use' | 'rule' | 'query' | 'salience' | 'no-loop' | 'when' | 'then' | 'end' | str= STRING ) + int alt58=11; switch ( input.LA(1) ) { case ID: - alt55=1; + alt58=1; break; case 17: - alt55=2; + alt58=2; break; - case 55: - alt55=3; + case 56: + alt58=3; break; case 28: - alt55=4; + alt58=4; break; case 26: - alt55=5; + alt58=5; break; case 33: - alt55=6; + alt58=6; break; case 34: - alt55=7; + alt58=7; break; case 29: - alt55=8; + alt58=8; break; case 31: - alt55=9; + alt58=9; break; case 27: - alt55=10; + alt58=10; break; case STRING: - alt55=11; + alt58=11; break; default: NoViableAltException nvae = - new NoViableAltException("693:1: word returns [String word] : (id= ID | \'import\' | \'use\' | \'rule\' | \'query\' | \'salience\' | \'no-loop\' | \'when\' | \'then\' | \'end\' | str= STRING );", 55, 0, input); + new NoViableAltException("718:1: word returns [String word] : (id= ID | \'import\' | \'use\' | \'rule\' | \'query\' | \'salience\' | \'no-loop\' | \'when\' | \'then\' | \'end\' | str= STRING );", 58, 0, input); throw nvae; } - switch (alt55) { + switch (alt58) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:697:17: id= ID + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:722:17: id= ID { id=(Token)input.LT(1); - match(input,ID,FOLLOW_ID_in_word2314); + match(input,ID,FOLLOW_ID_in_word2415); word=id.getText(); } break; case 2 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:698:17: 'import' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:723:17: 'import' { - match(input,17,FOLLOW_17_in_word2326); + 
match(input,17,FOLLOW_17_in_word2427); word="import"; } break; case 3 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:699:17: 'use' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:724:17: 'use' { - match(input,55,FOLLOW_55_in_word2335); + match(input,56,FOLLOW_56_in_word2436); word="use"; } break; case 4 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:700:17: 'rule' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:725:17: 'rule' { - match(input,28,FOLLOW_28_in_word2347); + match(input,28,FOLLOW_28_in_word2448); word="rule"; } break; case 5 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:701:17: 'query' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:726:17: 'query' { - match(input,26,FOLLOW_26_in_word2358); + match(input,26,FOLLOW_26_in_word2459); word="query"; } break; case 6 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:702:17: 'salience' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:727:17: 'salience' { - match(input,33,FOLLOW_33_in_word2368); + match(input,33,FOLLOW_33_in_word2469); word="salience"; } break; case 7 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:703:17: 'no-loop' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:728:17: 'no-loop' { - match(input,34,FOLLOW_34_in_word2376); + match(input,34,FOLLOW_34_in_word2477); word="no-loop"; } break; case 8 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:704:17: 'when' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:729:17: 'when' { - match(input,29,FOLLOW_29_in_word2384); + match(input,29,FOLLOW_29_in_word2485); word="when"; } break; case 9 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:705:17: 'then' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:730:17: 'then' { - match(input,31,FOLLOW_31_in_word2395); + match(input,31,FOLLOW_31_in_word2496); word="then"; } break; case 10 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:706:17: 'end' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:731:17: 'end' { - match(input,27,FOLLOW_27_in_word2406); + match(input,27,FOLLOW_27_in_word2507); word="end"; } break; case 11 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:707:17: str= STRING + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:732:17: str= STRING { str=(Token)input.LT(1); - match(input,STRING,FOLLOW_STRING_in_word2420); + match(input,STRING,FOLLOW_STRING_in_word2521); word=getString(str); } @@ -4139,7 +4315,7 @@ public String word() throws RecognitionException { // $ANTLR end word - protected DFA2 dfa2 = new DFA2();protected DFA13 dfa13 = new DFA13();protected DFA14 dfa14 = new DFA14();protected DFA15 dfa15 = new DFA15();protected DFA39 dfa39 = new DFA39();protected DFA41 dfa41 = new DFA41();protected DFA44 dfa44 = new DFA44();protected DFA45 dfa45 = new DFA45(); + protected DFA2 dfa2 = new DFA2();protected DFA13 dfa13 = new DFA13();protected DFA14 dfa14 = new DFA14();protected DFA15 dfa15 = new DFA15();protected DFA42 dfa42 = new 
DFA42();protected DFA44 dfa44 = new DFA44();protected DFA47 dfa47 = new DFA47();protected DFA48 dfa48 = new DFA48(); class DFA2 extends DFA { public int predict(IntStream input) throws RecognitionException { return predict(input, s0); @@ -4193,19 +4369,19 @@ public DFA.State transition(IntStream input) throws RecognitionException { public int predict(IntStream input) throws RecognitionException { return predict(input, s0); } - DFA.State s5 = new DFA.State() {{alt=1;}}; DFA.State s2 = new DFA.State() {{alt=2;}}; + DFA.State s5 = new DFA.State() {{alt=1;}}; DFA.State s3 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { switch ( input.LA(1) ) { - case ID: - return s5; + case 21: + return s2; case EOL: return s3; - case 21: - return s2; + case ID: + return s5; default: NoViableAltException nvae = @@ -4224,7 +4400,7 @@ public DFA.State transition(IntStream input) throws RecognitionException { return s2; case ID: - case 54: + case 55: return s5; default: @@ -4251,16 +4427,16 @@ public DFA.State transition(IntStream input) throws RecognitionException { public int predict(IntStream input) throws RecognitionException { return predict(input, s0); } - DFA.State s6 = new DFA.State() {{alt=1;}}; + DFA.State s3 = new DFA.State() {{alt=1;}}; DFA.State s2 = new DFA.State() {{alt=2;}}; - DFA.State s3 = new DFA.State() { + DFA.State s4 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { switch ( input.LA(1) ) { case ID: - return s6; + return s3; case EOL: - return s3; + return s4; case 22: case 23: @@ -4268,7 +4444,7 @@ public DFA.State transition(IntStream input) throws RecognitionException { default: NoViableAltException nvae = - new NoViableAltException("", 14, 3, input); + new NoViableAltException("", 14, 4, input); throw nvae; } } @@ -4276,17 +4452,17 @@ public DFA.State transition(IntStream input) throws RecognitionException { DFA.State s1 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { switch ( input.LA(1) ) { - case EOL: + case ID: + case 55: return s3; + case EOL: + return s4; + case 22: case 23: return s2; - case ID: - case 54: - return s6; - default: NoViableAltException nvae = new NoViableAltException("", 14, 1, input); @@ -4311,20 +4487,20 @@ public DFA.State transition(IntStream input) throws RecognitionException { public int predict(IntStream input) throws RecognitionException { return predict(input, s0); } - DFA.State s2 = new DFA.State() {{alt=2;}}; DFA.State s6 = new DFA.State() {{alt=1;}}; + DFA.State s2 = new DFA.State() {{alt=2;}}; DFA.State s3 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { switch ( input.LA(1) ) { - case 22: - case 23: - return s2; + case ID: + return s6; case EOL: return s3; - case ID: - return s6; + case 22: + case 23: + return s2; default: NoViableAltException nvae = @@ -4344,7 +4520,7 @@ public DFA.State transition(IntStream input) throws RecognitionException { return s2; case ID: - case 54: + case 55: return s6; default: @@ -4367,7 +4543,7 @@ public DFA.State transition(IntStream input) throws RecognitionException { } }; - }class DFA39 extends DFA { + }class DFA42 extends DFA { public int predict(IntStream input) throws RecognitionException { return predict(input, s0); } @@ -4387,7 +4563,7 @@ public DFA.State transition(IntStream input) throws RecognitionException { default: NoViableAltException nvae = - new NoViableAltException("", 39, 2, input); + new NoViableAltException("", 42, 2, 
input); throw nvae; } } @@ -4406,24 +4582,24 @@ public DFA.State transition(IntStream input) throws RecognitionException { default: NoViableAltException nvae = - new NoViableAltException("", 39, 1, input); + new NoViableAltException("", 42, 1, input); throw nvae; } } }; DFA.State s0 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA39_0 = input.LA(1); - if ( LA39_0==ID ) {return s1;} + int LA42_0 = input.LA(1); + if ( LA42_0==ID ) {return s1;} NoViableAltException nvae = - new NoViableAltException("", 39, 0, input); + new NoViableAltException("", 42, 0, input); throw nvae; } }; - }class DFA41 extends DFA { + }class DFA44 extends DFA { public int predict(IntStream input) throws RecognitionException { return predict(input, s0); } @@ -4443,7 +4619,7 @@ public DFA.State transition(IntStream input) throws RecognitionException { default: NoViableAltException nvae = - new NoViableAltException("", 41, 1, input); + new NoViableAltException("", 44, 1, input); throw nvae; } } @@ -4462,33 +4638,33 @@ public DFA.State transition(IntStream input) throws RecognitionException { default: NoViableAltException nvae = - new NoViableAltException("", 41, 0, input); + new NoViableAltException("", 44, 0, input); throw nvae; } } }; - }class DFA44 extends DFA { + }class DFA47 extends DFA { public int predict(IntStream input) throws RecognitionException { return predict(input, s0); } - DFA.State s2 = new DFA.State() {{alt=2;}}; DFA.State s3 = new DFA.State() {{alt=1;}}; + DFA.State s2 = new DFA.State() {{alt=2;}}; DFA.State s1 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { switch ( input.LA(1) ) { - case 23: - return s2; + case 22: + return s3; case EOL: return s1; - case 22: - return s3; + case 23: + return s2; default: NoViableAltException nvae = - new NoViableAltException("", 44, 1, input); + new NoViableAltException("", 47, 1, input); throw nvae; } } @@ -4507,13 +4683,13 @@ public DFA.State transition(IntStream input) throws RecognitionException { default: NoViableAltException nvae = - new NoViableAltException("", 44, 0, input); + new NoViableAltException("", 47, 0, input); throw nvae; } } }; - }class DFA45 extends DFA { + }class DFA48 extends DFA { public int predict(IntStream input) throws RecognitionException { return predict(input, s0); } @@ -4524,7 +4700,6 @@ public DFA.State transition(IntStream input) throws RecognitionException { switch ( input.LA(1) ) { case 22: case 23: - case 39: case 40: case 41: case 42: @@ -4532,6 +4707,7 @@ public DFA.State transition(IntStream input) throws RecognitionException { case 44: case 45: case 46: + case 47: return s4; case EOL: @@ -4542,7 +4718,7 @@ public DFA.State transition(IntStream input) throws RecognitionException { default: NoViableAltException nvae = - new NoViableAltException("", 45, 2, input); + new NoViableAltException("", 48, 2, input); throw nvae; } } @@ -4558,7 +4734,6 @@ public DFA.State transition(IntStream input) throws RecognitionException { case 22: case 23: - case 39: case 40: case 41: case 42: @@ -4566,22 +4741,23 @@ public DFA.State transition(IntStream input) throws RecognitionException { case 44: case 45: case 46: + case 47: return s4; default: NoViableAltException nvae = - new NoViableAltException("", 45, 1, input); + new NoViableAltException("", 48, 1, input); throw nvae; } } }; DFA.State s0 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA45_0 = input.LA(1); - if ( LA45_0==ID ) {return s1;} 
+ int LA48_0 = input.LA(1); + if ( LA48_0==ID ) {return s1;} NoViableAltException nvae = - new NoViableAltException("", 45, 0, input); + new NoViableAltException("", 48, 0, input); throw nvae; } @@ -4641,172 +4817,182 @@ public DFA.State transition(IntStream input) throws RecognitionException { public static final BitSet FOLLOW_opt_eol_in_function382 = new BitSet(new long[]{0x0000000000C00000L}); public static final BitSet FOLLOW_23_in_function407 = new BitSet(new long[]{0x0000000000000012L}); public static final BitSet FOLLOW_opt_eol_in_function411 = new BitSet(new long[]{0x0000000001000000L}); - public static final BitSet FOLLOW_24_in_function415 = new BitSet(new long[]{0x00FFFFFFFFFFFFF2L}); + public static final BitSet FOLLOW_24_in_function415 = new BitSet(new long[]{0x01FFFFFFFFFFFFF2L}); public static final BitSet FOLLOW_curly_chunk_in_function422 = new BitSet(new long[]{0x0000000002000000L}); public static final BitSet FOLLOW_25_in_function431 = new BitSet(new long[]{0x0000000000000012L}); public static final BitSet FOLLOW_opt_eol_in_function439 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_opt_eol_in_query463 = new BitSet(new long[]{0x0000000004000000L}); - public static final BitSet FOLLOW_26_in_query469 = new BitSet(new long[]{0x00800006BC020120L}); + public static final BitSet FOLLOW_26_in_query469 = new BitSet(new long[]{0x01000006BC020120L}); public static final BitSet FOLLOW_word_in_query473 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_query475 = new BitSet(new long[]{0x00FFFFFFFFFFFFF2L}); + public static final BitSet FOLLOW_opt_eol_in_query475 = new BitSet(new long[]{0x01FFFFFFFFFFFFF2L}); public static final BitSet FOLLOW_expander_lhs_block_in_query491 = new BitSet(new long[]{0x0000000008000000L}); public static final BitSet FOLLOW_normal_lhs_block_in_query499 = new BitSet(new long[]{0x0000000008000000L}); public static final BitSet FOLLOW_27_in_query514 = new BitSet(new long[]{0x0000000000000012L}); public static final BitSet FOLLOW_opt_eol_in_query516 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_opt_eol_in_rule539 = new BitSet(new long[]{0x0000000010000000L}); - public static final BitSet FOLLOW_28_in_rule545 = new BitSet(new long[]{0x00800006BC020120L}); + public static final BitSet FOLLOW_28_in_rule545 = new BitSet(new long[]{0x01000006BC020120L}); public static final BitSet FOLLOW_word_in_rule549 = new BitSet(new long[]{0x0000000000000012L}); public static final BitSet FOLLOW_opt_eol_in_rule551 = new BitSet(new long[]{0x0000000140000012L}); public static final BitSet FOLLOW_rule_attributes_in_rule562 = new BitSet(new long[]{0x0000000000000012L}); public static final BitSet FOLLOW_opt_eol_in_rule572 = new BitSet(new long[]{0x00000000A0000000L}); public static final BitSet FOLLOW_29_in_rule580 = new BitSet(new long[]{0x0000000040000012L}); public static final BitSet FOLLOW_30_in_rule582 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_rule585 = new BitSet(new long[]{0x00FFFFFFFFFFFFF2L}); + public static final BitSet FOLLOW_opt_eol_in_rule585 = new BitSet(new long[]{0x01FFFFFFFFFFFFF2L}); public static final BitSet FOLLOW_expander_lhs_block_in_rule603 = new BitSet(new long[]{0x0000000080000000L}); public static final BitSet FOLLOW_normal_lhs_block_in_rule612 = new BitSet(new long[]{0x0000000080000000L}); public static final BitSet FOLLOW_31_in_rule633 = new BitSet(new long[]{0x0000000040000012L}); public 
static final BitSet FOLLOW_30_in_rule635 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_rule639 = new BitSet(new long[]{0x00FFFFFFFFFFFFF0L}); + public static final BitSet FOLLOW_opt_eol_in_rule639 = new BitSet(new long[]{0x01FFFFFFFFFFFFF0L}); public static final BitSet FOLLOW_27_in_rule674 = new BitSet(new long[]{0x0000000000000012L}); public static final BitSet FOLLOW_opt_eol_in_rule676 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_32_in_rule_attributes694 = new BitSet(new long[]{0x0000000040000012L}); public static final BitSet FOLLOW_30_in_rule_attributes697 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_rule_attributes700 = new BitSet(new long[]{0x0000003E00400002L}); - public static final BitSet FOLLOW_22_in_rule_attributes707 = new BitSet(new long[]{0x0000003E00000000L}); + public static final BitSet FOLLOW_opt_eol_in_rule_attributes700 = new BitSet(new long[]{0x0000007E00400002L}); + public static final BitSet FOLLOW_22_in_rule_attributes707 = new BitSet(new long[]{0x0000007E00000000L}); public static final BitSet FOLLOW_rule_attribute_in_rule_attributes712 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_rule_attributes714 = new BitSet(new long[]{0x0000003E00400002L}); + public static final BitSet FOLLOW_opt_eol_in_rule_attributes714 = new BitSet(new long[]{0x0000007E00400002L}); public static final BitSet FOLLOW_salience_in_rule_attribute753 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_no_loop_in_rule_attribute763 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_agenda_group_in_rule_attribute774 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_duration_in_rule_attribute787 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_xor_group_in_rule_attribute801 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_33_in_salience834 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_salience836 = new BitSet(new long[]{0x0000000000000040L}); - public static final BitSet FOLLOW_INT_in_salience840 = new BitSet(new long[]{0x0000000000010012L}); - public static final BitSet FOLLOW_16_in_salience842 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_salience845 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_34_in_no_loop880 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_no_loop882 = new BitSet(new long[]{0x0000000000010012L}); - public static final BitSet FOLLOW_16_in_no_loop884 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_no_loop887 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_34_in_no_loop912 = new BitSet(new long[]{0x0000000000000080L}); - public static final BitSet FOLLOW_BOOL_in_no_loop916 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_no_loop918 = new BitSet(new long[]{0x0000000000010012L}); - public static final BitSet FOLLOW_16_in_no_loop920 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_no_loop923 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_35_in_xor_group964 = new BitSet(new 
long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_xor_group966 = new BitSet(new long[]{0x0000000000000100L}); - public static final BitSet FOLLOW_STRING_in_xor_group970 = new BitSet(new long[]{0x0000000000010012L}); - public static final BitSet FOLLOW_16_in_xor_group972 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_xor_group975 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_36_in_agenda_group1004 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_agenda_group1006 = new BitSet(new long[]{0x0000000000000100L}); - public static final BitSet FOLLOW_STRING_in_agenda_group1010 = new BitSet(new long[]{0x0000000000010012L}); - public static final BitSet FOLLOW_16_in_agenda_group1012 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_agenda_group1015 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_37_in_duration1047 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_duration1049 = new BitSet(new long[]{0x0000000000000040L}); - public static final BitSet FOLLOW_INT_in_duration1053 = new BitSet(new long[]{0x0000000000010012L}); - public static final BitSet FOLLOW_16_in_duration1055 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_duration1058 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_lhs_in_normal_lhs_block1084 = new BitSet(new long[]{0x0038000000200022L}); - public static final BitSet FOLLOW_paren_chunk_in_expander_lhs_block1130 = new BitSet(new long[]{0x0000000000000010L}); - public static final BitSet FOLLOW_EOL_in_expander_lhs_block1132 = new BitSet(new long[]{0x00FFFFFFFFFFFFF2L}); - public static final BitSet FOLLOW_lhs_or_in_lhs1176 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_fact_binding_in_lhs_column1203 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_fact_in_lhs_column1212 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_ID_in_fact_binding1244 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_fact_binding1254 = new BitSet(new long[]{0x0000000040000000L}); - public static final BitSet FOLLOW_30_in_fact_binding1256 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_fact_binding1258 = new BitSet(new long[]{0x0000000000000020L}); - public static final BitSet FOLLOW_fact_in_fact_binding1266 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_fact_binding1268 = new BitSet(new long[]{0x0000004000000002L}); - public static final BitSet FOLLOW_38_in_fact_binding1280 = new BitSet(new long[]{0x0000000000000020L}); - public static final BitSet FOLLOW_fact_in_fact_binding1294 = new BitSet(new long[]{0x0000004000000002L}); - public static final BitSet FOLLOW_ID_in_fact1334 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_fact1342 = new BitSet(new long[]{0x0000000000200000L}); - public static final BitSet FOLLOW_21_in_fact1348 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_fact1350 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_constraints_in_fact1356 = new BitSet(new 
long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_fact1375 = new BitSet(new long[]{0x0000000000800000L}); - public static final BitSet FOLLOW_23_in_fact1377 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_fact1379 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_opt_eol_in_constraints1404 = new BitSet(new long[]{0x0000000000000032L}); - public static final BitSet FOLLOW_constraint_in_constraints1409 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_predicate_in_constraints1412 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_constraints1420 = new BitSet(new long[]{0x0000000000400000L}); - public static final BitSet FOLLOW_22_in_constraints1422 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_constraints1424 = new BitSet(new long[]{0x0000000000000032L}); - public static final BitSet FOLLOW_constraint_in_constraints1427 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_predicate_in_constraints1430 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_constraints1438 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_opt_eol_in_constraint1457 = new BitSet(new long[]{0x0000000000000020L}); - public static final BitSet FOLLOW_ID_in_constraint1465 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_constraint1467 = new BitSet(new long[]{0x0000000040000000L}); - public static final BitSet FOLLOW_30_in_constraint1469 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_constraint1471 = new BitSet(new long[]{0x0000000000000020L}); - public static final BitSet FOLLOW_ID_in_constraint1481 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_constraint1491 = new BitSet(new long[]{0x00007F8000000012L}); - public static final BitSet FOLLOW_set_in_constraint1499 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_constraint1571 = new BitSet(new long[]{0x00000000002003E0L}); - public static final BitSet FOLLOW_ID_in_constraint1589 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_literal_constraint_in_constraint1614 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_retval_constraint_in_constraint1634 = new BitSet(new long[]{0x0000000000000012L}); - public static final BitSet FOLLOW_opt_eol_in_constraint1667 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_STRING_in_literal_constraint1694 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_INT_in_literal_constraint1705 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_FLOAT_in_literal_constraint1718 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_BOOL_in_literal_constraint1729 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_21_in_retval_constraint1762 = new BitSet(new long[]{0x00FFFFFFFFFFFFF2L}); - public static final BitSet FOLLOW_paren_chunk_in_retval_constraint1766 = new BitSet(new long[]{0x0000000000800000L}); - public static final BitSet FOLLOW_23_in_retval_constraint1768 = new BitSet(new long[]{0x0000000000000002L}); - public 
static final BitSet FOLLOW_ID_in_predicate1786 = new BitSet(new long[]{0x0000000040000000L}); - public static final BitSet FOLLOW_30_in_predicate1788 = new BitSet(new long[]{0x0000000000000020L}); - public static final BitSet FOLLOW_ID_in_predicate1792 = new BitSet(new long[]{0x0000800000000000L}); - public static final BitSet FOLLOW_47_in_predicate1794 = new BitSet(new long[]{0x0000000000200000L}); - public static final BitSet FOLLOW_21_in_predicate1796 = new BitSet(new long[]{0x00FFFFFFFFFFFFF2L}); - public static final BitSet FOLLOW_paren_chunk_in_predicate1800 = new BitSet(new long[]{0x0000000000800000L}); - public static final BitSet FOLLOW_23_in_predicate1802 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_21_in_paren_chunk1847 = new BitSet(new long[]{0x00FFFFFFFFFFFFF2L}); - public static final BitSet FOLLOW_paren_chunk_in_paren_chunk1851 = new BitSet(new long[]{0x0000000000800000L}); - public static final BitSet FOLLOW_23_in_paren_chunk1853 = new BitSet(new long[]{0x00FFFFFFFFFFFFF2L}); - public static final BitSet FOLLOW_24_in_curly_chunk1921 = new BitSet(new long[]{0x00FFFFFFFFFFFFF2L}); - public static final BitSet FOLLOW_curly_chunk_in_curly_chunk1925 = new BitSet(new long[]{0x0000000002000000L}); - public static final BitSet FOLLOW_25_in_curly_chunk1927 = new BitSet(new long[]{0x00FFFFFFFFFFFFF2L}); - public static final BitSet FOLLOW_lhs_and_in_lhs_or1985 = new BitSet(new long[]{0x0001004000000002L}); - public static final BitSet FOLLOW_set_in_lhs_or1995 = new BitSet(new long[]{0x0038000000200020L}); - public static final BitSet FOLLOW_lhs_and_in_lhs_or2006 = new BitSet(new long[]{0x0001004000000002L}); - public static final BitSet FOLLOW_lhs_unary_in_lhs_and2046 = new BitSet(new long[]{0x0006000000000002L}); - public static final BitSet FOLLOW_set_in_lhs_and2055 = new BitSet(new long[]{0x0038000000200020L}); - public static final BitSet FOLLOW_lhs_unary_in_lhs_and2066 = new BitSet(new long[]{0x0006000000000002L}); - public static final BitSet FOLLOW_lhs_exist_in_lhs_unary2104 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_lhs_not_in_lhs_unary2112 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_lhs_eval_in_lhs_unary2120 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_lhs_column_in_lhs_unary2128 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_21_in_lhs_unary2134 = new BitSet(new long[]{0x0038000000200020L}); - public static final BitSet FOLLOW_lhs_in_lhs_unary2138 = new BitSet(new long[]{0x0000000000800000L}); - public static final BitSet FOLLOW_23_in_lhs_unary2140 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_51_in_lhs_exist2170 = new BitSet(new long[]{0x0000000000000020L}); - public static final BitSet FOLLOW_lhs_column_in_lhs_exist2174 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_52_in_lhs_not2204 = new BitSet(new long[]{0x0000000000000020L}); - public static final BitSet FOLLOW_lhs_column_in_lhs_not2208 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_53_in_lhs_eval2234 = new BitSet(new long[]{0x0000000000200000L}); - public static final BitSet FOLLOW_21_in_lhs_eval2236 = new BitSet(new long[]{0x00FFFFFFFFFFFFF2L}); - public static final BitSet FOLLOW_paren_chunk_in_lhs_eval2240 = new BitSet(new long[]{0x0000000000800000L}); - public static final BitSet FOLLOW_23_in_lhs_eval2242 = new 
BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_ID_in_dotted_name2274 = new BitSet(new long[]{0x0040000000000002L}); - public static final BitSet FOLLOW_54_in_dotted_name2280 = new BitSet(new long[]{0x0000000000000020L}); - public static final BitSet FOLLOW_ID_in_dotted_name2284 = new BitSet(new long[]{0x0040000000000002L}); - public static final BitSet FOLLOW_ID_in_word2314 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_17_in_word2326 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_55_in_word2335 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_28_in_word2347 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_26_in_word2358 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_33_in_word2368 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_34_in_word2376 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_29_in_word2384 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_31_in_word2395 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_27_in_word2406 = new BitSet(new long[]{0x0000000000000002L}); - public static final BitSet FOLLOW_STRING_in_word2420 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_auto_focus_in_rule_attribute812 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_33_in_salience845 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_salience847 = new BitSet(new long[]{0x0000000000000040L}); + public static final BitSet FOLLOW_INT_in_salience851 = new BitSet(new long[]{0x0000000000010012L}); + public static final BitSet FOLLOW_16_in_salience853 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_salience856 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_34_in_no_loop891 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_no_loop893 = new BitSet(new long[]{0x0000000000010012L}); + public static final BitSet FOLLOW_16_in_no_loop895 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_no_loop898 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_34_in_no_loop923 = new BitSet(new long[]{0x0000000000000080L}); + public static final BitSet FOLLOW_BOOL_in_no_loop927 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_no_loop929 = new BitSet(new long[]{0x0000000000010012L}); + public static final BitSet FOLLOW_16_in_no_loop931 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_no_loop934 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_35_in_auto_focus980 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_auto_focus982 = new BitSet(new long[]{0x0000000000010012L}); + public static final BitSet FOLLOW_16_in_auto_focus984 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_auto_focus987 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_35_in_auto_focus1012 = new BitSet(new long[]{0x0000000000000080L}); + public static final BitSet 
FOLLOW_BOOL_in_auto_focus1016 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_auto_focus1018 = new BitSet(new long[]{0x0000000000010012L}); + public static final BitSet FOLLOW_16_in_auto_focus1020 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_auto_focus1023 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_36_in_xor_group1065 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_xor_group1067 = new BitSet(new long[]{0x0000000000000100L}); + public static final BitSet FOLLOW_STRING_in_xor_group1071 = new BitSet(new long[]{0x0000000000010012L}); + public static final BitSet FOLLOW_16_in_xor_group1073 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_xor_group1076 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_37_in_agenda_group1105 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_agenda_group1107 = new BitSet(new long[]{0x0000000000000100L}); + public static final BitSet FOLLOW_STRING_in_agenda_group1111 = new BitSet(new long[]{0x0000000000010012L}); + public static final BitSet FOLLOW_16_in_agenda_group1113 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_agenda_group1116 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_38_in_duration1148 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_duration1150 = new BitSet(new long[]{0x0000000000000040L}); + public static final BitSet FOLLOW_INT_in_duration1154 = new BitSet(new long[]{0x0000000000010012L}); + public static final BitSet FOLLOW_16_in_duration1156 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_duration1159 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_lhs_in_normal_lhs_block1185 = new BitSet(new long[]{0x0070000000200022L}); + public static final BitSet FOLLOW_paren_chunk_in_expander_lhs_block1231 = new BitSet(new long[]{0x0000000000000010L}); + public static final BitSet FOLLOW_EOL_in_expander_lhs_block1233 = new BitSet(new long[]{0x01FFFFFFFFFFFFF2L}); + public static final BitSet FOLLOW_lhs_or_in_lhs1277 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_fact_binding_in_lhs_column1304 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_fact_in_lhs_column1313 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_ID_in_fact_binding1345 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_fact_binding1355 = new BitSet(new long[]{0x0000000040000000L}); + public static final BitSet FOLLOW_30_in_fact_binding1357 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_fact_binding1359 = new BitSet(new long[]{0x0000000000000020L}); + public static final BitSet FOLLOW_fact_in_fact_binding1367 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_fact_binding1369 = new BitSet(new long[]{0x0000008000000002L}); + public static final BitSet FOLLOW_39_in_fact_binding1381 = new BitSet(new long[]{0x0000000000000020L}); + public static final BitSet FOLLOW_fact_in_fact_binding1395 = new BitSet(new long[]{0x0000008000000002L}); + public static final BitSet 
FOLLOW_ID_in_fact1435 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_fact1443 = new BitSet(new long[]{0x0000000000200000L}); + public static final BitSet FOLLOW_21_in_fact1449 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_fact1451 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_constraints_in_fact1457 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_fact1476 = new BitSet(new long[]{0x0000000000800000L}); + public static final BitSet FOLLOW_23_in_fact1478 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_fact1480 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_opt_eol_in_constraints1505 = new BitSet(new long[]{0x0000000000000032L}); + public static final BitSet FOLLOW_constraint_in_constraints1510 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_predicate_in_constraints1513 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_constraints1521 = new BitSet(new long[]{0x0000000000400000L}); + public static final BitSet FOLLOW_22_in_constraints1523 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_constraints1525 = new BitSet(new long[]{0x0000000000000032L}); + public static final BitSet FOLLOW_constraint_in_constraints1528 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_predicate_in_constraints1531 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_constraints1539 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_opt_eol_in_constraint1558 = new BitSet(new long[]{0x0000000000000020L}); + public static final BitSet FOLLOW_ID_in_constraint1566 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_constraint1568 = new BitSet(new long[]{0x0000000040000000L}); + public static final BitSet FOLLOW_30_in_constraint1570 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_constraint1572 = new BitSet(new long[]{0x0000000000000020L}); + public static final BitSet FOLLOW_ID_in_constraint1582 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_constraint1592 = new BitSet(new long[]{0x0000FF0000000012L}); + public static final BitSet FOLLOW_set_in_constraint1600 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_constraint1672 = new BitSet(new long[]{0x00000000002003E0L}); + public static final BitSet FOLLOW_ID_in_constraint1690 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_literal_constraint_in_constraint1715 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_retval_constraint_in_constraint1735 = new BitSet(new long[]{0x0000000000000012L}); + public static final BitSet FOLLOW_opt_eol_in_constraint1768 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_STRING_in_literal_constraint1795 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_INT_in_literal_constraint1806 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_FLOAT_in_literal_constraint1819 = new BitSet(new long[]{0x0000000000000002L}); + public static 
final BitSet FOLLOW_BOOL_in_literal_constraint1830 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_21_in_retval_constraint1863 = new BitSet(new long[]{0x01FFFFFFFFFFFFF2L}); + public static final BitSet FOLLOW_paren_chunk_in_retval_constraint1867 = new BitSet(new long[]{0x0000000000800000L}); + public static final BitSet FOLLOW_23_in_retval_constraint1869 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_ID_in_predicate1887 = new BitSet(new long[]{0x0000000040000000L}); + public static final BitSet FOLLOW_30_in_predicate1889 = new BitSet(new long[]{0x0000000000000020L}); + public static final BitSet FOLLOW_ID_in_predicate1893 = new BitSet(new long[]{0x0001000000000000L}); + public static final BitSet FOLLOW_48_in_predicate1895 = new BitSet(new long[]{0x0000000000200000L}); + public static final BitSet FOLLOW_21_in_predicate1897 = new BitSet(new long[]{0x01FFFFFFFFFFFFF2L}); + public static final BitSet FOLLOW_paren_chunk_in_predicate1901 = new BitSet(new long[]{0x0000000000800000L}); + public static final BitSet FOLLOW_23_in_predicate1903 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_21_in_paren_chunk1948 = new BitSet(new long[]{0x01FFFFFFFFFFFFF2L}); + public static final BitSet FOLLOW_paren_chunk_in_paren_chunk1952 = new BitSet(new long[]{0x0000000000800000L}); + public static final BitSet FOLLOW_23_in_paren_chunk1954 = new BitSet(new long[]{0x01FFFFFFFFFFFFF2L}); + public static final BitSet FOLLOW_24_in_curly_chunk2022 = new BitSet(new long[]{0x01FFFFFFFFFFFFF2L}); + public static final BitSet FOLLOW_curly_chunk_in_curly_chunk2026 = new BitSet(new long[]{0x0000000002000000L}); + public static final BitSet FOLLOW_25_in_curly_chunk2028 = new BitSet(new long[]{0x01FFFFFFFFFFFFF2L}); + public static final BitSet FOLLOW_lhs_and_in_lhs_or2086 = new BitSet(new long[]{0x0002008000000002L}); + public static final BitSet FOLLOW_set_in_lhs_or2096 = new BitSet(new long[]{0x0070000000200020L}); + public static final BitSet FOLLOW_lhs_and_in_lhs_or2107 = new BitSet(new long[]{0x0002008000000002L}); + public static final BitSet FOLLOW_lhs_unary_in_lhs_and2147 = new BitSet(new long[]{0x000C000000000002L}); + public static final BitSet FOLLOW_set_in_lhs_and2156 = new BitSet(new long[]{0x0070000000200020L}); + public static final BitSet FOLLOW_lhs_unary_in_lhs_and2167 = new BitSet(new long[]{0x000C000000000002L}); + public static final BitSet FOLLOW_lhs_exist_in_lhs_unary2205 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_lhs_not_in_lhs_unary2213 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_lhs_eval_in_lhs_unary2221 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_lhs_column_in_lhs_unary2229 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_21_in_lhs_unary2235 = new BitSet(new long[]{0x0070000000200020L}); + public static final BitSet FOLLOW_lhs_in_lhs_unary2239 = new BitSet(new long[]{0x0000000000800000L}); + public static final BitSet FOLLOW_23_in_lhs_unary2241 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_52_in_lhs_exist2271 = new BitSet(new long[]{0x0000000000000020L}); + public static final BitSet FOLLOW_lhs_column_in_lhs_exist2275 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_53_in_lhs_not2305 = new BitSet(new long[]{0x0000000000000020L}); + public static final BitSet 
FOLLOW_lhs_column_in_lhs_not2309 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_54_in_lhs_eval2335 = new BitSet(new long[]{0x0000000000200000L}); + public static final BitSet FOLLOW_21_in_lhs_eval2337 = new BitSet(new long[]{0x01FFFFFFFFFFFFF2L}); + public static final BitSet FOLLOW_paren_chunk_in_lhs_eval2341 = new BitSet(new long[]{0x0000000000800000L}); + public static final BitSet FOLLOW_23_in_lhs_eval2343 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_ID_in_dotted_name2375 = new BitSet(new long[]{0x0080000000000002L}); + public static final BitSet FOLLOW_55_in_dotted_name2381 = new BitSet(new long[]{0x0000000000000020L}); + public static final BitSet FOLLOW_ID_in_dotted_name2385 = new BitSet(new long[]{0x0080000000000002L}); + public static final BitSet FOLLOW_ID_in_word2415 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_17_in_word2427 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_56_in_word2436 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_28_in_word2448 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_26_in_word2459 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_33_in_word2469 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_34_in_word2477 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_29_in_word2485 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_31_in_word2496 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_27_in_word2507 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_STRING_in_word2521 = new BitSet(new long[]{0x0000000000000002L}); } \ No newline at end of file diff --git a/drools-compiler/src/main/java/org/drools/lang/RuleParserLexer.java b/drools-compiler/src/main/java/org/drools/lang/RuleParserLexer.java index 5593cab34df..d69f3246d93 100644 --- a/drools-compiler/src/main/java/org/drools/lang/RuleParserLexer.java +++ b/drools-compiler/src/main/java/org/drools/lang/RuleParserLexer.java @@ -1,4 +1,4 @@ -// $ANTLR 3.0ea8 C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g 2006-03-28 11:19:24 +// $ANTLR 3.0ea8 C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g 2006-03-28 14:38:45 package org.drools.lang; @@ -48,13 +48,14 @@ public class RuleParserLexer extends Lexer { public static final int T28=28; public static final int T42=42; public static final int T40=40; + public static final int T56=56; public static final int T48=48; public static final int T15=15; public static final int T54=54; public static final int EOF=-1; public static final int T47=47; public static final int EOL=4; - public static final int Tokens=56; + public static final int Tokens=57; public static final int T53=53; public static final int T31=31; public static final int MULTI_LINE_COMMENT=14; @@ -550,10 +551,10 @@ public void mT35() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:26:7: ( 'xor-group' ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:26:7: 'xor-group' + // 
C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:26:7: ( 'auto-focus' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:26:7: 'auto-focus' { - match("xor-group"); + match("auto-focus"); } @@ -574,10 +575,10 @@ public void mT36() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:27:7: ( 'agenda-group' ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:27:7: 'agenda-group' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:27:7: ( 'xor-group' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:27:7: 'xor-group' { - match("agenda-group"); + match("xor-group"); } @@ -598,10 +599,10 @@ public void mT37() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:28:7: ( 'duration' ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:28:7: 'duration' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:28:7: ( 'agenda-group' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:28:7: 'agenda-group' { - match("duration"); + match("agenda-group"); } @@ -622,10 +623,10 @@ public void mT38() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:29:7: ( 'or' ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:29:7: 'or' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:29:7: ( 'duration' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:29:7: 'duration' { - match("or"); + match("duration"); } @@ -646,10 +647,10 @@ public void mT39() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:30:7: ( '==' ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:30:7: '==' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:30:7: ( 'or' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:30:7: 'or' { - match("=="); + match("or"); } @@ -670,10 +671,11 @@ public void mT40() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:31:7: ( '>' ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:31:7: '>' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:31:7: ( '==' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:31:7: '==' { - match('>'); + match("=="); + } @@ -693,11 +695,10 @@ public void mT41() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = 
Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:32:7: ( '>=' ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:32:7: '>=' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:32:7: ( '>' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:32:7: '>' { - match(">="); - + match('>'); } @@ -717,10 +718,11 @@ public void mT42() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:33:7: ( '<' ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:33:7: '<' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:33:7: ( '>=' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:33:7: '>=' { - match('<'); + match(">="); + } @@ -740,11 +742,10 @@ public void mT43() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:34:7: ( '<=' ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:34:7: '<=' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:34:7: ( '<' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:34:7: '<' { - match("<="); - + match('<'); } @@ -764,10 +765,10 @@ public void mT44() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:35:7: ( '!=' ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:35:7: '!=' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:35:7: ( '<=' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:35:7: '<=' { - match("!="); + match("<="); } @@ -788,10 +789,10 @@ public void mT45() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:36:7: ( 'contains' ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:36:7: 'contains' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:36:7: ( '!=' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:36:7: '!=' { - match("contains"); + match("!="); } @@ -812,10 +813,10 @@ public void mT46() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:37:7: ( 'matches' ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:37:7: 'matches' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:37:7: ( 'contains' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:37:7: 'contains' { - match("matches"); + match("contains"); } @@ -836,10 +837,10 @@ public void 
mT47() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:38:7: ( '->' ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:38:7: '->' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:38:7: ( 'matches' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:38:7: 'matches' { - match("->"); + match("matches"); } @@ -860,10 +861,10 @@ public void mT48() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:39:7: ( '||' ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:39:7: '||' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:39:7: ( '->' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:39:7: '->' { - match("||"); + match("->"); } @@ -884,10 +885,10 @@ public void mT49() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:40:7: ( 'and' ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:40:7: 'and' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:40:7: ( '||' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:40:7: '||' { - match("and"); + match("||"); } @@ -908,10 +909,10 @@ public void mT50() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:41:7: ( '&&' ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:41:7: '&&' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:41:7: ( 'and' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:41:7: 'and' { - match("&&"); + match("and"); } @@ -932,10 +933,10 @@ public void mT51() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:42:7: ( 'exists' ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:42:7: 'exists' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:42:7: ( '&&' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:42:7: '&&' { - match("exists"); + match("&&"); } @@ -956,10 +957,10 @@ public void mT52() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:43:7: ( 'not' ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:43:7: 'not' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:43:7: ( 'exists' ) + // 
C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:43:7: 'exists' { - match("not"); + match("exists"); } @@ -980,10 +981,10 @@ public void mT53() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:44:7: ( 'eval' ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:44:7: 'eval' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:44:7: ( 'not' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:44:7: 'not' { - match("eval"); + match("not"); } @@ -1004,10 +1005,11 @@ public void mT54() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:45:7: ( '.' ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:45:7: '.' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:45:7: ( 'eval' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:45:7: 'eval' { - match('.'); + match("eval"); + } @@ -1027,8 +1029,31 @@ public void mT55() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:46:7: ( 'use' ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:46:7: 'use' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:46:7: ( '.' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:46:7: '.' 
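// Note (added annotation, inferred from the hunks above rather than stated in the commit):
// ANTLR numbers literal tokens positionally, so inserting the new 'auto-focus' literal as T35
// shifts every later literal up one slot -- 'xor-group' becomes T36, 'agenda-group' T37, and so
// on down to '.' becoming T55 here, with 'use' displaced from T55 into a brand-new T56 rule
// below. That shift is also why the T56 constant appears and Tokens is bumped from 56 to 57;
// the match() bodies themselves are unchanged, only their rule numbers move.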
+ { + match('.'); + + } + + if ( token==null ) {emit(type,line,charPosition,channel,start,getCharIndex()-1);} + } + finally { + } + } + // $ANTLR end T55 + + + // $ANTLR start T56 + public void mT56() throws RecognitionException { + try { + int type = T56; + int start = getCharIndex(); + int line = getLine(); + int charPosition = getCharPositionInLine(); + int channel = Token.DEFAULT_CHANNEL; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:47:7: ( 'use' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:47:7: 'use' { match("use"); @@ -1040,7 +1065,7 @@ public void mT55() throws RecognitionException { finally { } } - // $ANTLR end T55 + // $ANTLR end T56 // $ANTLR start MISC @@ -1051,8 +1076,8 @@ public void mMISC() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:711:9: ( ('!'|'@'|'$'|'%'|'^'|'&'|'*'|'_'|'-'|'+'|'|'|','|'{'|'}'|'['|']'|';')) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:712:17: ('!'|'@'|'$'|'%'|'^'|'&'|'*'|'_'|'-'|'+'|'|'|','|'{'|'}'|'['|']'|';') + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:736:9: ( ('!'|'@'|'$'|'%'|'^'|'&'|'*'|'_'|'-'|'+'|'|'|','|'{'|'}'|'['|']'|';')) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:737:17: ('!'|'@'|'$'|'%'|'^'|'&'|'*'|'_'|'-'|'+'|'|'|','|'{'|'}'|'['|']'|';') { if ( input.LA(1)=='!'||(input.LA(1)>='$' && input.LA(1)<='&')||(input.LA(1)>='*' && input.LA(1)<='-')||input.LA(1)==';'||input.LA(1)=='@'||input.LA(1)=='['||(input.LA(1)>=']' && input.LA(1)<='_')||(input.LA(1)>='{' && input.LA(1)<='}') ) { input.consume(); @@ -1083,8 +1108,8 @@ public void mWS() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:715:17: ( (' '|'\t'|'\f')) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:715:17: (' '|'\t'|'\f') + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:740:17: ( (' '|'\t'|'\f')) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:740:17: (' '|'\t'|'\f') { if ( input.LA(1)=='\t'||input.LA(1)=='\f'||input.LA(1)==' ' ) { input.consume(); @@ -1116,10 +1141,10 @@ public void mEOL() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:723:17: ( ( '\r\n' | '\r' | '\n' ) ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:723:17: ( '\r\n' | '\r' | '\n' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:748:17: ( ( '\r\n' | '\r' | '\n' ) ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:748:17: ( '\r\n' | '\r' | '\n' ) { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:723:17: ( '\r\n' | '\r' | '\n' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:748:17: ( '\r\n' | '\r' | '\n' ) int alt1=3; int LA1_0 = input.LA(1); if ( LA1_0=='\r' ) { @@ -1135,13 +1160,13 @@ else if ( 
LA1_0=='\n' ) { } else { NoViableAltException nvae = - new NoViableAltException("723:17: ( \'\\r\\n\' | \'\\r\' | \'\\n\' )", 1, 0, input); + new NoViableAltException("748:17: ( \'\\r\\n\' | \'\\r\' | \'\\n\' )", 1, 0, input); throw nvae; } switch (alt1) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:723:25: '\r\n' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:748:25: '\r\n' { match("\r\n"); @@ -1149,14 +1174,14 @@ else if ( LA1_0=='\n' ) { } break; case 2 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:724:25: '\r' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:749:25: '\r' { match('\r'); } break; case 3 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:725:25: '\n' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:750:25: '\n' { match('\n'); @@ -1184,10 +1209,10 @@ public void mINT() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:730:17: ( ( '-' )? ( '0' .. '9' )+ ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:730:17: ( '-' )? ( '0' .. '9' )+ + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:755:17: ( ( '-' )? ( '0' .. '9' )+ ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:755:17: ( '-' )? ( '0' .. '9' )+ { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:730:17: ( '-' )? + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:755:17: ( '-' )? int alt2=2; int LA2_0 = input.LA(1); if ( LA2_0=='-' ) { @@ -1198,13 +1223,13 @@ else if ( (LA2_0>='0' && LA2_0<='9') ) { } else { NoViableAltException nvae = - new NoViableAltException("730:17: ( \'-\' )?", 2, 0, input); + new NoViableAltException("755:17: ( \'-\' )?", 2, 0, input); throw nvae; } switch (alt2) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:730:18: '-' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:755:18: '-' { match('-'); @@ -1213,7 +1238,7 @@ else if ( (LA2_0>='0' && LA2_0<='9') ) { } - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:730:23: ( '0' .. '9' )+ + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:755:23: ( '0' .. '9' )+ int cnt3=0; loop3: do { @@ -1226,7 +1251,7 @@ else if ( (LA2_0>='0' && LA2_0<='9') ) { switch (alt3) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:730:24: '0' .. '9' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:755:24: '0' .. '9' { matchRange('0','9'); @@ -1261,10 +1286,10 @@ public void mFLOAT() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:734:17: ( ( '0' .. '9' )+ '.' ( '0' .. '9' )+ ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:734:17: ( '0' .. '9' )+ '.' ( '0' .. 
'9' )+ + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:759:17: ( ( '0' .. '9' )+ '.' ( '0' .. '9' )+ ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:759:17: ( '0' .. '9' )+ '.' ( '0' .. '9' )+ { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:734:17: ( '0' .. '9' )+ + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:759:17: ( '0' .. '9' )+ int cnt4=0; loop4: do { @@ -1277,7 +1302,7 @@ public void mFLOAT() throws RecognitionException { switch (alt4) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:734:18: '0' .. '9' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:759:18: '0' .. '9' { matchRange('0','9'); @@ -1294,7 +1319,7 @@ public void mFLOAT() throws RecognitionException { } while (true); match('.'); - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:734:33: ( '0' .. '9' )+ + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:759:33: ( '0' .. '9' )+ int cnt5=0; loop5: do { @@ -1307,7 +1332,7 @@ public void mFLOAT() throws RecognitionException { switch (alt5) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:734:34: '0' .. '9' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:759:34: '0' .. '9' { matchRange('0','9'); @@ -1342,11 +1367,11 @@ public void mSTRING() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:738:17: ( '"' ( options {greedy=false; } : . )* '"' ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:738:17: '"' ( options {greedy=false; } : . )* '"' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:763:17: ( '"' ( options {greedy=false; } : . )* '"' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:763:17: '"' ( options {greedy=false; } : . )* '"' { match('"'); - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:738:21: ( options {greedy=false; } : . )* + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:763:21: ( options {greedy=false; } : . )* loop6: do { int alt6=2; @@ -1361,7 +1386,7 @@ else if ( (LA6_0>='\u0000' && LA6_0<='!')||(LA6_0>='#' && LA6_0<='\uFFFE') ) { switch (alt6) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:738:48: . + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:763:48: . 
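// Note (added annotation): in this and the surrounding hunks only the embedded drl.g
// position comments change -- each line reference grows by exactly 25 (e.g. 738 -> 763,
// 742 -> 767), suggesting the grammar source gained 25 lines above these rules while the
// generated lexing logic is unchanged. The loop entered here is the greedy=false body of
// the STRING rule: after the opening '"', matchAny() consumes every character except '"'
// until the closing quote terminates loop6.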
{ matchAny(); @@ -1393,10 +1418,10 @@ public void mBOOL() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:742:17: ( ( 'true' | 'false' ) ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:742:17: ( 'true' | 'false' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:767:17: ( ( 'true' | 'false' ) ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:767:17: ( 'true' | 'false' ) { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:742:17: ( 'true' | 'false' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:767:17: ( 'true' | 'false' ) int alt7=2; int LA7_0 = input.LA(1); if ( LA7_0=='t' ) { @@ -1407,13 +1432,13 @@ else if ( LA7_0=='f' ) { } else { NoViableAltException nvae = - new NoViableAltException("742:17: ( \'true\' | \'false\' )", 7, 0, input); + new NoViableAltException("767:17: ( \'true\' | \'false\' )", 7, 0, input); throw nvae; } switch (alt7) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:742:18: 'true' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:767:18: 'true' { match("true"); @@ -1421,7 +1446,7 @@ else if ( LA7_0=='f' ) { } break; case 2 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:742:25: 'false' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:767:25: 'false' { match("false"); @@ -1450,8 +1475,8 @@ public void mID() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:746:17: ( ('a'..'z'|'A'..'Z'|'_'|'$') ( ('a'..'z'|'A'..'Z'|'_'|'0'..'9'))* ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:746:17: ('a'..'z'|'A'..'Z'|'_'|'$') ( ('a'..'z'|'A'..'Z'|'_'|'0'..'9'))* + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:771:17: ( ('a'..'z'|'A'..'Z'|'_'|'$') ( ('a'..'z'|'A'..'Z'|'_'|'0'..'9'))* ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:771:17: ('a'..'z'|'A'..'Z'|'_'|'$') ( ('a'..'z'|'A'..'Z'|'_'|'0'..'9'))* { if ( input.LA(1)=='$'||(input.LA(1)>='A' && input.LA(1)<='Z')||input.LA(1)=='_'||(input.LA(1)>='a' && input.LA(1)<='z') ) { input.consume(); @@ -1463,7 +1488,7 @@ public void mID() throws RecognitionException { recover(mse); throw mse; } - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:746:44: ( ('a'..'z'|'A'..'Z'|'_'|'0'..'9'))* + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:771:44: ( ('a'..'z'|'A'..'Z'|'_'|'0'..'9'))* loop8: do { int alt8=2; @@ -1475,7 +1500,7 @@ public void mID() throws RecognitionException { switch (alt8) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:746:45: ('a'..'z'|'A'..'Z'|'_'|'0'..'9') + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:771:45: ('a'..'z'|'A'..'Z'|'_'|'0'..'9') { if ( (input.LA(1)>='0' && input.LA(1)<='9')||(input.LA(1)>='A' && input.LA(1)<='Z')||input.LA(1)=='_'||(input.LA(1)>='a' && 
input.LA(1)<='z') ) { input.consume(); @@ -1515,11 +1540,11 @@ public void mSH_STYLE_SINGLE_LINE_COMMENT() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:751:17: ( '#' ( options {greedy=false; } : . )* EOL ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:751:17: '#' ( options {greedy=false; } : . )* EOL + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:776:17: ( '#' ( options {greedy=false; } : . )* EOL ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:776:17: '#' ( options {greedy=false; } : . )* EOL { match('#'); - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:751:21: ( options {greedy=false; } : . )* + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:776:21: ( options {greedy=false; } : . )* loop9: do { int alt9=2; @@ -1537,7 +1562,7 @@ else if ( (LA9_0>='\u0000' && LA9_0<='\t')||(LA9_0>='\u000B' && LA9_0<='\f')||(L switch (alt9) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:751:48: . + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:776:48: . { matchAny(); @@ -1570,12 +1595,12 @@ public void mC_STYLE_SINGLE_LINE_COMMENT() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:757:17: ( '//' ( options {greedy=false; } : . )* EOL ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:757:17: '//' ( options {greedy=false; } : . )* EOL + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:782:17: ( '//' ( options {greedy=false; } : . )* EOL ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:782:17: '//' ( options {greedy=false; } : . )* EOL { match("//"); - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:757:22: ( options {greedy=false; } : . )* + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:782:22: ( options {greedy=false; } : . )* loop10: do { int alt10=2; @@ -1593,7 +1618,7 @@ else if ( (LA10_0>='\u0000' && LA10_0<='\t')||(LA10_0>='\u000B' && LA10_0<='\f') switch (alt10) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:757:49: . + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:782:49: . { matchAny(); @@ -1626,12 +1651,12 @@ public void mMULTI_LINE_COMMENT() throws RecognitionException { int line = getLine(); int charPosition = getCharPositionInLine(); int channel = Token.DEFAULT_CHANNEL; - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:762:17: ( '/*' ( options {greedy=false; } : . )* '*/' ) - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:762:17: '/*' ( options {greedy=false; } : . )* '*/' + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:787:17: ( '/*' ( options {greedy=false; } : . )* '*/' ) + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:787:17: '/*' ( options {greedy=false; } : . 
)* '*/' { match("/*"); - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:762:22: ( options {greedy=false; } : . )* + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:787:22: ( options {greedy=false; } : . )* loop11: do { int alt11=2; @@ -1654,7 +1679,7 @@ else if ( (LA11_0>='\u0000' && LA11_0<=')')||(LA11_0>='+' && LA11_0<='\uFFFE') ) switch (alt11) { case 1 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:762:48: . + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:787:48: . { matchAny(); @@ -1680,8 +1705,8 @@ else if ( (LA11_0>='\u0000' && LA11_0<=')')||(LA11_0>='+' && LA11_0<='\uFFFE') ) // $ANTLR end MULTI_LINE_COMMENT public void mTokens() throws RecognitionException { - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:1:10: ( T15 | T16 | T17 | T18 | T19 | T20 | T21 | T22 | T23 | T24 | T25 | T26 | T27 | T28 | T29 | T30 | T31 | T32 | T33 | T34 | T35 | T36 | T37 | T38 | T39 | T40 | T41 | T42 | T43 | T44 | T45 | T46 | T47 | T48 | T49 | T50 | T51 | T52 | T53 | T54 | T55 | MISC | WS | EOL | INT | FLOAT | STRING | BOOL | ID | SH_STYLE_SINGLE_LINE_COMMENT | C_STYLE_SINGLE_LINE_COMMENT | MULTI_LINE_COMMENT ) - int alt12=52; + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:1:10: ( T15 | T16 | T17 | T18 | T19 | T20 | T21 | T22 | T23 | T24 | T25 | T26 | T27 | T28 | T29 | T30 | T31 | T32 | T33 | T34 | T35 | T36 | T37 | T38 | T39 | T40 | T41 | T42 | T43 | T44 | T45 | T46 | T47 | T48 | T49 | T50 | T51 | T52 | T53 | T54 | T55 | T56 | MISC | WS | EOL | INT | FLOAT | STRING | BOOL | ID | SH_STYLE_SINGLE_LINE_COMMENT | C_STYLE_SINGLE_LINE_COMMENT | MULTI_LINE_COMMENT ) + int alt12=53; alt12 = dfa12.predict(input); switch (alt12) { case 1 : @@ -1972,77 +1997,84 @@ public void mTokens() throws RecognitionException { } break; case 42 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:1:174: MISC + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:1:174: T56 { - mMISC(); + mT56(); } break; case 43 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:1:179: WS + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:1:178: MISC { - mWS(); + mMISC(); } break; case 44 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:1:182: EOL + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:1:183: WS { - mEOL(); + mWS(); } break; case 45 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:1:186: INT + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:1:186: EOL { - mINT(); + mEOL(); } break; case 46 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:1:190: FLOAT + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:1:190: INT { - mFLOAT(); + mINT(); } break; case 47 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:1:196: STRING + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:1:194: FLOAT { - mSTRING(); + mFLOAT(); } break; case 48 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:1:203: BOOL + // 
C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:1:200: STRING { - mBOOL(); + mSTRING(); } break; case 49 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:1:208: ID + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:1:207: BOOL { - mID(); + mBOOL(); } break; case 50 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:1:211: SH_STYLE_SINGLE_LINE_COMMENT + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:1:212: ID { - mSH_STYLE_SINGLE_LINE_COMMENT(); + mID(); } break; case 51 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:1:240: C_STYLE_SINGLE_LINE_COMMENT + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:1:215: SH_STYLE_SINGLE_LINE_COMMENT { - mC_STYLE_SINGLE_LINE_COMMENT(); + mSH_STYLE_SINGLE_LINE_COMMENT(); } break; case 52 : - // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:1:268: MULTI_LINE_COMMENT + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:1:244: C_STYLE_SINGLE_LINE_COMMENT + { + mC_STYLE_SINGLE_LINE_COMMENT(); + + } + break; + case 53 : + // C:\Projects\jboss-rules\drools-compiler\src\main\resources\org\drools\lang\drl.g:1:272: MULTI_LINE_COMMENT { mMULTI_LINE_COMMENT(); @@ -2059,44 +2091,44 @@ class DFA12 extends DFA { public int predict(IntStream input) throws RecognitionException { return predict(input, s0); } - DFA.State s394 = new DFA.State() {{alt=1;}}; - DFA.State s41 = new DFA.State() {{alt=49;}}; - DFA.State s361 = new DFA.State() { + DFA.State s404 = new DFA.State() {{alt=1;}}; + DFA.State s41 = new DFA.State() {{alt=50;}}; + DFA.State s371 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_361 = input.LA(1); - if ( (LA12_361>='0' && LA12_361<='9')||(LA12_361>='A' && LA12_361<='Z')||LA12_361=='_'||(LA12_361>='a' && LA12_361<='z') ) {return s41;} - return s394; + int LA12_371 = input.LA(1); + if ( (LA12_371>='0' && LA12_371<='9')||(LA12_371>='A' && LA12_371<='Z')||LA12_371=='_'||(LA12_371>='a' && LA12_371<='z') ) {return s41;} + return s404; } }; - DFA.State s321 = new DFA.State() { + DFA.State s331 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_321 = input.LA(1); - if ( LA12_321=='e' ) {return s361;} + int LA12_331 = input.LA(1); + if ( LA12_331=='e' ) {return s371;} return s41; } }; - DFA.State s269 = new DFA.State() { + DFA.State s276 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_269 = input.LA(1); - if ( LA12_269=='g' ) {return s321;} + int LA12_276 = input.LA(1); + if ( LA12_276=='g' ) {return s331;} return s41; } }; - DFA.State s201 = new DFA.State() { + DFA.State s205 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_201 = input.LA(1); - if ( LA12_201=='a' ) {return s269;} + int LA12_205 = input.LA(1); + if ( LA12_205=='a' ) {return s276;} return s41; } }; - DFA.State s128 = new DFA.State() { + DFA.State s129 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_128 = input.LA(1); - if ( LA12_128=='k' ) {return s201;} + int LA12_129 = input.LA(1); + if ( LA12_129=='k' ) {return s205;} return s41; } @@ -2104,7 +2136,7 @@ public DFA.State transition(IntStream input) 
throws RecognitionException { DFA.State s44 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { int LA12_44 = input.LA(1); - if ( LA12_44=='c' ) {return s128;} + if ( LA12_44=='c' ) {return s129;} return s41; } @@ -2125,35 +2157,35 @@ public DFA.State transition(IntStream input) throws RecognitionException { } }; - DFA.State s364 = new DFA.State() {{alt=3;}}; - DFA.State s324 = new DFA.State() { + DFA.State s374 = new DFA.State() {{alt=3;}}; + DFA.State s334 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_324 = input.LA(1); - if ( (LA12_324>='0' && LA12_324<='9')||(LA12_324>='A' && LA12_324<='Z')||LA12_324=='_'||(LA12_324>='a' && LA12_324<='z') ) {return s41;} - return s364; + int LA12_334 = input.LA(1); + if ( (LA12_334>='0' && LA12_334<='9')||(LA12_334>='A' && LA12_334<='Z')||LA12_334=='_'||(LA12_334>='a' && LA12_334<='z') ) {return s41;} + return s374; } }; - DFA.State s272 = new DFA.State() { + DFA.State s279 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_272 = input.LA(1); - if ( LA12_272=='t' ) {return s324;} + int LA12_279 = input.LA(1); + if ( LA12_279=='t' ) {return s334;} return s41; } }; - DFA.State s204 = new DFA.State() { + DFA.State s208 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_204 = input.LA(1); - if ( LA12_204=='r' ) {return s272;} + int LA12_208 = input.LA(1); + if ( LA12_208=='r' ) {return s279;} return s41; } }; - DFA.State s131 = new DFA.State() { + DFA.State s132 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_131 = input.LA(1); - if ( LA12_131=='o' ) {return s204;} + int LA12_132 = input.LA(1); + if ( LA12_132=='o' ) {return s208;} return s41; } @@ -2161,7 +2193,7 @@ public DFA.State transition(IntStream input) throws RecognitionException { DFA.State s48 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { int LA12_48 = input.LA(1); - if ( LA12_48=='p' ) {return s131;} + if ( LA12_48=='p' ) {return s132;} return s41; } @@ -2174,84 +2206,84 @@ public DFA.State transition(IntStream input) throws RecognitionException { } }; - DFA.State s416 = new DFA.State() {{alt=4;}}; - DFA.State s396 = new DFA.State() { + DFA.State s426 = new DFA.State() {{alt=4;}}; + DFA.State s406 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_396 = input.LA(1); - if ( (LA12_396>='0' && LA12_396<='9')||(LA12_396>='A' && LA12_396<='Z')||LA12_396=='_'||(LA12_396>='a' && LA12_396<='z') ) {return s41;} - return s416; + int LA12_406 = input.LA(1); + if ( (LA12_406>='0' && LA12_406<='9')||(LA12_406>='A' && LA12_406<='Z')||LA12_406=='_'||(LA12_406>='a' && LA12_406<='z') ) {return s41;} + return s426; } }; - DFA.State s366 = new DFA.State() { + DFA.State s376 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_366 = input.LA(1); - if ( LA12_366=='r' ) {return s396;} + int LA12_376 = input.LA(1); + if ( LA12_376=='r' ) {return s406;} return s41; } }; - DFA.State s327 = new DFA.State() { + DFA.State s337 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_327 = input.LA(1); - if ( LA12_327=='e' ) {return s366;} + int LA12_337 = input.LA(1); + if ( LA12_337=='e' ) {return s376;} return s41; } }; - DFA.State s275 = new 
DFA.State() { + DFA.State s282 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_275 = input.LA(1); - if ( LA12_275=='d' ) {return s327;} + int LA12_282 = input.LA(1); + if ( LA12_282=='d' ) {return s337;} return s41; } }; - DFA.State s207 = new DFA.State() { + DFA.State s211 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_207 = input.LA(1); - if ( LA12_207=='n' ) {return s275;} + int LA12_211 = input.LA(1); + if ( LA12_211=='n' ) {return s282;} return s41; } }; - DFA.State s134 = new DFA.State() { + DFA.State s135 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_134 = input.LA(1); - if ( LA12_134=='a' ) {return s207;} + int LA12_135 = input.LA(1); + if ( LA12_135=='a' ) {return s211;} return s41; } }; - DFA.State s369 = new DFA.State() {{alt=37;}}; - DFA.State s330 = new DFA.State() { + DFA.State s379 = new DFA.State() {{alt=38;}}; + DFA.State s340 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_330 = input.LA(1); - if ( (LA12_330>='0' && LA12_330<='9')||(LA12_330>='A' && LA12_330<='Z')||LA12_330=='_'||(LA12_330>='a' && LA12_330<='z') ) {return s41;} - return s369; + int LA12_340 = input.LA(1); + if ( (LA12_340>='0' && LA12_340<='9')||(LA12_340>='A' && LA12_340<='Z')||LA12_340=='_'||(LA12_340>='a' && LA12_340<='z') ) {return s41;} + return s379; } }; - DFA.State s278 = new DFA.State() { + DFA.State s285 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_278 = input.LA(1); - if ( LA12_278=='s' ) {return s330;} + int LA12_285 = input.LA(1); + if ( LA12_285=='s' ) {return s340;} return s41; } }; - DFA.State s210 = new DFA.State() { + DFA.State s214 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_210 = input.LA(1); - if ( LA12_210=='t' ) {return s278;} + int LA12_214 = input.LA(1); + if ( LA12_214=='t' ) {return s285;} return s41; } }; - DFA.State s135 = new DFA.State() { + DFA.State s136 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_135 = input.LA(1); - if ( LA12_135=='s' ) {return s210;} + int LA12_136 = input.LA(1); + if ( LA12_136=='s' ) {return s214;} return s41; } @@ -2260,29 +2292,29 @@ public DFA.State transition(IntStream input) throws RecognitionException { public DFA.State transition(IntStream input) throws RecognitionException { switch ( input.LA(1) ) { case 'p': - return s134; + return s135; case 'i': - return s135; + return s136; default: return s41; } } }; - DFA.State s281 = new DFA.State() {{alt=39;}}; - DFA.State s213 = new DFA.State() { + DFA.State s288 = new DFA.State() {{alt=40;}}; + DFA.State s217 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_213 = input.LA(1); - if ( (LA12_213>='0' && LA12_213<='9')||(LA12_213>='A' && LA12_213<='Z')||LA12_213=='_'||(LA12_213>='a' && LA12_213<='z') ) {return s41;} - return s281; + int LA12_217 = input.LA(1); + if ( (LA12_217>='0' && LA12_217<='9')||(LA12_217>='A' && LA12_217<='Z')||LA12_217=='_'||(LA12_217>='a' && LA12_217<='z') ) {return s41;} + return s288; } }; - DFA.State s138 = new DFA.State() { + DFA.State s139 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_138 = input.LA(1); - if ( LA12_138=='l' ) {return s213;} + int 
LA12_139 = input.LA(1); + if ( LA12_139=='l' ) {return s217;} return s41; } @@ -2290,24 +2322,24 @@ public DFA.State transition(IntStream input) throws RecognitionException { DFA.State s52 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { int LA12_52 = input.LA(1); - if ( LA12_52=='a' ) {return s138;} + if ( LA12_52=='a' ) {return s139;} return s41; } }; - DFA.State s216 = new DFA.State() {{alt=13;}}; - DFA.State s141 = new DFA.State() { + DFA.State s220 = new DFA.State() {{alt=13;}}; + DFA.State s142 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_141 = input.LA(1); - if ( (LA12_141>='0' && LA12_141<='9')||(LA12_141>='A' && LA12_141<='Z')||LA12_141=='_'||(LA12_141>='a' && LA12_141<='z') ) {return s41;} - return s216; + int LA12_142 = input.LA(1); + if ( (LA12_142>='0' && LA12_142<='9')||(LA12_142>='A' && LA12_142<='Z')||LA12_142=='_'||(LA12_142>='a' && LA12_142<='z') ) {return s41;} + return s220; } }; DFA.State s53 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { int LA12_53 = input.LA(1); - if ( LA12_53=='d' ) {return s141;} + if ( LA12_53=='d' ) {return s142;} return s41; } @@ -2329,35 +2361,35 @@ public DFA.State transition(IntStream input) throws RecognitionException { } } }; - DFA.State s371 = new DFA.State() {{alt=5;}}; - DFA.State s333 = new DFA.State() { + DFA.State s381 = new DFA.State() {{alt=5;}}; + DFA.State s343 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_333 = input.LA(1); - if ( (LA12_333>='0' && LA12_333<='9')||(LA12_333>='A' && LA12_333<='Z')||LA12_333=='_'||(LA12_333>='a' && LA12_333<='z') ) {return s41;} - return s371; + int LA12_343 = input.LA(1); + if ( (LA12_343>='0' && LA12_343<='9')||(LA12_343>='A' && LA12_343<='Z')||LA12_343=='_'||(LA12_343>='a' && LA12_343<='z') ) {return s41;} + return s381; } }; - DFA.State s283 = new DFA.State() { + DFA.State s290 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_283 = input.LA(1); - if ( LA12_283=='l' ) {return s333;} + int LA12_290 = input.LA(1); + if ( LA12_290=='l' ) {return s343;} return s41; } }; - DFA.State s218 = new DFA.State() { + DFA.State s222 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_218 = input.LA(1); - if ( LA12_218=='a' ) {return s283;} + int LA12_222 = input.LA(1); + if ( LA12_222=='a' ) {return s290;} return s41; } }; - DFA.State s144 = new DFA.State() { + DFA.State s145 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_144 = input.LA(1); - if ( LA12_144=='b' ) {return s218;} + int LA12_145 = input.LA(1); + if ( LA12_145=='b' ) {return s222;} return s41; } @@ -2365,7 +2397,7 @@ public DFA.State transition(IntStream input) throws RecognitionException { DFA.State s56 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { int LA12_56 = input.LA(1); - if ( LA12_56=='o' ) {return s144;} + if ( LA12_56=='o' ) {return s145;} return s41; } @@ -2378,84 +2410,84 @@ public DFA.State transition(IntStream input) throws RecognitionException { } }; - DFA.State s418 = new DFA.State() {{alt=6;}}; - DFA.State s399 = new DFA.State() { + DFA.State s308 = new DFA.State() {{alt=49;}}; + DFA.State s293 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_399 = 
input.LA(1); - if ( (LA12_399>='0' && LA12_399<='9')||(LA12_399>='A' && LA12_399<='Z')||LA12_399=='_'||(LA12_399>='a' && LA12_399<='z') ) {return s41;} - return s418; + int LA12_293 = input.LA(1); + if ( (LA12_293>='0' && LA12_293<='9')||(LA12_293>='A' && LA12_293<='Z')||LA12_293=='_'||(LA12_293>='a' && LA12_293<='z') ) {return s41;} + return s308; } }; - DFA.State s373 = new DFA.State() { + DFA.State s225 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_373 = input.LA(1); - if ( LA12_373=='n' ) {return s399;} + int LA12_225 = input.LA(1); + if ( LA12_225=='e' ) {return s293;} return s41; } }; - DFA.State s336 = new DFA.State() { + DFA.State s148 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_336 = input.LA(1); - if ( LA12_336=='o' ) {return s373;} + int LA12_148 = input.LA(1); + if ( LA12_148=='s' ) {return s225;} return s41; } }; - DFA.State s286 = new DFA.State() { + DFA.State s59 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_286 = input.LA(1); - if ( LA12_286=='i' ) {return s336;} + int LA12_59 = input.LA(1); + if ( LA12_59=='l' ) {return s148;} return s41; } }; - DFA.State s221 = new DFA.State() { + DFA.State s428 = new DFA.State() {{alt=6;}}; + DFA.State s409 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_221 = input.LA(1); - if ( LA12_221=='t' ) {return s286;} - return s41; + int LA12_409 = input.LA(1); + if ( (LA12_409>='0' && LA12_409<='9')||(LA12_409>='A' && LA12_409<='Z')||LA12_409=='_'||(LA12_409>='a' && LA12_409<='z') ) {return s41;} + return s428; } }; - DFA.State s147 = new DFA.State() { + DFA.State s383 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_147 = input.LA(1); - if ( LA12_147=='c' ) {return s221;} + int LA12_383 = input.LA(1); + if ( LA12_383=='n' ) {return s409;} return s41; } }; - DFA.State s59 = new DFA.State() { + DFA.State s348 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_59 = input.LA(1); - if ( LA12_59=='n' ) {return s147;} + int LA12_348 = input.LA(1); + if ( LA12_348=='o' ) {return s383;} return s41; } }; - DFA.State s301 = new DFA.State() {{alt=48;}}; - DFA.State s289 = new DFA.State() { + DFA.State s296 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_289 = input.LA(1); - if ( (LA12_289>='0' && LA12_289<='9')||(LA12_289>='A' && LA12_289<='Z')||LA12_289=='_'||(LA12_289>='a' && LA12_289<='z') ) {return s41;} - return s301; + int LA12_296 = input.LA(1); + if ( LA12_296=='i' ) {return s348;} + return s41; } }; - DFA.State s224 = new DFA.State() { + DFA.State s228 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_224 = input.LA(1); - if ( LA12_224=='e' ) {return s289;} + int LA12_228 = input.LA(1); + if ( LA12_228=='t' ) {return s296;} return s41; } }; - DFA.State s150 = new DFA.State() { + DFA.State s151 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_150 = input.LA(1); - if ( LA12_150=='s' ) {return s224;} + int LA12_151 = input.LA(1); + if ( LA12_151=='c' ) {return s228;} return s41; } @@ -2463,7 +2495,7 @@ public DFA.State transition(IntStream input) throws RecognitionException { DFA.State s60 = new DFA.State() { public DFA.State 
transition(IntStream input) throws RecognitionException { int LA12_60 = input.LA(1); - if ( LA12_60=='l' ) {return s150;} + if ( LA12_60=='n' ) {return s151;} return s41; } @@ -2471,10 +2503,10 @@ public DFA.State transition(IntStream input) throws RecognitionException { DFA.State s6 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { switch ( input.LA(1) ) { - case 'u': + case 'a': return s59; - case 'a': + case 'u': return s60; default: @@ -2508,27 +2540,27 @@ public DFA.State transition(IntStream input) throws RecognitionException { } }; - DFA.State s341 = new DFA.State() {{alt=12;}}; - DFA.State s292 = new DFA.State() { + DFA.State s351 = new DFA.State() {{alt=12;}}; + DFA.State s299 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_292 = input.LA(1); - if ( (LA12_292>='0' && LA12_292<='9')||(LA12_292>='A' && LA12_292<='Z')||LA12_292=='_'||(LA12_292>='a' && LA12_292<='z') ) {return s41;} - return s341; + int LA12_299 = input.LA(1); + if ( (LA12_299>='0' && LA12_299<='9')||(LA12_299>='A' && LA12_299<='Z')||LA12_299=='_'||(LA12_299>='a' && LA12_299<='z') ) {return s41;} + return s351; } }; - DFA.State s227 = new DFA.State() { + DFA.State s231 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_227 = input.LA(1); - if ( LA12_227=='y' ) {return s292;} + int LA12_231 = input.LA(1); + if ( LA12_231=='y' ) {return s299;} return s41; } }; - DFA.State s153 = new DFA.State() { + DFA.State s154 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_153 = input.LA(1); - if ( LA12_153=='r' ) {return s227;} + int LA12_154 = input.LA(1); + if ( LA12_154=='r' ) {return s231;} return s41; } @@ -2536,7 +2568,7 @@ public DFA.State transition(IntStream input) throws RecognitionException { DFA.State s66 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { int LA12_66 = input.LA(1); - if ( LA12_66=='e' ) {return s153;} + if ( LA12_66=='e' ) {return s154;} return s41; } @@ -2549,19 +2581,19 @@ public DFA.State transition(IntStream input) throws RecognitionException { } }; - DFA.State s295 = new DFA.State() {{alt=14;}}; - DFA.State s230 = new DFA.State() { + DFA.State s302 = new DFA.State() {{alt=14;}}; + DFA.State s234 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_230 = input.LA(1); - if ( (LA12_230>='0' && LA12_230<='9')||(LA12_230>='A' && LA12_230<='Z')||LA12_230=='_'||(LA12_230>='a' && LA12_230<='z') ) {return s41;} - return s295; + int LA12_234 = input.LA(1); + if ( (LA12_234>='0' && LA12_234<='9')||(LA12_234>='A' && LA12_234<='Z')||LA12_234=='_'||(LA12_234>='a' && LA12_234<='z') ) {return s41;} + return s302; } }; - DFA.State s156 = new DFA.State() { + DFA.State s157 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_156 = input.LA(1); - if ( LA12_156=='e' ) {return s230;} + int LA12_157 = input.LA(1); + if ( LA12_157=='e' ) {return s234;} return s41; } @@ -2569,7 +2601,7 @@ public DFA.State transition(IntStream input) throws RecognitionException { DFA.State s69 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { int LA12_69 = input.LA(1); - if ( LA12_69=='l' ) {return s156;} + if ( LA12_69=='l' ) {return s157;} return s41; } @@ -2582,19 +2614,19 @@ public DFA.State transition(IntStream input) throws 
RecognitionException { } }; - DFA.State s297 = new DFA.State() {{alt=15;}}; - DFA.State s233 = new DFA.State() { + DFA.State s304 = new DFA.State() {{alt=15;}}; + DFA.State s237 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_233 = input.LA(1); - if ( (LA12_233>='0' && LA12_233<='9')||(LA12_233>='A' && LA12_233<='Z')||LA12_233=='_'||(LA12_233>='a' && LA12_233<='z') ) {return s41;} - return s297; + int LA12_237 = input.LA(1); + if ( (LA12_237>='0' && LA12_237<='9')||(LA12_237>='A' && LA12_237<='Z')||LA12_237=='_'||(LA12_237>='a' && LA12_237<='z') ) {return s41;} + return s304; } }; - DFA.State s159 = new DFA.State() { + DFA.State s160 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_159 = input.LA(1); - if ( LA12_159=='n' ) {return s233;} + int LA12_160 = input.LA(1); + if ( LA12_160=='n' ) {return s237;} return s41; } @@ -2602,7 +2634,7 @@ public DFA.State transition(IntStream input) throws RecognitionException { DFA.State s72 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { int LA12_72 = input.LA(1); - if ( LA12_72=='e' ) {return s159;} + if ( LA12_72=='e' ) {return s160;} return s41; } @@ -2616,19 +2648,19 @@ public DFA.State transition(IntStream input) throws RecognitionException { } }; DFA.State s15 = new DFA.State() {{alt=16;}}; - DFA.State s299 = new DFA.State() {{alt=17;}}; - DFA.State s236 = new DFA.State() { + DFA.State s306 = new DFA.State() {{alt=17;}}; + DFA.State s240 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_236 = input.LA(1); - if ( (LA12_236>='0' && LA12_236<='9')||(LA12_236>='A' && LA12_236<='Z')||LA12_236=='_'||(LA12_236>='a' && LA12_236<='z') ) {return s41;} - return s299; + int LA12_240 = input.LA(1); + if ( (LA12_240>='0' && LA12_240<='9')||(LA12_240>='A' && LA12_240<='Z')||LA12_240=='_'||(LA12_240>='a' && LA12_240<='z') ) {return s41;} + return s306; } }; - DFA.State s162 = new DFA.State() { + DFA.State s163 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_162 = input.LA(1); - if ( LA12_162=='n' ) {return s236;} + int LA12_163 = input.LA(1); + if ( LA12_163=='n' ) {return s240;} return s41; } @@ -2636,23 +2668,23 @@ public DFA.State transition(IntStream input) throws RecognitionException { DFA.State s75 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { int LA12_75 = input.LA(1); - if ( LA12_75=='e' ) {return s162;} + if ( LA12_75=='e' ) {return s163;} return s41; } }; - DFA.State s239 = new DFA.State() { + DFA.State s243 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_239 = input.LA(1); - if ( (LA12_239>='0' && LA12_239<='9')||(LA12_239>='A' && LA12_239<='Z')||LA12_239=='_'||(LA12_239>='a' && LA12_239<='z') ) {return s41;} - return s301; + int LA12_243 = input.LA(1); + if ( (LA12_243>='0' && LA12_243<='9')||(LA12_243>='A' && LA12_243<='Z')||LA12_243=='_'||(LA12_243>='a' && LA12_243<='z') ) {return s41;} + return s308; } }; - DFA.State s165 = new DFA.State() { + DFA.State s166 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_165 = input.LA(1); - if ( LA12_165=='e' ) {return s239;} + int LA12_166 = input.LA(1); + if ( LA12_166=='e' ) {return s243;} return s41; } @@ -2660,7 +2692,7 @@ public DFA.State transition(IntStream input) throws 
RecognitionException { DFA.State s76 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { int LA12_76 = input.LA(1); - if ( LA12_76=='u' ) {return s165;} + if ( LA12_76=='u' ) {return s166;} return s41; } @@ -2679,133 +2711,158 @@ public DFA.State transition(IntStream input) throws RecognitionException { } } }; - DFA.State s376 = new DFA.State() {{alt=22;}}; - DFA.State s343 = new DFA.State() { + DFA.State s246 = new DFA.State() {{alt=36;}}; + DFA.State s169 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_343 = input.LA(1); - if ( LA12_343=='-' ) {return s376;} + int LA12_169 = input.LA(1); + if ( (LA12_169>='0' && LA12_169<='9')||(LA12_169>='A' && LA12_169<='Z')||LA12_169=='_'||(LA12_169>='a' && LA12_169<='z') ) {return s41;} + return s246; + + } + }; + DFA.State s79 = new DFA.State() { + public DFA.State transition(IntStream input) throws RecognitionException { + int LA12_79 = input.LA(1); + if ( LA12_79=='d' ) {return s169;} return s41; } }; - DFA.State s303 = new DFA.State() { + DFA.State s310 = new DFA.State() {{alt=21;}}; + DFA.State s248 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_303 = input.LA(1); - if ( LA12_303=='a' ) {return s343;} + int LA12_248 = input.LA(1); + if ( LA12_248=='-' ) {return s310;} return s41; } }; - DFA.State s242 = new DFA.State() { + DFA.State s172 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_242 = input.LA(1); - if ( LA12_242=='d' ) {return s303;} + int LA12_172 = input.LA(1); + if ( LA12_172=='o' ) {return s248;} return s41; } }; - DFA.State s168 = new DFA.State() { + DFA.State s80 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_168 = input.LA(1); - if ( LA12_168=='n' ) {return s242;} + int LA12_80 = input.LA(1); + if ( LA12_80=='t' ) {return s172;} return s41; } }; - DFA.State s79 = new DFA.State() { + DFA.State s386 = new DFA.State() {{alt=23;}}; + DFA.State s353 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_79 = input.LA(1); - if ( LA12_79=='e' ) {return s168;} + int LA12_353 = input.LA(1); + if ( LA12_353=='-' ) {return s386;} return s41; } }; - DFA.State s245 = new DFA.State() {{alt=35;}}; - DFA.State s171 = new DFA.State() { + DFA.State s313 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_171 = input.LA(1); - if ( (LA12_171>='0' && LA12_171<='9')||(LA12_171>='A' && LA12_171<='Z')||LA12_171=='_'||(LA12_171>='a' && LA12_171<='z') ) {return s41;} - return s245; + int LA12_313 = input.LA(1); + if ( LA12_313=='a' ) {return s353;} + return s41; } }; - DFA.State s80 = new DFA.State() { + DFA.State s251 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_80 = input.LA(1); - if ( LA12_80=='d' ) {return s171;} + int LA12_251 = input.LA(1); + if ( LA12_251=='d' ) {return s313;} return s41; } }; - DFA.State s432 = new DFA.State() {{alt=18;}}; - DFA.State s429 = new DFA.State() { + DFA.State s175 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_429 = input.LA(1); - if ( (LA12_429>='0' && LA12_429<='9')||(LA12_429>='A' && LA12_429<='Z')||LA12_429=='_'||(LA12_429>='a' && LA12_429<='z') ) {return s41;} - return s432; + int LA12_175 = input.LA(1); + if ( 
LA12_175=='n' ) {return s251;} + return s41; } }; - DFA.State s420 = new DFA.State() { + DFA.State s81 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_420 = input.LA(1); - if ( LA12_420=='s' ) {return s429;} + int LA12_81 = input.LA(1); + if ( LA12_81=='e' ) {return s175;} return s41; } }; - DFA.State s402 = new DFA.State() { + DFA.State s442 = new DFA.State() {{alt=18;}}; + DFA.State s439 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_402 = input.LA(1); - if ( LA12_402=='e' ) {return s420;} + int LA12_439 = input.LA(1); + if ( (LA12_439>='0' && LA12_439<='9')||(LA12_439>='A' && LA12_439<='Z')||LA12_439=='_'||(LA12_439>='a' && LA12_439<='z') ) {return s41;} + return s442; + + } + }; + DFA.State s430 = new DFA.State() { + public DFA.State transition(IntStream input) throws RecognitionException { + int LA12_430 = input.LA(1); + if ( LA12_430=='s' ) {return s439;} return s41; } }; - DFA.State s379 = new DFA.State() { + DFA.State s412 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_379 = input.LA(1); - if ( LA12_379=='t' ) {return s402;} + int LA12_412 = input.LA(1); + if ( LA12_412=='e' ) {return s430;} return s41; } }; - DFA.State s346 = new DFA.State() { + DFA.State s389 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_346 = input.LA(1); - if ( LA12_346=='u' ) {return s379;} + int LA12_389 = input.LA(1); + if ( LA12_389=='t' ) {return s412;} return s41; } }; - DFA.State s306 = new DFA.State() { + DFA.State s356 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_306 = input.LA(1); - if ( LA12_306=='b' ) {return s346;} + int LA12_356 = input.LA(1); + if ( LA12_356=='u' ) {return s389;} return s41; } }; - DFA.State s247 = new DFA.State() { + DFA.State s316 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_247 = input.LA(1); - if ( LA12_247=='i' ) {return s306;} + int LA12_316 = input.LA(1); + if ( LA12_316=='b' ) {return s356;} return s41; } }; - DFA.State s174 = new DFA.State() { + DFA.State s254 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_174 = input.LA(1); - if ( LA12_174=='r' ) {return s247;} + int LA12_254 = input.LA(1); + if ( LA12_254=='i' ) {return s316;} return s41; } }; - DFA.State s81 = new DFA.State() { + DFA.State s178 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_81 = input.LA(1); - if ( LA12_81=='t' ) {return s174;} + int LA12_178 = input.LA(1); + if ( LA12_178=='r' ) {return s254;} + return s41; + + } + }; + DFA.State s82 = new DFA.State() { + public DFA.State transition(IntStream input) throws RecognitionException { + int LA12_82 = input.LA(1); + if ( LA12_82=='t' ) {return s178;} return s41; } @@ -2813,73 +2870,76 @@ public DFA.State transition(IntStream input) throws RecognitionException { DFA.State s17 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { switch ( input.LA(1) ) { - case 'g': + case 'n': return s79; - case 'n': + case 'u': return s80; - case 't': + case 'g': return s81; + case 't': + return s82; + default: return s41; } } }; - DFA.State s423 = new DFA.State() {{alt=19;}}; - DFA.State s405 = new DFA.State() { + DFA.State s433 = new DFA.State() {{alt=19;}}; + 
DFA.State s415 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_405 = input.LA(1); - if ( (LA12_405>='0' && LA12_405<='9')||(LA12_405>='A' && LA12_405<='Z')||LA12_405=='_'||(LA12_405>='a' && LA12_405<='z') ) {return s41;} - return s423; + int LA12_415 = input.LA(1); + if ( (LA12_415>='0' && LA12_415<='9')||(LA12_415>='A' && LA12_415<='Z')||LA12_415=='_'||(LA12_415>='a' && LA12_415<='z') ) {return s41;} + return s433; } }; - DFA.State s382 = new DFA.State() { + DFA.State s392 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_382 = input.LA(1); - if ( LA12_382=='e' ) {return s405;} + int LA12_392 = input.LA(1); + if ( LA12_392=='e' ) {return s415;} return s41; } }; - DFA.State s349 = new DFA.State() { + DFA.State s359 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_349 = input.LA(1); - if ( LA12_349=='c' ) {return s382;} + int LA12_359 = input.LA(1); + if ( LA12_359=='c' ) {return s392;} return s41; } }; - DFA.State s309 = new DFA.State() { + DFA.State s319 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_309 = input.LA(1); - if ( LA12_309=='n' ) {return s349;} + int LA12_319 = input.LA(1); + if ( LA12_319=='n' ) {return s359;} return s41; } }; - DFA.State s250 = new DFA.State() { + DFA.State s257 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_250 = input.LA(1); - if ( LA12_250=='e' ) {return s309;} + int LA12_257 = input.LA(1); + if ( LA12_257=='e' ) {return s319;} return s41; } }; - DFA.State s177 = new DFA.State() { + DFA.State s181 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_177 = input.LA(1); - if ( LA12_177=='i' ) {return s250;} + int LA12_181 = input.LA(1); + if ( LA12_181=='i' ) {return s257;} return s41; } }; - DFA.State s84 = new DFA.State() { + DFA.State s85 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_84 = input.LA(1); - if ( LA12_84=='l' ) {return s177;} + int LA12_85 = input.LA(1); + if ( LA12_85=='l' ) {return s181;} return s41; } @@ -2887,29 +2947,29 @@ public DFA.State transition(IntStream input) throws RecognitionException { DFA.State s18 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { int LA12_18 = input.LA(1); - if ( LA12_18=='a' ) {return s84;} + if ( LA12_18=='a' ) {return s85;} return s41; } }; - DFA.State s180 = new DFA.State() {{alt=20;}}; - DFA.State s253 = new DFA.State() {{alt=38;}}; - DFA.State s181 = new DFA.State() { + DFA.State s260 = new DFA.State() {{alt=39;}}; + DFA.State s184 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_181 = input.LA(1); - if ( (LA12_181>='0' && LA12_181<='9')||(LA12_181>='A' && LA12_181<='Z')||LA12_181=='_'||(LA12_181>='a' && LA12_181<='z') ) {return s41;} - return s253; + int LA12_184 = input.LA(1); + if ( (LA12_184>='0' && LA12_184<='9')||(LA12_184>='A' && LA12_184<='Z')||LA12_184=='_'||(LA12_184>='a' && LA12_184<='z') ) {return s41;} + return s260; } }; - DFA.State s87 = new DFA.State() { + DFA.State s185 = new DFA.State() {{alt=20;}}; + DFA.State s88 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { switch ( input.LA(1) ) { - case '-': - return s180; - case 't': - return s181; + 
return s184; + + case '-': + return s185; default: return s41; @@ -2919,24 +2979,24 @@ public DFA.State transition(IntStream input) throws RecognitionException { DFA.State s19 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { int LA12_19 = input.LA(1); - if ( LA12_19=='o' ) {return s87;} + if ( LA12_19=='o' ) {return s88;} return s41; } }; - DFA.State s255 = new DFA.State() {{alt=21;}}; - DFA.State s184 = new DFA.State() { + DFA.State s262 = new DFA.State() {{alt=22;}}; + DFA.State s188 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_184 = input.LA(1); - if ( LA12_184=='-' ) {return s255;} + int LA12_188 = input.LA(1); + if ( LA12_188=='-' ) {return s262;} return s41; } }; - DFA.State s90 = new DFA.State() { + DFA.State s91 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_90 = input.LA(1); - if ( LA12_90=='r' ) {return s184;} + int LA12_91 = input.LA(1); + if ( LA12_91=='r' ) {return s188;} return s41; } @@ -2944,64 +3004,64 @@ public DFA.State transition(IntStream input) throws RecognitionException { DFA.State s20 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { int LA12_20 = input.LA(1); - if ( LA12_20=='o' ) {return s90;} + if ( LA12_20=='o' ) {return s91;} return s41; } }; - DFA.State s425 = new DFA.State() {{alt=23;}}; - DFA.State s408 = new DFA.State() { + DFA.State s435 = new DFA.State() {{alt=24;}}; + DFA.State s418 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_408 = input.LA(1); - if ( (LA12_408>='0' && LA12_408<='9')||(LA12_408>='A' && LA12_408<='Z')||LA12_408=='_'||(LA12_408>='a' && LA12_408<='z') ) {return s41;} - return s425; + int LA12_418 = input.LA(1); + if ( (LA12_418>='0' && LA12_418<='9')||(LA12_418>='A' && LA12_418<='Z')||LA12_418=='_'||(LA12_418>='a' && LA12_418<='z') ) {return s41;} + return s435; } }; - DFA.State s385 = new DFA.State() { + DFA.State s395 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_385 = input.LA(1); - if ( LA12_385=='n' ) {return s408;} + int LA12_395 = input.LA(1); + if ( LA12_395=='n' ) {return s418;} return s41; } }; - DFA.State s352 = new DFA.State() { + DFA.State s362 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_352 = input.LA(1); - if ( LA12_352=='o' ) {return s385;} + int LA12_362 = input.LA(1); + if ( LA12_362=='o' ) {return s395;} return s41; } }; - DFA.State s312 = new DFA.State() { + DFA.State s322 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_312 = input.LA(1); - if ( LA12_312=='i' ) {return s352;} + int LA12_322 = input.LA(1); + if ( LA12_322=='i' ) {return s362;} return s41; } }; - DFA.State s258 = new DFA.State() { + DFA.State s265 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_258 = input.LA(1); - if ( LA12_258=='t' ) {return s312;} + int LA12_265 = input.LA(1); + if ( LA12_265=='t' ) {return s322;} return s41; } }; - DFA.State s187 = new DFA.State() { + DFA.State s191 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_187 = input.LA(1); - if ( LA12_187=='a' ) {return s258;} + int LA12_191 = input.LA(1); + if ( LA12_191=='a' ) {return s265;} return s41; } }; - DFA.State s93 = new 
DFA.State() { + DFA.State s94 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_93 = input.LA(1); - if ( LA12_93=='r' ) {return s187;} + int LA12_94 = input.LA(1); + if ( LA12_94=='r' ) {return s191;} return s41; } @@ -3009,112 +3069,112 @@ public DFA.State transition(IntStream input) throws RecognitionException { DFA.State s21 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { int LA12_21 = input.LA(1); - if ( LA12_21=='u' ) {return s93;} + if ( LA12_21=='u' ) {return s94;} return s41; } }; - DFA.State s190 = new DFA.State() {{alt=24;}}; - DFA.State s96 = new DFA.State() { + DFA.State s194 = new DFA.State() {{alt=25;}}; + DFA.State s97 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_96 = input.LA(1); - if ( (LA12_96>='0' && LA12_96<='9')||(LA12_96>='A' && LA12_96<='Z')||LA12_96=='_'||(LA12_96>='a' && LA12_96<='z') ) {return s41;} - return s190; + int LA12_97 = input.LA(1); + if ( (LA12_97>='0' && LA12_97<='9')||(LA12_97>='A' && LA12_97<='Z')||LA12_97=='_'||(LA12_97>='a' && LA12_97<='z') ) {return s41;} + return s194; } }; DFA.State s22 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { int LA12_22 = input.LA(1); - if ( LA12_22=='r' ) {return s96;} + if ( LA12_22=='r' ) {return s97;} return s41; } }; - DFA.State s23 = new DFA.State() {{alt=25;}}; - DFA.State s99 = new DFA.State() {{alt=27;}}; - DFA.State s100 = new DFA.State() {{alt=26;}}; + DFA.State s23 = new DFA.State() {{alt=26;}}; + DFA.State s100 = new DFA.State() {{alt=28;}}; + DFA.State s101 = new DFA.State() {{alt=27;}}; DFA.State s24 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { int LA12_24 = input.LA(1); - if ( LA12_24=='=' ) {return s99;} - return s100; + if ( LA12_24=='=' ) {return s100;} + return s101; } }; - DFA.State s101 = new DFA.State() {{alt=29;}}; - DFA.State s102 = new DFA.State() {{alt=28;}}; + DFA.State s102 = new DFA.State() {{alt=30;}}; + DFA.State s103 = new DFA.State() {{alt=29;}}; DFA.State s25 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { int LA12_25 = input.LA(1); - if ( LA12_25=='=' ) {return s101;} - return s102; + if ( LA12_25=='=' ) {return s102;} + return s103; } }; - DFA.State s103 = new DFA.State() {{alt=30;}}; - DFA.State s40 = new DFA.State() {{alt=42;}}; + DFA.State s104 = new DFA.State() {{alt=31;}}; + DFA.State s40 = new DFA.State() {{alt=43;}}; DFA.State s26 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { int LA12_26 = input.LA(1); - if ( LA12_26=='=' ) {return s103;} + if ( LA12_26=='=' ) {return s104;} return s40; } }; - DFA.State s427 = new DFA.State() {{alt=31;}}; - DFA.State s411 = new DFA.State() { + DFA.State s437 = new DFA.State() {{alt=32;}}; + DFA.State s421 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_411 = input.LA(1); - if ( (LA12_411>='0' && LA12_411<='9')||(LA12_411>='A' && LA12_411<='Z')||LA12_411=='_'||(LA12_411>='a' && LA12_411<='z') ) {return s41;} - return s427; + int LA12_421 = input.LA(1); + if ( (LA12_421>='0' && LA12_421<='9')||(LA12_421>='A' && LA12_421<='Z')||LA12_421=='_'||(LA12_421>='a' && LA12_421<='z') ) {return s41;} + return s437; } }; - DFA.State s388 = new DFA.State() { + DFA.State s398 = new DFA.State() { public DFA.State transition(IntStream input) throws 
RecognitionException { - int LA12_388 = input.LA(1); - if ( LA12_388=='s' ) {return s411;} + int LA12_398 = input.LA(1); + if ( LA12_398=='s' ) {return s421;} return s41; } }; - DFA.State s355 = new DFA.State() { + DFA.State s365 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_355 = input.LA(1); - if ( LA12_355=='n' ) {return s388;} + int LA12_365 = input.LA(1); + if ( LA12_365=='n' ) {return s398;} return s41; } }; - DFA.State s315 = new DFA.State() { + DFA.State s325 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_315 = input.LA(1); - if ( LA12_315=='i' ) {return s355;} + int LA12_325 = input.LA(1); + if ( LA12_325=='i' ) {return s365;} return s41; } }; - DFA.State s261 = new DFA.State() { + DFA.State s268 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_261 = input.LA(1); - if ( LA12_261=='a' ) {return s315;} + int LA12_268 = input.LA(1); + if ( LA12_268=='a' ) {return s325;} return s41; } }; - DFA.State s192 = new DFA.State() { + DFA.State s196 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_192 = input.LA(1); - if ( LA12_192=='t' ) {return s261;} + int LA12_196 = input.LA(1); + if ( LA12_196=='t' ) {return s268;} return s41; } }; - DFA.State s105 = new DFA.State() { + DFA.State s106 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_105 = input.LA(1); - if ( LA12_105=='n' ) {return s192;} + int LA12_106 = input.LA(1); + if ( LA12_106=='n' ) {return s196;} return s41; } @@ -3122,56 +3182,56 @@ public DFA.State transition(IntStream input) throws RecognitionException { DFA.State s27 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { int LA12_27 = input.LA(1); - if ( LA12_27=='o' ) {return s105;} + if ( LA12_27=='o' ) {return s106;} return s41; } }; - DFA.State s414 = new DFA.State() {{alt=32;}}; - DFA.State s391 = new DFA.State() { + DFA.State s424 = new DFA.State() {{alt=33;}}; + DFA.State s401 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_391 = input.LA(1); - if ( (LA12_391>='0' && LA12_391<='9')||(LA12_391>='A' && LA12_391<='Z')||LA12_391=='_'||(LA12_391>='a' && LA12_391<='z') ) {return s41;} - return s414; + int LA12_401 = input.LA(1); + if ( (LA12_401>='0' && LA12_401<='9')||(LA12_401>='A' && LA12_401<='Z')||LA12_401=='_'||(LA12_401>='a' && LA12_401<='z') ) {return s41;} + return s424; } }; - DFA.State s358 = new DFA.State() { + DFA.State s368 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_358 = input.LA(1); - if ( LA12_358=='s' ) {return s391;} + int LA12_368 = input.LA(1); + if ( LA12_368=='s' ) {return s401;} return s41; } }; - DFA.State s318 = new DFA.State() { + DFA.State s328 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_318 = input.LA(1); - if ( LA12_318=='e' ) {return s358;} + int LA12_328 = input.LA(1); + if ( LA12_328=='e' ) {return s368;} return s41; } }; - DFA.State s264 = new DFA.State() { + DFA.State s271 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_264 = input.LA(1); - if ( LA12_264=='h' ) {return s318;} + int LA12_271 = input.LA(1); + if ( LA12_271=='h' ) {return s328;} return s41; } }; - DFA.State s195 = 
new DFA.State() { + DFA.State s199 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_195 = input.LA(1); - if ( LA12_195=='c' ) {return s264;} + int LA12_199 = input.LA(1); + if ( LA12_199=='c' ) {return s271;} return s41; } }; - DFA.State s108 = new DFA.State() { + DFA.State s109 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_108 = input.LA(1); - if ( LA12_108=='t' ) {return s195;} + int LA12_109 = input.LA(1); + if ( LA12_109=='t' ) {return s199;} return s41; } @@ -3179,18 +3239,18 @@ public DFA.State transition(IntStream input) throws RecognitionException { DFA.State s28 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { int LA12_28 = input.LA(1); - if ( LA12_28=='a' ) {return s108;} + if ( LA12_28=='a' ) {return s109;} return s41; } }; - DFA.State s111 = new DFA.State() {{alt=33;}}; - DFA.State s113 = new DFA.State() {{alt=45;}}; + DFA.State s112 = new DFA.State() {{alt=34;}}; + DFA.State s114 = new DFA.State() {{alt=46;}}; DFA.State s29 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { switch ( input.LA(1) ) { case '>': - return s111; + return s112; case '0': case '1': @@ -3202,45 +3262,45 @@ public DFA.State transition(IntStream input) throws RecognitionException { case '7': case '8': case '9': - return s113; + return s114; default: return s40; } } }; - DFA.State s114 = new DFA.State() {{alt=34;}}; + DFA.State s115 = new DFA.State() {{alt=35;}}; DFA.State s30 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { int LA12_30 = input.LA(1); - if ( LA12_30=='|' ) {return s114;} + if ( LA12_30=='|' ) {return s115;} return s40; } }; - DFA.State s116 = new DFA.State() {{alt=36;}}; + DFA.State s117 = new DFA.State() {{alt=37;}}; DFA.State s31 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { int LA12_31 = input.LA(1); - if ( LA12_31=='&' ) {return s116;} + if ( LA12_31=='&' ) {return s117;} return s40; } }; - DFA.State s32 = new DFA.State() {{alt=40;}}; - DFA.State s267 = new DFA.State() {{alt=41;}}; - DFA.State s198 = new DFA.State() { + DFA.State s32 = new DFA.State() {{alt=41;}}; + DFA.State s274 = new DFA.State() {{alt=42;}}; + DFA.State s202 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_198 = input.LA(1); - if ( (LA12_198>='0' && LA12_198<='9')||(LA12_198>='A' && LA12_198<='Z')||LA12_198=='_'||(LA12_198>='a' && LA12_198<='z') ) {return s41;} - return s267; + int LA12_202 = input.LA(1); + if ( (LA12_202>='0' && LA12_202<='9')||(LA12_202>='A' && LA12_202<='Z')||LA12_202=='_'||(LA12_202>='a' && LA12_202<='z') ) {return s41;} + return s274; } }; - DFA.State s118 = new DFA.State() { + DFA.State s119 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { - int LA12_118 = input.LA(1); - if ( LA12_118=='e' ) {return s198;} + int LA12_119 = input.LA(1); + if ( LA12_119=='e' ) {return s202;} return s41; } @@ -3248,7 +3308,7 @@ public DFA.State transition(IntStream input) throws RecognitionException { DFA.State s33 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { int LA12_33 = input.LA(1); - if ( LA12_33=='s' ) {return s118;} + if ( LA12_33=='s' ) {return s119;} return s41; } @@ -3261,14 +3321,14 @@ public DFA.State transition(IntStream input) throws 
RecognitionException { } }; - DFA.State s35 = new DFA.State() {{alt=43;}}; - DFA.State s36 = new DFA.State() {{alt=44;}}; - DFA.State s123 = new DFA.State() {{alt=46;}}; + DFA.State s35 = new DFA.State() {{alt=44;}}; + DFA.State s36 = new DFA.State() {{alt=45;}}; + DFA.State s124 = new DFA.State() {{alt=47;}}; DFA.State s38 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { switch ( input.LA(1) ) { case '.': - return s123; + return s124; case '0': case '1': @@ -3283,19 +3343,19 @@ public DFA.State transition(IntStream input) throws RecognitionException { return s38; default: - return s113; + return s114; } } }; - DFA.State s39 = new DFA.State() {{alt=47;}}; - DFA.State s42 = new DFA.State() {{alt=50;}}; - DFA.State s126 = new DFA.State() {{alt=51;}}; + DFA.State s39 = new DFA.State() {{alt=48;}}; + DFA.State s42 = new DFA.State() {{alt=51;}}; DFA.State s127 = new DFA.State() {{alt=52;}}; + DFA.State s128 = new DFA.State() {{alt=53;}}; DFA.State s43 = new DFA.State() { public DFA.State transition(IntStream input) throws RecognitionException { int LA12_43 = input.LA(1); - if ( LA12_43=='/' ) {return s126;} - if ( LA12_43=='*' ) {return s127;} + if ( LA12_43=='/' ) {return s127;} + if ( LA12_43=='*' ) {return s128;} NoViableAltException nvae = new NoViableAltException("", 12, 43, input); diff --git a/drools-compiler/src/main/resources/org/drools/lang/drl.g b/drools-compiler/src/main/resources/org/drools/lang/drl.g index 0ef4783694f..0381d34d646 100644 --- a/drools-compiler/src/main/resources/org/drools/lang/drl.g +++ b/drools-compiler/src/main/resources/org/drools/lang/drl.g @@ -272,6 +272,7 @@ rule_attribute returns [AttributeDescr d] | a=agenda_group { d = a; } | a=duration { d = a; } | a=xor_group { d = a; } + | a=auto_focus { d = a; } ; @@ -311,6 +312,30 @@ no_loop returns [AttributeDescr d] ; +auto_focus returns [AttributeDescr d] + @init { + d = null; + } + : + ( + loc='auto-focus' opt_eol ';'? opt_eol + { + d = new AttributeDescr( "auto-focus", "true" ); + d.setLocation( loc.getLine(), loc.getCharPositionInLine() ); + } + ) + | + ( + loc='auto-focus' t=BOOL opt_eol ';'? 
opt_eol + { + d = new AttributeDescr( "auto-focus", t.getText() ); + d.setLocation( loc.getLine(), loc.getCharPositionInLine() ); + } + + ) + + ; + xor_group returns [AttributeDescr d] @init { d = null; diff --git a/drools-compiler/src/test/java/org/drools/lang/RuleParserTest.java b/drools-compiler/src/test/java/org/drools/lang/RuleParserTest.java index c15dedbf4a1..dee0fbee4d6 100644 --- a/drools-compiler/src/test/java/org/drools/lang/RuleParserTest.java +++ b/drools-compiler/src/test/java/org/drools/lang/RuleParserTest.java @@ -100,6 +100,20 @@ public void testNoLoop() throws Exception { } + + public void testAutofocus() throws Exception { + RuleDescr rule = parseResource( "autofocus.drl" ).rule(); + + assertNotNull( rule ); + + assertEquals( "rule1", rule.getName() ); + AttributeDescr att = (AttributeDescr) rule.getAttributes().get( 0 ); + assertEquals("true", att.getValue()); + assertEquals("auto-focus", att.getName()); + + + } + //TODO: uncomment this when antlr bug resolved public void XXXtestConsequenceWithDeclaration() throws Exception { diff --git a/drools-compiler/src/test/resources/org/drools/lang/autofocus.drl b/drools-compiler/src/test/resources/org/drools/lang/autofocus.drl new file mode 100644 index 00000000000..dd741274674 --- /dev/null +++ b/drools-compiler/src/test/resources/org/drools/lang/autofocus.drl @@ -0,0 +1,8 @@ + +rule rule1 + auto-focus true + when + not Cheese(type == "stilton") + then + funky(); +end \ No newline at end of file
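A side note on the drl.g hunk above: the new auto_focus alternative mirrors no_loop, so a bare "auto-focus" is recorded with the attribute value "true", while "auto-focus true|false" records the BOOL token's text. Below is a minimal hedged sketch of how calling code could read that attribute off a parsed rule, using only the RuleDescr/AttributeDescr accessors that appear in the test hunk; the class name and the org.drools.lang.descr package path are assumptions for illustration, not part of the patch.

import org.drools.lang.descr.AttributeDescr;   // package path assumed
import org.drools.lang.descr.RuleDescr;

public class AutoFocusCheck {
    // Returns the auto-focus value ("true"/"false"), or null when the rule
    // does not declare the attribute. getAttributes() is the raw List the
    // parser builds, as used in RuleParserTest above.
    static String autoFocusValue(RuleDescr rule) {
        for (Object o : rule.getAttributes()) {
            AttributeDescr att = (AttributeDescr) o;
            if ("auto-focus".equals(att.getName())) {
                return att.getValue();
            }
        }
        return null;
    }
}

For the autofocus.drl fixture added above, autoFocusValue(...) would yield "true", matching the assertions in testAutofocus.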
4bfc15f2b9f9dd11baaaa887530b40f8a5fb1c65
hbase
HBASE-9227 RESTServer should handle the loginUser correctly

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1514440 13f79535-47bb-0310-9956-ffa450edef68
c
https://github.com/apache/hbase
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java index 231ba2c7e194..f0e5d9c148a2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java @@ -101,16 +101,13 @@ public static void main(String[] args) throws Exception { String principalConfig = conf.get(REST_KERBEROS_PRINCIPAL); Preconditions.checkArgument(principalConfig != null && !principalConfig.isEmpty(), REST_KERBEROS_PRINCIPAL + " should be set if security is enabled"); - String principalName = SecurityUtil.getServerPrincipal(principalConfig, machineName); - UserGroupInformation loginUser = - UserGroupInformation.loginUserFromKeytabAndReturnUGI( - principalName, keytabFilename); + User.login(conf, REST_KEYTAB_FILE, REST_KERBEROS_PRINCIPAL, machineName); + realUser = User.getCurrent().getUGI(); if (conf.get(REST_AUTHENTICATION_TYPE) != null) { containerClass = RESTServletContainer.class; authFilter = new FilterHolder(); authFilter.setClassName(AuthFilter.class.getName()); authFilter.setName("AuthenticationFilter"); - realUser = loginUser; } }
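A compact sketch of the login pattern this hunk adopts: rather than calling UserGroupInformation.loginUserFromKeytabAndReturnUGI directly (which returns a UGI but leaves the process-wide login user untouched), the server now logs in through HBase's User facade and reads the resulting UGI back from User.getCurrent(). The two User calls are taken verbatim from the diff; the wrapper class, the literal config-key values, and the hostname handling are assumptions for illustration.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.security.UserGroupInformation;

public class RestLoginSketch {
    // Keys matching the constants referenced in the diff; the string values
    // here are assumed, not taken from the patch.
    static final String REST_KEYTAB_FILE = "hbase.rest.keytab.file";
    static final String REST_KERBEROS_PRINCIPAL = "hbase.rest.kerberos.principal";

    static UserGroupInformation loginRestUser(Configuration conf, String machineName)
            throws IOException {
        // Resolves the configured principal against machineName, performs the
        // keytab login, and records the result as the current login user.
        User.login(conf, REST_KEYTAB_FILE, REST_KERBEROS_PRINCIPAL, machineName);
        // This UGI is what the REST servlet container later uses as realUser.
        return User.getCurrent().getUGI();
    }
}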
869a002cae63a4e8ab52ec7f2d15d5a2cfbe0c02
drools
[DROOLS-839] fix LogicTransformer with Accumulate
c
https://github.com/kiegroup/drools
diff --git a/drools-compiler/src/test/java/org/drools/compiler/integrationtests/AccumulateTest.java b/drools-compiler/src/test/java/org/drools/compiler/integrationtests/AccumulateTest.java index a51607b4be5..db57691e305 100644 --- a/drools-compiler/src/test/java/org/drools/compiler/integrationtests/AccumulateTest.java +++ b/drools-compiler/src/test/java/org/drools/compiler/integrationtests/AccumulateTest.java @@ -84,10 +84,10 @@ public class AccumulateTest extends CommonTestMethodBase { - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateModify() throws Exception { // read in the source - KieSession wm = getKieSessionFromResources("test_AccumulateModify.drl"); + KieSession wm = getKieSessionFromResources( "test_AccumulateModify.drl" ); final List<?> results = new ArrayList<Object>(); wm.setGlobal( "results", @@ -113,7 +113,7 @@ public void testAccumulateModify() throws Exception { wm.fireAllRules(); // no fire, as per rule constraints assertEquals( 0, - results.size() ); + results.size() ); // ---------------- 2nd scenario final int index = 1; @@ -124,9 +124,9 @@ public void testAccumulateModify() throws Exception { // 1 fire assertEquals( 1, - results.size() ); + results.size() ); assertEquals( 24, - ((Cheesery) results.get( results.size() - 1 )).getTotalAmount() ); + ( (Cheesery) results.get( results.size() - 1 ) ).getTotalAmount() ); // ---------------- 3rd scenario bob.setLikes( "brie" ); @@ -136,9 +136,9 @@ public void testAccumulateModify() throws Exception { // 2 fires assertEquals( 2, - results.size() ); + results.size() ); assertEquals( 31, - ((Cheesery) results.get( results.size() - 1 )).getTotalAmount() ); + ( (Cheesery) results.get( results.size() - 1 ) ).getTotalAmount() ); // ---------------- 4th scenario wm.delete( cheeseHandles[3] ); @@ -146,15 +146,15 @@ public void testAccumulateModify() throws Exception { // should not have fired as per constraint assertEquals( 2, - results.size() ); + results.size() ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulate() throws Exception { // read in the source - KieSession wm = getKieSessionFromResources("test_Accumulate.drl"); + KieSession wm = getKieSessionFromResources( "test_Accumulate.drl" ); final List<?> results = new ArrayList<Object>(); wm.setGlobal( "results", @@ -173,10 +173,10 @@ public void testAccumulate() throws Exception { 150 ) ); wm.fireAllRules(); - + System.out.println( results ); - - assertEquals( 5, + + assertEquals( 5, results.size() ); assertEquals( 165, results.get( 0 ) ); @@ -186,11 +186,11 @@ public void testAccumulate() throws Exception { assertEquals( 210, results.get( 4 ) ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testMVELAccumulate() throws Exception { // read in the source - KieSession wm = getKieSessionFromResources("test_AccumulateMVEL.drl"); + KieSession wm = getKieSessionFromResources( "test_AccumulateMVEL.drl" ); final List<?> results = new ArrayList<Object>(); wm.setGlobal( "results", results ); @@ -216,10 +216,10 @@ public void testMVELAccumulate() throws Exception { assertEquals( 210, results.get( 4 ) ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateModifyMVEL() throws Exception { // read in the source - KieSession wm = getKieSessionFromResources("test_AccumulateModifyMVEL.drl"); + KieSession wm = getKieSessionFromResources( "test_AccumulateModifyMVEL.drl" ); final List<?> results = new ArrayList<Object>(); wm.setGlobal( "results", @@ -245,7 +245,7 @@ public void 
testAccumulateModifyMVEL() throws Exception { wm.fireAllRules(); // no fire, as per rule constraints assertEquals( 0, - results.size() ); + results.size() ); // ---------------- 2nd scenario final int index = 1; @@ -256,9 +256,9 @@ public void testAccumulateModifyMVEL() throws Exception { // 1 fire assertEquals( 1, - results.size() ); + results.size() ); assertEquals( 24, - ((Cheesery) results.get( results.size() - 1 )).getTotalAmount() ); + ( (Cheesery) results.get( results.size() - 1 ) ).getTotalAmount() ); // ---------------- 3rd scenario bob.setLikes( "brie" ); @@ -268,24 +268,24 @@ public void testAccumulateModifyMVEL() throws Exception { // 2 fires assertEquals( 2, - results.size() ); + results.size() ); assertEquals( 31, - ((Cheesery) results.get( results.size() - 1 )).getTotalAmount() ); + ( (Cheesery) results.get( results.size() - 1 ) ).getTotalAmount() ); // ---------------- 4th scenario - wm.delete(cheeseHandles[3]); + wm.delete( cheeseHandles[3] ); wm.fireAllRules(); // should not have fired as per constraint assertEquals( 2, - results.size() ); + results.size() ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateReverseModify() throws Exception { // read in the source - KieSession wm = getKieSessionFromResources("test_AccumulateReverseModify.drl"); + KieSession wm = getKieSessionFromResources( "test_AccumulateReverseModify.drl" ); final List<?> results = new ArrayList<Object>(); wm.setGlobal( "results", results ); @@ -310,7 +310,7 @@ public void testAccumulateReverseModify() throws Exception { wm.fireAllRules(); // no fire, as per rule constraints assertEquals( 0, - results.size() ); + results.size() ); // ---------------- 2nd scenario final int index = 1; @@ -321,9 +321,9 @@ public void testAccumulateReverseModify() throws Exception { // 1 fire assertEquals( 1, - results.size() ); + results.size() ); assertEquals( 24, - ((Cheesery) results.get( results.size() - 1 )).getTotalAmount() ); + ( (Cheesery) results.get( results.size() - 1 ) ).getTotalAmount() ); // ---------------- 3rd scenario bob.setLikes( "brie" ); @@ -336,24 +336,24 @@ public void testAccumulateReverseModify() throws Exception { // 2 fires assertEquals( 2, - results.size() ); + results.size() ); assertEquals( 36, - ((Cheesery) results.get( results.size() - 1 )).getTotalAmount() ); + ( (Cheesery) results.get( results.size() - 1 ) ).getTotalAmount() ); // ---------------- 4th scenario - wm.delete(cheeseHandles[3]); + wm.delete( cheeseHandles[3] ); wm.fireAllRules(); // should not have fired as per constraint assertEquals( 2, - results.size() ); + results.size() ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateReverseModify2() throws Exception { // read in the source - KieSession wm = getKieSessionFromResources("test_AccumulateReverseModify2.drl"); + KieSession wm = getKieSessionFromResources( "test_AccumulateReverseModify2.drl" ); final List<?> results = new ArrayList<Object>(); wm.setGlobal( "results", @@ -379,7 +379,7 @@ public void testAccumulateReverseModify2() throws Exception { wm.fireAllRules(); // no fire, as per rule constraints assertEquals( 0, - results.size() ); + results.size() ); // ---------------- 2nd scenario final int index = 1; @@ -390,9 +390,9 @@ public void testAccumulateReverseModify2() throws Exception { // 1 fire assertEquals( 1, - results.size() ); + results.size() ); assertEquals( 24, - ((Number) results.get( results.size() - 1 )).intValue() ); + ( (Number) results.get( results.size() - 1 ) ).intValue() ); // ---------------- 
3rd scenario bob.setLikes( "brie" ); @@ -405,24 +405,24 @@ public void testAccumulateReverseModify2() throws Exception { // 2 fires assertEquals( 2, - results.size() ); + results.size() ); assertEquals( 36, - ((Number) results.get( results.size() - 1 )).intValue() ); + ( (Number) results.get( results.size() - 1 ) ).intValue() ); // ---------------- 4th scenario - wm.delete(cheeseHandles[3]); + wm.delete( cheeseHandles[3] ); wm.fireAllRules(); // should not have fired as per constraint assertEquals( 2, - results.size() ); + results.size() ); } - - @Test (timeout = 10000) + + @Test(timeout = 10000) public void testAccumulateReverseModifyInsertLogical2() throws Exception { // read in the source - KieSession wm = getKieSessionFromResources("test_AccumulateReverseModifyInsertLogical2.drl"); + KieSession wm = getKieSessionFromResources( "test_AccumulateReverseModifyInsertLogical2.drl" ); final List<?> results = new ArrayList<Object>(); wm.setGlobal( "results", @@ -453,18 +453,18 @@ public void testAccumulateReverseModifyInsertLogical2() throws Exception { // alice = 31, bob = 17, carol = 0, doug = 17 // !alice = 34, !bob = 31, !carol = 65, !doug = 31 wm.fireAllRules(); - assertEquals( 31, ((Number) results.get( results.size() - 1 )).intValue() ); + assertEquals( 31, ( (Number) results.get( results.size() - 1 ) ).intValue() ); // delete stilton=2 ==> bob = 15, doug = 15, !alice = 30, !carol = 61 - wm.delete(cheeseHandles[1]); + wm.delete( cheeseHandles[1] ); wm.fireAllRules(); - assertEquals( 30, ((Number) results.get( results.size() - 1 )).intValue() ); - } + assertEquals( 30, ( (Number) results.get( results.size() - 1 ) ).intValue() ); + } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateReverseModifyMVEL() throws Exception { // read in the source - KieSession wm = getKieSessionFromResources("test_AccumulateReverseModifyMVEL.drl"); + KieSession wm = getKieSessionFromResources( "test_AccumulateReverseModifyMVEL.drl" ); final List<?> results = new ArrayList<Object>(); wm.setGlobal( "results", @@ -490,7 +490,7 @@ public void testAccumulateReverseModifyMVEL() throws Exception { wm.fireAllRules(); // no fire, as per rule constraints assertEquals( 0, - results.size() ); + results.size() ); // ---------------- 2nd scenario final int index = 1; @@ -501,9 +501,9 @@ public void testAccumulateReverseModifyMVEL() throws Exception { // 1 fire assertEquals( 1, - results.size() ); + results.size() ); assertEquals( 24, - ((Cheesery) results.get( results.size() - 1 )).getTotalAmount() ); + ( (Cheesery) results.get( results.size() - 1 ) ).getTotalAmount() ); // ---------------- 3rd scenario bob.setLikes( "brie" ); @@ -513,24 +513,24 @@ public void testAccumulateReverseModifyMVEL() throws Exception { // 2 fires assertEquals( 2, - results.size() ); + results.size() ); assertEquals( 31, - ((Cheesery) results.get( results.size() - 1 )).getTotalAmount() ); + ( (Cheesery) results.get( results.size() - 1 ) ).getTotalAmount() ); // ---------------- 4th scenario - wm.delete(cheeseHandles[3]); + wm.delete( cheeseHandles[3] ); wm.fireAllRules(); // should not have fired as per constraint assertEquals( 2, - results.size() ); + results.size() ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateReverseModifyMVEL2() throws Exception { // read in the source - KieSession wm = getKieSessionFromResources("test_AccumulateReverseModifyMVEL2.drl"); + KieSession wm = getKieSessionFromResources( "test_AccumulateReverseModifyMVEL2.drl" ); final List<?> results = new 
ArrayList<Object>(); wm.setGlobal( "results", @@ -556,7 +556,7 @@ public void testAccumulateReverseModifyMVEL2() throws Exception { wm.fireAllRules(); // no fire, as per rule constraints assertEquals( 0, - results.size() ); + results.size() ); // ---------------- 2nd scenario final int index = 1; @@ -569,7 +569,7 @@ public void testAccumulateReverseModifyMVEL2() throws Exception { assertEquals( 1, results.size() ); assertEquals( 24, - ((Number) results.get( results.size() - 1 )).intValue() ); + ( (Number) results.get( results.size() - 1 ) ).intValue() ); // ---------------- 3rd scenario bob.setLikes( "brie" ); @@ -581,22 +581,22 @@ public void testAccumulateReverseModifyMVEL2() throws Exception { assertEquals( 2, results.size() ); assertEquals( 31, - ((Number) results.get( results.size() - 1 )).intValue() ); + ( (Number) results.get( results.size() - 1 ) ).intValue() ); // ---------------- 4th scenario - wm.delete(cheeseHandles[3]); + wm.delete( cheeseHandles[3] ); wm.fireAllRules(); // should not have fired as per constraint assertEquals( 2, - results.size() ); + results.size() ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateWithFromChaining() throws Exception { // read in the source - KieSession wm = getKieSessionFromResources("test_AccumulateWithFromChaining.drl"); + KieSession wm = getKieSessionFromResources( "test_AccumulateWithFromChaining.drl" ); final List<?> results = new ArrayList<Object>(); wm.setGlobal( "results", @@ -629,7 +629,7 @@ public void testAccumulateWithFromChaining() throws Exception { assertEquals( 1, results.size() ); assertEquals( 3, - ((List) results.get( results.size() - 1 )).size() ); + ( (List) results.get( results.size() - 1 ) ).size() ); // ---------------- 2nd scenario final int index = 1; @@ -651,9 +651,9 @@ public void testAccumulateWithFromChaining() throws Exception { // 2 fires assertEquals( 2, - results.size() ); + results.size() ); assertEquals( 3, - ((List) results.get( results.size() - 1 )).size() ); + ( (List) results.get( results.size() - 1 ) ).size() ); // ---------------- 4th scenario cheesery.getCheeses().remove( cheese[3] ); @@ -663,11 +663,11 @@ public void testAccumulateWithFromChaining() throws Exception { // should not have fired as per constraint assertEquals( 2, - results.size() ); + results.size() ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testMVELAccumulate2WM() throws Exception { // read in the source @@ -725,11 +725,11 @@ public void testMVELAccumulate2WM() throws Exception { assertEquals( 210, results2.get( 4 ) ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateInnerClass() throws Exception { // read in the source - KieSession wm = getKieSessionFromResources("test_AccumulateInnerClass.drl"); + KieSession wm = getKieSessionFromResources( "test_AccumulateInnerClass.drl" ); final List<?> results = new ArrayList<Object>(); wm.setGlobal( "results", @@ -743,11 +743,11 @@ public void testAccumulateInnerClass() throws Exception { assertEquals( 15, results.get( 0 ) ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateReturningNull() throws Exception { // read in the source - KieSession wm = getKieSessionFromResources("test_AccumulateReturningNull.drl"); + KieSession wm = getKieSessionFromResources( "test_AccumulateReturningNull.drl" ); final List<?> results = new ArrayList<Object>(); wm.setGlobal( "results", @@ -757,120 +757,120 @@ public void testAccumulateReturningNull() throws Exception { 10 ) ); } - @Test (timeout = 
10000) + @Test(timeout = 10000) public void testAccumulateSumJava() throws Exception { execTestAccumulateSum( "test_AccumulateSum.drl" ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateSumMVEL() throws Exception { execTestAccumulateSum( "test_AccumulateSumMVEL.drl" ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateMultiPatternWithFunctionJava() throws Exception { execTestAccumulateSum( "test_AccumulateMultiPatternFunctionJava.drl" ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateMultiPatternWithFunctionMVEL() throws Exception { execTestAccumulateSum( "test_AccumulateMultiPatternFunctionMVEL.drl" ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateCountJava() throws Exception { execTestAccumulateCount( "test_AccumulateCount.drl" ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateCountMVEL() throws Exception { execTestAccumulateCount( "test_AccumulateCountMVEL.drl" ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateAverageJava() throws Exception { execTestAccumulateAverage( "test_AccumulateAverage.drl" ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateAverageMVEL() throws Exception { execTestAccumulateAverage( "test_AccumulateAverageMVEL.drl" ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateMinJava() throws Exception { execTestAccumulateMin( "test_AccumulateMin.drl" ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateMinMVEL() throws Exception { execTestAccumulateMin( "test_AccumulateMinMVEL.drl" ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateMaxJava() throws Exception { execTestAccumulateMax( "test_AccumulateMax.drl" ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateMaxMVEL() throws Exception { execTestAccumulateMax( "test_AccumulateMaxMVEL.drl" ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateMultiPatternJava() throws Exception { execTestAccumulateReverseModifyMultiPattern( "test_AccumulateMultiPattern.drl" ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateMultiPatternMVEL() throws Exception { execTestAccumulateReverseModifyMultiPattern( "test_AccumulateMultiPatternMVEL.drl" ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateCollectListJava() throws Exception { execTestAccumulateCollectList( "test_AccumulateCollectList.drl" ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateCollectListMVEL() throws Exception { execTestAccumulateCollectList( "test_AccumulateCollectListMVEL.drl" ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateCollectSetJava() throws Exception { execTestAccumulateCollectSet( "test_AccumulateCollectSet.drl" ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateCollectSetMVEL() throws Exception { execTestAccumulateCollectSet( "test_AccumulateCollectSetMVEL.drl" ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateMultipleFunctionsJava() throws Exception { execTestAccumulateMultipleFunctions( "test_AccumulateMultipleFunctions.drl" ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateMultipleFunctionsMVEL() throws Exception { execTestAccumulateMultipleFunctions( 
"test_AccumulateMultipleFunctionsMVEL.drl" ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateMultipleFunctionsConstraint() throws Exception { execTestAccumulateMultipleFunctionsConstraint( "test_AccumulateMultipleFunctionsConstraint.drl" ); } - - @Test (timeout = 10000) + + @Test(timeout = 10000) public void testAccumulateWithAndOrCombinations() throws Exception { // JBRULES-3482 // once this compils, update it to actually assert on correct outputs. - + String rule = "package org.drools.compiler.test;\n" + "import org.drools.compiler.Cheese;\n" + "import org.drools.compiler.Person;\n" + - + "rule \"Class cast causer\"\n" + " when\n" + " $person : Person( $likes : likes )\n" + @@ -889,63 +889,63 @@ public void testAccumulateWithAndOrCombinations() throws Exception { wm.insert( new Person( "Bob", "stilton" ) ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateWithSameSubnetwork() throws Exception { String rule = "package org.drools.compiler.test;\n" + - "import org.drools.compiler.Cheese;\n" + - "import org.drools.compiler.Person;\n" + - "global java.util.List list; \n" + - "rule r1 salience 100 \n" + - " when\n" + - " $person : Person( name == 'Alice', $likes : likes )\n" + - " $total : Number() from accumulate( $p : Person(likes != $likes, $l : likes) and $c : Cheese( type == $l ),\n" + - " min($c.getPrice()) )\n" + - " then\n" + - " list.add( 'r1' + ':' + $total);\n" + - "end\n" + - "rule r2 \n" + - " when\n" + - " $person : Person( name == 'Alice', $likes : likes )\n" + - " $total : Number() from accumulate( $p : Person(likes != $likes, $l : likes) and $c : Cheese( type == $l ),\n" + - " max($c.getPrice()) )\n" + - " then\n" + - " list.add( 'r2' + ':' + $total);\n" + - "end\n" + + "import org.drools.compiler.Cheese;\n" + + "import org.drools.compiler.Person;\n" + + "global java.util.List list; \n" + + "rule r1 salience 100 \n" + + " when\n" + + " $person : Person( name == 'Alice', $likes : likes )\n" + + " $total : Number() from accumulate( $p : Person(likes != $likes, $l : likes) and $c : Cheese( type == $l ),\n" + + " min($c.getPrice()) )\n" + + " then\n" + + " list.add( 'r1' + ':' + $total);\n" + + "end\n" + + "rule r2 \n" + + " when\n" + + " $person : Person( name == 'Alice', $likes : likes )\n" + + " $total : Number() from accumulate( $p : Person(likes != $likes, $l : likes) and $c : Cheese( type == $l ),\n" + + " max($c.getPrice()) )\n" + + " then\n" + + " list.add( 'r2' + ':' + $total);\n" + + "end\n" + - ""; + ""; // read in the source KnowledgeBase kbase = loadKnowledgeBaseFromString( rule ); StatefulKnowledgeSession wm = createKnowledgeSession( kbase ); List list = new ArrayList(); - wm.setGlobal("list", list); + wm.setGlobal( "list", list ); // Check the network formation, to ensure the RiaNode is shared. 
- ObjectTypeNode cheeseOtn = LinkingTest.getObjectTypeNode(kbase, Cheese.class); + ObjectTypeNode cheeseOtn = LinkingTest.getObjectTypeNode( kbase, Cheese.class ); ObjectSink[] oSinks = cheeseOtn.getSinkPropagator().getSinks(); assertEquals( 1, oSinks.length ); - JoinNode cheeseJoin = ( JoinNode ) oSinks[0]; + JoinNode cheeseJoin = (JoinNode) oSinks[0]; LeftTupleSink[] ltSinks = cheeseJoin.getSinkPropagator().getSinks(); assertEquals( 1, ltSinks.length ); - RightInputAdapterNode rian = ( RightInputAdapterNode ) ltSinks[0]; + RightInputAdapterNode rian = (RightInputAdapterNode) ltSinks[0]; assertEquals( 2, rian.getSinkPropagator().size() ); // RiaNode is shared, if this has two outputs - wm.insert(new Cheese("stilton", 10)); + wm.insert( new Cheese( "stilton", 10 ) ); wm.insert( new Person( "Alice", "brie" ) ); wm.insert( new Person( "Bob", "stilton" ) ); wm.fireAllRules(); - assertEquals(2, list.size() ); - assertEquals( "r1:10.0", list.get(0)); - assertEquals( "r2:10.0", list.get(1)); + assertEquals( 2, list.size() ); + assertEquals( "r1:10.0", list.get( 0 ) ); + assertEquals( "r2:10.0", list.get( 1 ) ); } public void execTestAccumulateSum( String fileName ) throws Exception { // read in the source - KieSession session = getKieSessionFromResources(fileName); + KieSession session = getKieSessionFromResources( fileName ); DataSet data = new DataSet(); data.results = new ArrayList<Object>(); @@ -978,12 +978,12 @@ public void execTestAccumulateSum( String fileName ) throws Exception { // ---------------- 1st scenario session.fireAllRules(); assertEquals( 1, - data.results.size() ); + data.results.size() ); assertEquals( 27, - ((Number) data.results.get( data.results.size() - 1 )).intValue() ); + ( (Number) data.results.get( data.results.size() - 1 ) ).intValue() ); - session = SerializationHelper.getSerialisedStatefulKnowledgeSession(session, - true); + session = SerializationHelper.getSerialisedStatefulKnowledgeSession( session, + true ); updateReferences( session, data ); @@ -996,9 +996,9 @@ public void execTestAccumulateSum( String fileName ) throws Exception { assertEquals( 1, count ); assertEquals( 2, - data.results.size() ); + data.results.size() ); assertEquals( 20, - ((Number) data.results.get( data.results.size() - 1 )).intValue() ); + ( (Number) data.results.get( data.results.size() - 1 ) ).intValue() ); // ---------------- 3rd scenario data.bob.setLikes( "brie" ); @@ -1007,9 +1007,9 @@ public void execTestAccumulateSum( String fileName ) throws Exception { session.fireAllRules(); assertEquals( 3, - data.results.size() ); + data.results.size() ); assertEquals( 15, - ((Number) data.results.get( data.results.size() - 1 )).intValue() ); + ( (Number) data.results.get( data.results.size() - 1 ) ).intValue() ); // ---------------- 4th scenario session.delete( data.cheeseHandles[3] ); @@ -1017,14 +1017,14 @@ public void execTestAccumulateSum( String fileName ) throws Exception { // should not have fired as per constraint assertEquals( 3, - data.results.size() ); + data.results.size() ); } private void updateReferences( final KieSession session, final DataSet data ) { - data.results = (List< ? >) session.getGlobal( "results" ); - for ( Iterator< ? 
> it = session.getObjects().iterator(); it.hasNext(); ) { + data.results = (List<?>) session.getGlobal( "results" ); + for ( Iterator<?> it = session.getObjects().iterator(); it.hasNext(); ) { Object next = it.next(); if ( next instanceof Cheese ) { Cheese c = (Cheese) next; @@ -1040,7 +1040,7 @@ private void updateReferences( final KieSession session, public void execTestAccumulateCount( String fileName ) throws Exception { // read in the source - KieSession wm = getKieSessionFromResources(fileName); + KieSession wm = getKieSessionFromResources( fileName ); final List<?> results = new ArrayList<Object>(); wm.setGlobal( "results", @@ -1066,9 +1066,9 @@ public void execTestAccumulateCount( String fileName ) throws Exception { wm.fireAllRules(); // no fire, as per rule constraints assertEquals( 1, - results.size() ); + results.size() ); assertEquals( 3, - ((Number) results.get( results.size() - 1 )).intValue() ); + ( (Number) results.get( results.size() - 1 ) ).intValue() ); // ---------------- 2nd scenario final int index = 1; @@ -1079,9 +1079,9 @@ public void execTestAccumulateCount( String fileName ) throws Exception { // 1 fire assertEquals( 2, - results.size() ); + results.size() ); assertEquals( 3, - ((Number) results.get( results.size() - 1 )).intValue() ); + ( (Number) results.get( results.size() - 1 ) ).intValue() ); // ---------------- 3rd scenario bob.setLikes( "brie" ); @@ -1091,23 +1091,23 @@ public void execTestAccumulateCount( String fileName ) throws Exception { // 2 fires assertEquals( 3, - results.size() ); + results.size() ); assertEquals( 2, - ((Number) results.get( results.size() - 1 )).intValue() ); + ( (Number) results.get( results.size() - 1 ) ).intValue() ); // ---------------- 4th scenario - wm.delete(cheeseHandles[3]); + wm.delete( cheeseHandles[3] ); wm.fireAllRules(); // should not have fired as per constraint assertEquals( 3, - results.size() ); + results.size() ); } public void execTestAccumulateAverage( String fileName ) throws Exception { // read in the source - KieSession wm = getKieSessionFromResources(fileName); + KieSession wm = getKieSessionFromResources( fileName ); final List<?> results = new ArrayList<Object>(); wm.setGlobal( "results", @@ -1133,7 +1133,7 @@ public void execTestAccumulateAverage( String fileName ) throws Exception { wm.fireAllRules(); // no fire, as per rule constraints assertEquals( 0, - results.size() ); + results.size() ); // ---------------- 2nd scenario final int index = 1; @@ -1144,9 +1144,9 @@ public void execTestAccumulateAverage( String fileName ) throws Exception { // 1 fire assertEquals( 1, - results.size() ); + results.size() ); assertEquals( 10, - ((Number) results.get( results.size() - 1 )).intValue() ); + ( (Number) results.get( results.size() - 1 ) ).intValue() ); // ---------------- 3rd scenario bob.setLikes( "brie" ); @@ -1156,24 +1156,24 @@ public void execTestAccumulateAverage( String fileName ) throws Exception { // 2 fires assertEquals( 2, - results.size() ); + results.size() ); assertEquals( 16, - ((Number) results.get( results.size() - 1 )).intValue() ); + ( (Number) results.get( results.size() - 1 ) ).intValue() ); // ---------------- 4th scenario - wm.delete(cheeseHandles[3]); - wm.delete(cheeseHandles[4]); + wm.delete( cheeseHandles[3] ); + wm.delete( cheeseHandles[4] ); wm.fireAllRules(); // should not have fired as per constraint assertEquals( 2, - results.size() ); + results.size() ); } public void execTestAccumulateMin( String fileName ) throws Exception { // read in the source - KieSession wm = 
getKieSessionFromResources(fileName); + KieSession wm = getKieSessionFromResources( fileName ); final List<?> results = new ArrayList<Object>(); wm.setGlobal( "results", @@ -1199,7 +1199,7 @@ public void execTestAccumulateMin( String fileName ) throws Exception { wm.fireAllRules(); // no fire, as per rule constraints assertEquals( 0, - results.size() ); + results.size() ); // ---------------- 2nd scenario final int index = 1; @@ -1210,9 +1210,9 @@ public void execTestAccumulateMin( String fileName ) throws Exception { // 1 fire assertEquals( 1, - results.size() ); + results.size() ); assertEquals( 3, - ((Number) results.get( results.size() - 1 )).intValue() ); + ( (Number) results.get( results.size() - 1 ) ).intValue() ); // ---------------- 3rd scenario bob.setLikes( "brie" ); @@ -1222,24 +1222,24 @@ public void execTestAccumulateMin( String fileName ) throws Exception { // 2 fires assertEquals( 2, - results.size() ); + results.size() ); assertEquals( 1, - ((Number) results.get( results.size() - 1 )).intValue() ); + ( (Number) results.get( results.size() - 1 ) ).intValue() ); // ---------------- 4th scenario - wm.delete(cheeseHandles[3]); - wm.delete(cheeseHandles[4]); + wm.delete( cheeseHandles[3] ); + wm.delete( cheeseHandles[4] ); wm.fireAllRules(); // should not have fired as per constraint assertEquals( 2, - results.size() ); + results.size() ); } public void execTestAccumulateMax( String fileName ) throws Exception { // read in the source - KieSession wm = getKieSessionFromResources(fileName); + KieSession wm = getKieSessionFromResources( fileName ); final List<?> results = new ArrayList<Object>(); wm.setGlobal( "results", @@ -1265,7 +1265,7 @@ public void execTestAccumulateMax( String fileName ) throws Exception { wm.fireAllRules(); // no fire, as per rule constraints assertEquals( 0, - results.size() ); + results.size() ); // ---------------- 2nd scenario final int index = 1; @@ -1276,9 +1276,9 @@ public void execTestAccumulateMax( String fileName ) throws Exception { // 1 fire assertEquals( 1, - results.size() ); + results.size() ); assertEquals( 9, - ((Number) results.get( results.size() - 1 )).intValue() ); + ( (Number) results.get( results.size() - 1 ) ).intValue() ); // ---------------- 3rd scenario bob.setLikes( "brie" ); @@ -1288,24 +1288,24 @@ public void execTestAccumulateMax( String fileName ) throws Exception { // 2 fires assertEquals( 2, - results.size() ); + results.size() ); assertEquals( 17, - ((Number) results.get( results.size() - 1 )).intValue() ); + ( (Number) results.get( results.size() - 1 ) ).intValue() ); // ---------------- 4th scenario - wm.delete(cheeseHandles[3]); - wm.delete(cheeseHandles[4]); + wm.delete( cheeseHandles[3] ); + wm.delete( cheeseHandles[4] ); wm.fireAllRules(); // should not have fired as per constraint assertEquals( 2, - results.size() ); + results.size() ); } public void execTestAccumulateCollectList( String fileName ) throws Exception { // read in the source - KieSession wm = getKieSessionFromResources(fileName); + KieSession wm = getKieSessionFromResources( fileName ); final List<?> results = new ArrayList<Object>(); wm.setGlobal( "results", @@ -1326,9 +1326,9 @@ public void execTestAccumulateCollectList( String fileName ) throws Exception { // ---------------- 1st scenario wm.fireAllRules(); assertEquals( 1, - results.size() ); + results.size() ); assertEquals( 6, - ((List) results.get( results.size() - 1 )).size() ); + ( (List) results.get( results.size() - 1 ) ).size() ); // ---------------- 2nd scenario final int index = 1; @@ 
-1339,24 +1339,24 @@ public void execTestAccumulateCollectList( String fileName ) throws Exception { // fire again assertEquals( 2, - results.size() ); + results.size() ); assertEquals( 6, - ((List) results.get( results.size() - 1 )).size() ); + ( (List) results.get( results.size() - 1 ) ).size() ); // ---------------- 3rd scenario - wm.delete(cheeseHandles[3]); - wm.delete(cheeseHandles[4]); + wm.delete( cheeseHandles[3] ); + wm.delete( cheeseHandles[4] ); wm.fireAllRules(); // should not have fired as per constraint assertEquals( 2, - results.size() ); + results.size() ); } public void execTestAccumulateCollectSet( String fileName ) throws Exception { // read in the source - KieSession wm = getKieSessionFromResources(fileName); + KieSession wm = getKieSessionFromResources( fileName ); final List<?> results = new ArrayList<Object>(); wm.setGlobal( "results", @@ -1377,9 +1377,9 @@ public void execTestAccumulateCollectSet( String fileName ) throws Exception { // ---------------- 1st scenario wm.fireAllRules(); assertEquals( 1, - results.size() ); + results.size() ); assertEquals( 3, - ((Set) results.get( results.size() - 1 )).size() ); + ( (Set) results.get( results.size() - 1 ) ).size() ); // ---------------- 2nd scenario final int index = 1; @@ -1390,32 +1390,32 @@ public void execTestAccumulateCollectSet( String fileName ) throws Exception { // fire again assertEquals( 2, - results.size() ); + results.size() ); assertEquals( 3, - ((Set) results.get( results.size() - 1 )).size() ); + ( (Set) results.get( results.size() - 1 ) ).size() ); // ---------------- 3rd scenario - wm.delete(cheeseHandles[3]); + wm.delete( cheeseHandles[3] ); wm.fireAllRules(); // fire again assertEquals( 3, - results.size() ); + results.size() ); assertEquals( 3, - ((Set) results.get( results.size() - 1 )).size() ); + ( (Set) results.get( results.size() - 1 ) ).size() ); // ---------------- 4rd scenario - wm.delete(cheeseHandles[4]); + wm.delete( cheeseHandles[4] ); wm.fireAllRules(); // should not have fired as per constraint assertEquals( 3, - results.size() ); + results.size() ); } public void execTestAccumulateReverseModifyMultiPattern( String fileName ) throws Exception { // read in the source - KieSession wm = getKieSessionFromResources(fileName); + KieSession wm = getKieSessionFromResources( fileName ); final List<?> results = new ArrayList<Object>(); wm.setGlobal( "results", @@ -1444,7 +1444,7 @@ public void execTestAccumulateReverseModifyMultiPattern( String fileName ) throw wm.fireAllRules(); // no fire, as per rule constraints assertEquals( 0, - results.size() ); + results.size() ); // ---------------- 2nd scenario final int index = 1; @@ -1455,9 +1455,9 @@ public void execTestAccumulateReverseModifyMultiPattern( String fileName ) throw // 1 fire assertEquals( 1, - results.size() ); + results.size() ); assertEquals( 32, - ((Cheesery) results.get( results.size() - 1 )).getTotalAmount() ); + ( (Cheesery) results.get( results.size() - 1 ) ).getTotalAmount() ); // ---------------- 3rd scenario bob.setLikes( "brie" ); @@ -1467,25 +1467,25 @@ public void execTestAccumulateReverseModifyMultiPattern( String fileName ) throw // 2 fires assertEquals( 2, - results.size() ); + results.size() ); assertEquals( 39, - ((Cheesery) results.get( results.size() - 1 )).getTotalAmount() ); + ( (Cheesery) results.get( results.size() - 1 ) ).getTotalAmount() ); // ---------------- 4th scenario - wm.delete(cheeseHandles[3]); + wm.delete( cheeseHandles[3] ); wm.fireAllRules(); // should not have fired as per constraint 
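// Deleting the matched cheese exercises the reverse-modify path: the removed
// fact's contribution is presumably backed out via the accumulate function's
// reverse() support, the rule constraint stops matching, and the assertion
// below confirms that no additional result was collected.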
assertEquals( 2, - results.size() ); + results.size() ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateWithPreviouslyBoundVariables() throws Exception { // read in the source - KieSession wm = getKieSessionFromResources("test_AccumulatePreviousBinds.drl"); + KieSession wm = getKieSessionFromResources( "test_AccumulatePreviousBinds.drl" ); final List<?> results = new ArrayList<Object>(); wm.setGlobal( "results", @@ -1508,10 +1508,10 @@ public void testAccumulateWithPreviouslyBoundVariables() throws Exception { results.get( 0 ) ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateMVELWithModify() throws Exception { // read in the source - KieSession wm = getKieSessionFromResources("test_AccumulateMVELwithModify.drl"); + KieSession wm = getKieSessionFromResources( "test_AccumulateMVELwithModify.drl" ); final List<Number> results = new ArrayList<Number>(); wm.setGlobal( "results", results ); @@ -1545,11 +1545,11 @@ public void testAccumulateMVELWithModify() throws Exception { 0.0 ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateGlobals() throws Exception { // read in the source - KieSession wm = getKieSessionFromResources("test_AccumulateGlobals.drl"); + KieSession wm = getKieSessionFromResources( "test_AccumulateGlobals.drl" ); final List<?> results = new ArrayList<Object>(); wm.setGlobal( "results", @@ -1574,12 +1574,12 @@ public void testAccumulateGlobals() throws Exception { results.get( 0 ) ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateNonExistingFunction() throws Exception { final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); - kbuilder.add( ResourceFactory.newClassPathResource("test_NonExistingAccumulateFunction.drl", - getClass()), + kbuilder.add( ResourceFactory.newClassPathResource( "test_NonExistingAccumulateFunction.drl", + getClass() ), ResourceType.DRL ); // should report a proper error, not raise an exception @@ -1591,7 +1591,7 @@ public void testAccumulateNonExistingFunction() throws Exception { } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateZeroParams() { String rule = "global java.util.List list;\n" + "rule fromIt\n" + @@ -1601,40 +1601,40 @@ public void testAccumulateZeroParams() { " list.add( $c );\n" + "end"; - KnowledgeBase kbase = loadKnowledgeBaseFromString(rule); + KnowledgeBase kbase = loadKnowledgeBaseFromString( rule ); StatefulKnowledgeSession ksession = createKnowledgeSession( kbase ); List list = new ArrayList(); - ksession.setGlobal("list", list); + ksession.setGlobal( "list", list ); - ksession.insert( new Integer(1) ); - ksession.insert(new Integer(2)); - ksession.insert( new Integer(3) ); + ksession.insert( new Integer( 1 ) ); + ksession.insert( new Integer( 2 ) ); + ksession.insert( new Integer( 3 ) ); ksession.fireAllRules(); assertEquals( 1, list.size() ); - assertEquals( 3, list.get(0) ); + assertEquals( 3, list.get( 0 ) ); } public void execTestAccumulateMultipleFunctions( String fileName ) throws Exception { - KieSession ksession = getKieSessionFromResources(fileName); + KieSession ksession = getKieSessionFromResources( fileName ); AgendaEventListener ael = mock( AgendaEventListener.class ); ksession.addEventListener( ael ); final Cheese[] cheese = new Cheese[]{new Cheese( "stilton", - 10 ), - new Cheese( "stilton", - 3 ), - new Cheese( "stilton", - 5 ), - new Cheese( "brie", - 15 ), - new Cheese( "brie", - 17 ), - new Cheese( "provolone", - 8 )}; + 10 ), + new 
Cheese( "stilton", + 3 ), + new Cheese( "stilton", + 5 ), + new Cheese( "brie", + 15 ), + new Cheese( "brie", + 17 ), + new Cheese( "provolone", + 8 )}; final Person bob = new Person( "Bob", "stilton" ); @@ -1648,14 +1648,14 @@ public void execTestAccumulateMultipleFunctions( String fileName ) throws Except ksession.fireAllRules(); ArgumentCaptor<AfterMatchFiredEvent> cap = ArgumentCaptor.forClass( AfterMatchFiredEvent.class ); - Mockito.verify( ael ).afterMatchFired(cap.capture()); + Mockito.verify( ael ).afterMatchFired( cap.capture() ); Match activation = cap.getValue().getMatch(); - assertThat( ((Number) activation.getDeclarationValue( "$sum" )).intValue(), + assertThat( ( (Number) activation.getDeclarationValue( "$sum" ) ).intValue(), is( 18 ) ); - assertThat( ((Number) activation.getDeclarationValue( "$min" )).intValue(), + assertThat( ( (Number) activation.getDeclarationValue( "$min" ) ).intValue(), is( 3 ) ); - assertThat( ((Number) activation.getDeclarationValue( "$avg" )).intValue(), + assertThat( ( (Number) activation.getDeclarationValue( "$avg" ) ).intValue(), is( 6 ) ); Mockito.reset( ael ); @@ -1666,14 +1666,14 @@ public void execTestAccumulateMultipleFunctions( String fileName ) throws Except cheese[index] ); ksession.fireAllRules(); - Mockito.verify( ael ).afterMatchFired(cap.capture()); + Mockito.verify( ael ).afterMatchFired( cap.capture() ); activation = cap.getValue().getMatch(); - assertThat( ((Number) activation.getDeclarationValue( "$sum" )).intValue(), + assertThat( ( (Number) activation.getDeclarationValue( "$sum" ) ).intValue(), is( 24 ) ); - assertThat( ((Number) activation.getDeclarationValue( "$min" )).intValue(), + assertThat( ( (Number) activation.getDeclarationValue( "$min" ) ).intValue(), is( 5 ) ); - assertThat( ((Number) activation.getDeclarationValue( "$avg" )).intValue(), + assertThat( ( (Number) activation.getDeclarationValue( "$avg" ) ).intValue(), is( 8 ) ); Mockito.reset( ael ); @@ -1683,50 +1683,50 @@ public void execTestAccumulateMultipleFunctions( String fileName ) throws Except bob ); ksession.fireAllRules(); - Mockito.verify( ael ).afterMatchFired(cap.capture()); + Mockito.verify( ael ).afterMatchFired( cap.capture() ); activation = cap.getValue().getMatch(); - assertThat( ((Number) activation.getDeclarationValue( "$sum" )).intValue(), + assertThat( ( (Number) activation.getDeclarationValue( "$sum" ) ).intValue(), is( 32 ) ); - assertThat( ((Number) activation.getDeclarationValue( "$min" )).intValue(), + assertThat( ( (Number) activation.getDeclarationValue( "$min" ) ).intValue(), is( 15 ) ); - assertThat( ((Number) activation.getDeclarationValue( "$avg" )).intValue(), + assertThat( ( (Number) activation.getDeclarationValue( "$avg" ) ).intValue(), is( 16 ) ); Mockito.reset( ael ); // ---------------- 4th scenario - ksession.delete(cheeseHandles[3]); + ksession.delete( cheeseHandles[3] ); ksession.fireAllRules(); - Mockito.verify( ael ).afterMatchFired(cap.capture()); + Mockito.verify( ael ).afterMatchFired( cap.capture() ); activation = cap.getValue().getMatch(); - assertThat( ((Number) activation.getDeclarationValue( "$sum" )).intValue(), + assertThat( ( (Number) activation.getDeclarationValue( "$sum" ) ).intValue(), is( 17 ) ); - assertThat( ((Number) activation.getDeclarationValue( "$min" )).intValue(), + assertThat( ( (Number) activation.getDeclarationValue( "$min" ) ).intValue(), is( 17 ) ); - assertThat( ((Number) activation.getDeclarationValue( "$avg" )).intValue(), + assertThat( ( (Number) activation.getDeclarationValue( "$avg" ) 
).intValue(), is( 17 ) ); } public void execTestAccumulateMultipleFunctionsConstraint( String fileName ) throws Exception { - KieSession ksession = getKieSessionFromResources(fileName); + KieSession ksession = getKieSessionFromResources( fileName ); AgendaEventListener ael = mock( AgendaEventListener.class ); ksession.addEventListener( ael ); final Cheese[] cheese = new Cheese[]{new Cheese( "stilton", - 10 ), - new Cheese( "stilton", - 3 ), - new Cheese( "stilton", - 5 ), - new Cheese( "brie", - 3 ), - new Cheese( "brie", - 17 ), - new Cheese( "provolone", - 8 )}; + 10 ), + new Cheese( "stilton", + 3 ), + new Cheese( "stilton", + 5 ), + new Cheese( "brie", + 3 ), + new Cheese( "brie", + 17 ), + new Cheese( "provolone", + 8 )}; final Person bob = new Person( "Bob", "stilton" ); @@ -1740,14 +1740,14 @@ public void execTestAccumulateMultipleFunctionsConstraint( String fileName ) thr ksession.fireAllRules(); ArgumentCaptor<AfterMatchFiredEvent> cap = ArgumentCaptor.forClass( AfterMatchFiredEvent.class ); - Mockito.verify( ael ).afterMatchFired(cap.capture()); + Mockito.verify( ael ).afterMatchFired( cap.capture() ); Match activation = cap.getValue().getMatch(); - assertThat( ((Number) activation.getDeclarationValue( "$sum" )).intValue(), + assertThat( ( (Number) activation.getDeclarationValue( "$sum" ) ).intValue(), is( 18 ) ); - assertThat( ((Number) activation.getDeclarationValue( "$min" )).intValue(), + assertThat( ( (Number) activation.getDeclarationValue( "$min" ) ).intValue(), is( 3 ) ); - assertThat( ((Number) activation.getDeclarationValue( "$avg" )).intValue(), + assertThat( ( (Number) activation.getDeclarationValue( "$avg" ) ).intValue(), is( 6 ) ); Mockito.reset( ael ); @@ -1758,7 +1758,7 @@ public void execTestAccumulateMultipleFunctionsConstraint( String fileName ) thr cheese[index] ); ksession.fireAllRules(); - Mockito.verify( ael, Mockito.never() ).afterMatchFired(Mockito.any(AfterMatchFiredEvent.class)); + Mockito.verify( ael, Mockito.never() ).afterMatchFired( Mockito.any( AfterMatchFiredEvent.class ) ); Mockito.reset( ael ); // ---------------- 3rd scenario @@ -1767,29 +1767,29 @@ public void execTestAccumulateMultipleFunctionsConstraint( String fileName ) thr bob ); ksession.fireAllRules(); - Mockito.verify( ael ).afterMatchFired(cap.capture()); + Mockito.verify( ael ).afterMatchFired( cap.capture() ); activation = cap.getValue().getMatch(); - assertThat( ((Number) activation.getDeclarationValue( "$sum" )).intValue(), + assertThat( ( (Number) activation.getDeclarationValue( "$sum" ) ).intValue(), is( 20 ) ); - assertThat( ((Number) activation.getDeclarationValue( "$min" )).intValue(), + assertThat( ( (Number) activation.getDeclarationValue( "$min" ) ).intValue(), is( 3 ) ); - assertThat( ((Number) activation.getDeclarationValue( "$avg" )).intValue(), + assertThat( ( (Number) activation.getDeclarationValue( "$avg" ) ).intValue(), is( 10 ) ); - + ksession.dispose(); } public static class DataSet { - public Cheese[] cheese; + public Cheese[] cheese; public FactHandle[] cheeseHandles; - public Person bob; - public FactHandle bobHandle; - public List< ? 
> results; + public Person bob; + public FactHandle bobHandle; + public List<?> results; } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateMinMax() throws Exception { String drl = "package org.drools.compiler.test \n" + "import org.drools.compiler.Cheese \n" + @@ -1802,28 +1802,28 @@ public void testAccumulateMinMax() throws Exception { "end \n"; KnowledgeBase kbase = loadKnowledgeBaseFromString( drl ); - StatefulKnowledgeSession ksession = createKnowledgeSession(kbase); + StatefulKnowledgeSession ksession = createKnowledgeSession( kbase ); final List<Number> results = new ArrayList<Number>(); ksession.setGlobal( "results", results ); final Cheese[] cheese = new Cheese[]{new Cheese( "Emmentaler", - 4 ), - new Cheese( "Appenzeller", - 6 ), - new Cheese( "Greyerzer", - 2 ), - new Cheese( "Raclette", - 3 ), - new Cheese( "Olmützer Quargel", - 15 ), - new Cheese( "Brie", - 17 ), - new Cheese( "Dolcelatte", - 8 )}; - - for (Cheese aCheese : cheese) { - ksession.insert(aCheese); + 4 ), + new Cheese( "Appenzeller", + 6 ), + new Cheese( "Greyerzer", + 2 ), + new Cheese( "Raclette", + 3 ), + new Cheese( "Olmützer Quargel", + 15 ), + new Cheese( "Brie", + 17 ), + new Cheese( "Dolcelatte", + 8 )}; + + for ( Cheese aCheese : cheese ) { + ksession.insert( aCheese ); } // ---------------- 1st scenario @@ -1835,53 +1835,53 @@ public void testAccumulateMinMax() throws Exception { assertEquals( results.get( 1 ).intValue(), 17 ); } - - @Test (timeout = 10000) + + @Test(timeout = 10000) public void testAccumulateCE() throws Exception { String drl = "package org.drools.compiler\n" + - "global java.util.List results\n" + - "rule \"ocount\"\n" + - "when\n" + - " accumulate( Cheese(), $c: count(1) )\n" + - "then\n" + - " results.add( $c + \" facts\" );\n" + - "end\n"; + "global java.util.List results\n" + + "rule \"ocount\"\n" + + "when\n" + + " accumulate( Cheese(), $c: count(1) )\n" + + "then\n" + + " results.add( $c + \" facts\" );\n" + + "end\n"; KnowledgeBase kbase = loadKnowledgeBaseFromString( drl ); - StatefulKnowledgeSession ksession = createKnowledgeSession(kbase); + StatefulKnowledgeSession ksession = createKnowledgeSession( kbase ); final List<String> results = new ArrayList<String>(); ksession.setGlobal( "results", results ); final Cheese[] cheese = new Cheese[]{new Cheese( "Emmentaler", - 4 ), - new Cheese( "Appenzeller", - 6 ), - new Cheese( "Greyerzer", - 2 ), - new Cheese( "Raclette", - 3 ), - new Cheese( "Olmützer Quargel", - 15 ), - new Cheese( "Brie", - 17 ), - new Cheese( "Dolcelatte", - 8 )}; - - for (Cheese aCheese : cheese) { - ksession.insert(aCheese); + 4 ), + new Cheese( "Appenzeller", + 6 ), + new Cheese( "Greyerzer", + 2 ), + new Cheese( "Raclette", + 3 ), + new Cheese( "Olmützer Quargel", + 15 ), + new Cheese( "Brie", + 17 ), + new Cheese( "Dolcelatte", + 8 )}; + + for ( Cheese aCheese : cheese ) { + ksession.insert( aCheese ); } // ---------------- 1st scenario ksession.fireAllRules(); assertEquals( 1, results.size() ); - assertEquals( "7 facts", + assertEquals( "7 facts", results.get( 0 ) ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateAndRetract() { String drl = "package org.drools.compiler;\n" + "\n" + @@ -1920,34 +1920,34 @@ public void testAccumulateAndRetract() { "end" + "\n"; - KieSession ks = getKieSessionFromContentStrings(drl); + KieSession ks = getKieSessionFromContentStrings( drl ); ArrayList resList = new ArrayList(); - ks.setGlobal("list",resList); + ks.setGlobal( "list", resList ); ArrayList<String> list 
= new ArrayList<String>(); - list.add("x"); - list.add("y"); - list.add("z"); + list.add( "x" ); + list.add( "y" ); + list.add( "z" ); - ks.insert(list); + ks.insert( list ); ks.fireAllRules(); - assertEquals(3L, resList.get(0)); + assertEquals( 3L, resList.get( 0 ) ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateWithNull() { String drl = "rule foo\n" + - "when\n" + - "Object() from accumulate( Object(),\n" + - "init( Object res = null; )\n" + - "action( res = null; )\n" + - "result( res ) )\n" + - "then\n" + - "end"; + "when\n" + + "Object() from accumulate( Object(),\n" + + "init( Object res = null; )\n" + + "action( res = null; )\n" + + "result( res ) )\n" + + "then\n" + + "end"; - KieSession ksession = getKieSessionFromContentStrings(drl); + KieSession ksession = getKieSessionFromContentStrings( drl ); ksession.fireAllRules(); ksession.dispose(); } @@ -1956,15 +1956,15 @@ public static class MyObj { public static class NestedObj { public long value; - public NestedObj(long value) { + public NestedObj( long value ) { this.value = value; } } private final NestedObj nestedObj; - public MyObj(long value) { - nestedObj = new NestedObj(value); + public MyObj( long value ) { + nestedObj = new NestedObj( value ); } public NestedObj getNestedObj() { @@ -1972,39 +1972,39 @@ public NestedObj getNestedObj() { } } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateWithBoundExpression() { String drl = "package org.drools.compiler;\n" + - "import " + AccumulateTest.MyObj.class.getCanonicalName() + ";\n" + - "global java.util.List results\n" + - "rule init\n" + - " when\n" + - " then\n" + - " insert( new MyObj(5) );\n" + - " insert( new MyObj(4) );\n" + - "end\n" + - "rule foo\n" + - " salience -10\n" + - " when\n" + - " $n : Number() from accumulate( MyObj( $val : nestedObj.value ),\n" + - " sum( $val ) )\n" + - " then\n" + - " System.out.println($n);\n" + - " results.add($n);\n" + - "end"; + "import " + AccumulateTest.MyObj.class.getCanonicalName() + ";\n" + + "global java.util.List results\n" + + "rule init\n" + + " when\n" + + " then\n" + + " insert( new MyObj(5) );\n" + + " insert( new MyObj(4) );\n" + + "end\n" + + "rule foo\n" + + " salience -10\n" + + " when\n" + + " $n : Number() from accumulate( MyObj( $val : nestedObj.value ),\n" + + " sum( $val ) )\n" + + " then\n" + + " System.out.println($n);\n" + + " results.add($n);\n" + + "end"; KieBase kbase = loadKnowledgeBaseFromString( drl ); KieSession ksession = kbase.newKieSession(); final List<Number> results = new ArrayList<Number>(); ksession.setGlobal( "results", - results ); + results ); ksession.fireAllRules(); ksession.dispose(); assertEquals( 1, - results.size() ); + results.size() ); assertEquals( 9.0, - results.get( 0 ) ); + results.get( 0 ) ); } @Test(timeout = 5000) @@ -2028,11 +2028,11 @@ public void testInfiniteLoopAddingPkgAfterSession() throws Exception { StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession(); // To reproduce, Need to have 3 object asserted (not less) : - ksession.insert(new Triple("<http://deductions.sf.net/samples/princing.n3p.n3#CN1>", "<http://deductions.sf.net/samples/princing.n3p.n3#number>", "200")); - ksession.insert(new Triple("<http://deductions.sf.net/samples/princing.n3p.n3#CN2>", "<http://deductions.sf.net/samples/princing.n3p.n3#number>", "100")); - ksession.insert(new Triple("<http://deductions.sf.net/samples/princing.n3p.n3#CN3>", "<http://deductions.sf.net/samples/princing.n3p.n3#number>", "100")); + ksession.insert( 
new Triple( "<http://deductions.sf.net/samples/princing.n3p.n3#CN1>", "<http://deductions.sf.net/samples/princing.n3p.n3#number>", "200" ) ); + ksession.insert( new Triple( "<http://deductions.sf.net/samples/princing.n3p.n3#CN2>", "<http://deductions.sf.net/samples/princing.n3p.n3#number>", "100" ) ); + ksession.insert( new Triple( "<http://deductions.sf.net/samples/princing.n3p.n3#CN3>", "<http://deductions.sf.net/samples/princing.n3p.n3#number>", "100" ) ); - kbase.addKnowledgePackages( loadKnowledgePackagesFromString(rule) ); + kbase.addKnowledgePackages( loadKnowledgePackagesFromString( rule ) ); ksession.fireAllRules(); } @@ -2041,10 +2041,13 @@ public static class Triple { private String predicate; private String object; - /** for javabeans */ - public Triple() {} + /** + * for javabeans + */ + public Triple() { + } - public Triple(String subject, String predicate, String object) { + public Triple( String subject, String predicate, String object ) { this.subject = subject; this.predicate = predicate; this.object = object; @@ -2064,7 +2067,7 @@ public String getObject() { } } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateWithVarsOutOfHashOrder() throws Exception { // JBRULES-3494 String rule = "package com.sample;\n" + @@ -2096,9 +2099,9 @@ public void testAccumulateWithVarsOutOfHashOrder() throws Exception { fail( kbuilder.getErrors().toString() ); } final KnowledgeBase kbase = getKnowledgeBase(); - StatefulKnowledgeSession ksession = createKnowledgeSession(kbase); + StatefulKnowledgeSession ksession = createKnowledgeSession( kbase ); - kbase.addKnowledgePackages(loadKnowledgePackagesFromString(rule) ); + kbase.addKnowledgePackages( loadKnowledgePackagesFromString( rule ) ); ksession.fireAllRules(); QueryResults res = ksession.getQueryResults( "getResults", "1", Variable.v ); @@ -2106,111 +2109,111 @@ public void testAccumulateWithVarsOutOfHashOrder() throws Exception { Object o = res.iterator().next().get( "$holders" ); assertTrue( o instanceof List ); - assertEquals( 1, ((List) o).size() ); + assertEquals( 1, ( (List) o ).size() ); } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateWithWindow() { String str = "global java.util.Map map;\n" + - " \n" + - "declare Double\n" + - "@role(event)\n" + - "end\n" + - " \n" + - "declare window Streem\n" + - " Double() over window:length( 10 )\n" + - "end\n" + - " \n" + - "rule \"See\"\n" + - "when\n" + - " $a : Double() from accumulate (\n" + - " $d: Double()\n" + - " from window Streem,\n" + - " sum( $d )\n" + - " )\n" + - "then\n" + - " System.out.println( \"We have a sum \" + $a );\n" + - "end\n"; + " \n" + + "declare Double\n" + + "@role(event)\n" + + "end\n" + + " \n" + + "declare window Streem\n" + + " Double() over window:length( 10 )\n" + + "end\n" + + " \n" + + "rule \"See\"\n" + + "when\n" + + " $a : Double() from accumulate (\n" + + " $d: Double()\n" + + " from window Streem,\n" + + " sum( $d )\n" + + " )\n" + + "then\n" + + " System.out.println( \"We have a sum \" + $a );\n" + + "end\n"; - KieSession ksession = getKieSessionFromContentStrings(str); + KieSession ksession = getKieSessionFromContentStrings( str ); Map res = new HashMap(); ksession.setGlobal( "map", res ); ksession.fireAllRules(); for ( int j = 0; j < 33; j++ ) { - ksession.insert(1.0 * j); + ksession.insert( 1.0 * j ); ksession.fireAllRules(); } } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateWithEntryPoint() { String str = "global java.util.Map map;\n" + - " \n" + - "declare 
Double\n" + - "@role(event)\n" + - "end\n" + - " \n" + - "rule \"See\"\n" + - "when\n" + - " $a : Double() from accumulate (\n" + - " $d: Double()\n" + - " from entry-point data,\n" + - " sum( $d )\n" + - " )\n" + - "then\n" + - " System.out.println( \"We have a sum \" + $a );\n" + - "end\n"; + " \n" + + "declare Double\n" + + "@role(event)\n" + + "end\n" + + " \n" + + "rule \"See\"\n" + + "when\n" + + " $a : Double() from accumulate (\n" + + " $d: Double()\n" + + " from entry-point data,\n" + + " sum( $d )\n" + + " )\n" + + "then\n" + + " System.out.println( \"We have a sum \" + $a );\n" + + "end\n"; - KieSession ksession = getKieSessionFromContentStrings(str); + KieSession ksession = getKieSessionFromContentStrings( str ); Map res = new HashMap(); ksession.setGlobal( "map", res ); ksession.fireAllRules(); for ( int j = 0; j < 33; j++ ) { - ksession.getEntryPoint( "data" ).insert(1.0 * j); + ksession.getEntryPoint( "data" ).insert( 1.0 * j ); ksession.fireAllRules(); } } - @Test (timeout = 10000) + @Test(timeout = 10000) public void testAccumulateWithWindowAndEntryPoint() { String str = "global java.util.Map map;\n" + - " \n" + - "declare Double\n" + - "@role(event)\n" + - "end\n" + - " \n" + - "declare window Streem\n" + - " Double() over window:length( 10 ) from entry-point data\n" + - "end\n" + - " \n" + - "rule \"See\"\n" + - "when\n" + - " $a : Double() from accumulate (\n" + - " $d: Double()\n" + - " from window Streem,\n" + - " sum( $d )\n" + - " )\n" + - "then\n" + - " System.out.println( \"We have a sum \" + $a );\n" + - "end\n"; + " \n" + + "declare Double\n" + + "@role(event)\n" + + "end\n" + + " \n" + + "declare window Streem\n" + + " Double() over window:length( 10 ) from entry-point data\n" + + "end\n" + + " \n" + + "rule \"See\"\n" + + "when\n" + + " $a : Double() from accumulate (\n" + + " $d: Double()\n" + + " from window Streem,\n" + + " sum( $d )\n" + + " )\n" + + "then\n" + + " System.out.println( \"We have a sum \" + $a );\n" + + "end\n"; - KieSession ksession = getKieSessionFromContentStrings(str); + KieSession ksession = getKieSessionFromContentStrings( str ); Map res = new HashMap(); ksession.setGlobal( "map", res ); ksession.fireAllRules(); for ( int j = 0; j < 33; j++ ) { - ksession.getEntryPoint( "data" ).insert(1.0 * j); + ksession.getEntryPoint( "data" ).insert( 1.0 * j ); ksession.fireAllRules(); } } - @Test (timeout = 10000) + @Test(timeout = 10000) public void test2AccumulatesWithOr() throws Exception { // JBRULES-3538 String str = @@ -2247,49 +2250,49 @@ public void test2AccumulatesWithOr() throws Exception { " map.put('count', ((Integer)map.get('count')) + 1 );\n " + "end\n"; - KieSession ksession = getKieSessionFromContentStrings(str); + KieSession ksession = getKieSessionFromContentStrings( str ); List list = new ArrayList(); Map map = new HashMap(); - ksession.setGlobal( "map", map); + ksession.setGlobal( "map", map ); map.put( "Jos Jr Jr", new HashMap() ); map.put( "Jos", new HashMap() ); - map.put( "count",0 ); + map.put( "count", 0 ); - MyPerson josJr = new MyPerson("Jos Jr Jr", 20, - Arrays.asList(new MyPerson("John Jr 1st", 10, - Arrays.asList(new MyPerson("John Jr Jrx", 4, Collections.<MyPerson>emptyList()))), - new MyPerson("John Jr 2nd", 8, Collections.<MyPerson>emptyList()))); + MyPerson josJr = new MyPerson( "Jos Jr Jr", 20, + Arrays.asList( new MyPerson( "John Jr 1st", 10, + Arrays.asList( new MyPerson( "John Jr Jrx", 4, Collections.<MyPerson>emptyList() ) ) ), + new MyPerson( "John Jr 2nd", 8, Collections.<MyPerson>emptyList() ) ) ); - 
MyPerson jos = new MyPerson("Jos", 30, - Arrays.asList(new MyPerson("Jeff Jr 1st", 10, Collections.<MyPerson>emptyList()), - new MyPerson("Jeff Jr 2nd", 8, Collections.<MyPerson>emptyList())) ); + MyPerson jos = new MyPerson( "Jos", 30, + Arrays.asList( new MyPerson( "Jeff Jr 1st", 10, Collections.<MyPerson>emptyList() ), + new MyPerson( "Jeff Jr 2nd", 8, Collections.<MyPerson>emptyList() ) ) ); - ksession.execute(new InsertElementsCommand(Arrays.asList(new Object[]{ josJr, jos }))); + ksession.execute( new InsertElementsCommand( Arrays.asList( new Object[]{josJr, jos} ) ) ); ksession.fireAllRules(); System.out.println( map ); - assertEquals( 2, map.get("count") ); - Map pMap = (Map) map.get("Jos Jr Jr"); - assertEquals( 50.0, pMap.get("total") ); - List kids = ( List ) pMap.get("k"); + assertEquals( 2, map.get( "count" ) ); + Map pMap = (Map) map.get( "Jos Jr Jr" ); + assertEquals( 50.0, pMap.get( "total" ) ); + List kids = (List) pMap.get( "k" ); assertEquals( 1, kids.size() ); - assertEquals( "John Jr Jrx", ((MyPerson)kids.get(0)).getName() ); - assertEquals( josJr, pMap.get("p") ); - assertEquals( josJr, pMap.get("r") ); + assertEquals( "John Jr Jrx", ( (MyPerson) kids.get( 0 ) ).getName() ); + assertEquals( josJr, pMap.get( "p" ) ); + assertEquals( josJr, pMap.get( "r" ) ); - pMap = (Map) map.get("Jos"); - assertEquals( 50.0, pMap.get("total") ); - kids = ( List ) pMap.get("k"); + pMap = (Map) map.get( "Jos" ); + assertEquals( 50.0, pMap.get( "total" ) ); + kids = (List) pMap.get( "k" ); assertEquals( 1, kids.size() ); - assertEquals( "John Jr Jrx", ((MyPerson)kids.get(0)).getName() ); - assertEquals( josJr, pMap.get("p") ); - assertEquals( jos, pMap.get("r") ); + assertEquals( "John Jr Jrx", ( (MyPerson) kids.get( 0 ) ).getName() ); + assertEquals( josJr, pMap.get( "p" ) ); + assertEquals( jos, pMap.get( "r" ) ); } public static class MyPerson { - public MyPerson(String name, Integer age, Collection<MyPerson> kids) { + public MyPerson( String name, Integer age, Collection<MyPerson> kids ) { this.name = name; this.age = age; this.kids = kids; @@ -2305,7 +2308,7 @@ public String getName() { return name; } - public void setName(String name) { + public void setName( String name ) { this.name = name; } @@ -2313,7 +2316,7 @@ public Integer getAge() { return age; } - public void setAge(Integer age) { + public void setAge( Integer age ) { this.age = age; } @@ -2321,7 +2324,7 @@ public Collection<MyPerson> getKids() { return kids; } - public void setKids(Collection<MyPerson> kids) { + public void setKids( Collection<MyPerson> kids ) { this.kids = kids; } } @@ -2329,7 +2332,7 @@ public void setKids(Collection<MyPerson> kids) { public static class Course { private int minWorkingDaySize; - public Course(int minWorkingDaySize) { + public Course( int minWorkingDaySize ) { this.minWorkingDaySize = minWorkingDaySize; } @@ -2337,7 +2340,7 @@ public int getMinWorkingDaySize() { return minWorkingDaySize; } - public void setMinWorkingDaySize(int minWorkingDaySize) { + public void setMinWorkingDaySize( int minWorkingDaySize ) { this.minWorkingDaySize = minWorkingDaySize; } } @@ -2346,7 +2349,7 @@ public static class Lecture { private Course course; private int day; - public Lecture(Course course, int day) { + public Lecture( Course course, int day ) { this.course = course; this.day = day; } @@ -2355,7 +2358,7 @@ public Course getCourse() { return course; } - public void setCourse(Course course) { + public void setCourse( Course course ) { this.course = course; } @@ -2363,7 +2366,7 @@ public int getDay() 
{ return day; } - public void setDay(int day) { + public void setDay( int day ) { this.day = day; } } @@ -2389,31 +2392,31 @@ public void testAccumulateWithExists() { " list.add( $dayCount );\n" + "end\n"; - KieSession ksession = getKieSessionFromContentStrings(str); + KieSession ksession = getKieSessionFromContentStrings( str ); List list = new ArrayList(); - ksession.setGlobal("list", list); + ksession.setGlobal( "list", list ); Integer day1 = 1; Integer day2 = 2; Integer day3 = 3; - Course c = new Course(2); + Course c = new Course( 2 ); - Lecture l1 = new Lecture(c, day1); - Lecture l2 = new Lecture(c, day2); + Lecture l1 = new Lecture( c, day1 ); + Lecture l2 = new Lecture( c, day2 ); - ksession.insert(day1); - ksession.insert(day2); - ksession.insert(day3); - ksession.insert(c); - ksession.insert(l1); - ksession.insert(l2); + ksession.insert( day1 ); + ksession.insert( day2 ); + ksession.insert( day3 ); + ksession.insert( c ); + ksession.insert( l1 ); + ksession.insert( l2 ); - assertEquals(1, ksession.fireAllRules()); + assertEquals( 1, ksession.fireAllRules() ); assertEquals( 2, list.size() ); - assertEquals( c, list.get(0)); - assertEquals( 2l, list.get(1)); + assertEquals( c, list.get( 0 ) ); + assertEquals( 2l, list.get( 1 ) ); } @Test @@ -2433,24 +2436,24 @@ public void testAccumulatesExpireVsCancel() throws Exception { " $d : FactTest() over window:time(1m), $tot : count($d); $tot > 0 )\n" + "then\n" + " System.out.println( $tot ); \n" + - " list.add( $tot.intValue() ); \n "+ + " list.add( $tot.intValue() ); \n " + "end\n" + "\n"; KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); - kbuilder.add(ResourceFactory.newByteArrayResource( drl.getBytes() ), ResourceType.DRL); + kbuilder.add( ResourceFactory.newByteArrayResource( drl.getBytes() ), ResourceType.DRL ); assertFalse( kbuilder.hasErrors() ); KieBaseConfiguration kbConf = KnowledgeBaseFactory.newKnowledgeBaseConfiguration(); - kbConf.setOption(EventProcessingOption.STREAM); + kbConf.setOption( EventProcessingOption.STREAM ); - KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(kbConf); - kbase.addKnowledgePackages(kbuilder.getKnowledgePackages()); + KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase( kbConf ); + kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() ); KieSessionConfiguration ksConf = KnowledgeBaseFactory.newKnowledgeSessionConfiguration(); - ksConf.setOption( ClockTypeOption.get("pseudo") ); - StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession(ksConf,null); + ksConf.setOption( ClockTypeOption.get( "pseudo" ) ); + StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession( ksConf, null ); ArrayList list = new ArrayList(); ksession.setGlobal( "list", list ); @@ -2504,15 +2507,15 @@ public void testManySlidingWindows() throws Exception { KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); - kbuilder.add(ResourceFactory.newByteArrayResource( drl.getBytes() ), ResourceType.DRL); + kbuilder.add( ResourceFactory.newByteArrayResource( drl.getBytes() ), ResourceType.DRL ); System.out.println( kbuilder.getErrors() ); assertFalse( kbuilder.hasErrors() ); KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(); - kbase.addKnowledgePackages(kbuilder.getKnowledgePackages()); + kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() ); StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession(); - List list = new ArrayList( ); + List list = new ArrayList(); ksession.setGlobal( "list", list ); 
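// Feed a few integers through both sliding-window accumulates; the window
// declarations themselves sit in the DRL above (outside this hunk), but the
// closing assertion expects the accumulated results to end up as [2, 5].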
ksession.insert( new Integer( 20 ) ); @@ -2540,96 +2543,104 @@ public void testManySlidingWindows() throws Exception { assertEquals( list, Arrays.asList( 2, 5 ) ); } - + @Test public void testImportAccumulateFunction() throws Exception { String drl = "package org.foo.bar\n" - + "import accumulate "+TestFunction.class.getCanonicalName()+" f\n" - + "rule X when\n" - + " accumulate( $s : String(),\n" - + " $v : f( $s ) )\n" - + "then\n" - + "end\n"; - ReleaseId releaseId = new ReleaseIdImpl("foo", "bar", "1.0"); + + "import accumulate " + TestFunction.class.getCanonicalName() + " f\n" + + "rule X when\n" + + " accumulate( $s : String(),\n" + + " $v : f( $s ) )\n" + + "then\n" + + "end\n"; + ReleaseId releaseId = new ReleaseIdImpl( "foo", "bar", "1.0" ); KieServices ks = KieServices.Factory.get(); createAndDeployJar( ks, releaseId, drl ); - + KieContainer kc = ks.newKieContainer( releaseId ); KieSession ksession = kc.newKieSession(); - - AgendaEventListener ael = mock(AgendaEventListener.class); - ksession.addEventListener(ael); - - ksession.insert("x"); + + AgendaEventListener ael = mock( AgendaEventListener.class ); + ksession.addEventListener( ael ); + + ksession.insert( "x" ); ksession.fireAllRules(); - - ArgumentCaptor<AfterMatchFiredEvent> ac = ArgumentCaptor.forClass(AfterMatchFiredEvent.class); - verify( ael ).afterMatchFired(ac.capture()); - - assertThat( (Integer) ac.getValue().getMatch().getDeclarationValue("$v"), is(Integer.valueOf(1)) ); + + ArgumentCaptor<AfterMatchFiredEvent> ac = ArgumentCaptor.forClass( AfterMatchFiredEvent.class ); + verify( ael ).afterMatchFired( ac.capture() ); + + assertThat( (Integer) ac.getValue().getMatch().getDeclarationValue( "$v" ), is( Integer.valueOf( 1 ) ) ); } @Test public void testImportAccumulateFunctionWithDeclaration() throws Exception { // DROOLS-750 String drl = "package org.foo.bar\n" - + "import accumulate "+TestFunction.class.getCanonicalName()+" f;\n" - + "import "+Person.class.getCanonicalName()+";\n" - + "declare Person \n" - + " @propertyReactive\n" - + "end\n" - + "rule X when\n" - + " accumulate( $s : String(),\n" - + " $v : f( $s ) )\n" - + "then\n" - + "end\n"; - ReleaseId releaseId = new ReleaseIdImpl("foo", "bar", "1.0"); + + "import accumulate " + TestFunction.class.getCanonicalName() + " f;\n" + + "import " + Person.class.getCanonicalName() + ";\n" + + "declare Person \n" + + " @propertyReactive\n" + + "end\n" + + "rule X when\n" + + " accumulate( $s : String(),\n" + + " $v : f( $s ) )\n" + + "then\n" + + "end\n"; + ReleaseId releaseId = new ReleaseIdImpl( "foo", "bar", "1.0" ); KieServices ks = KieServices.Factory.get(); createAndDeployJar( ks, releaseId, drl ); KieContainer kc = ks.newKieContainer( releaseId ); KieSession ksession = kc.newKieSession(); - AgendaEventListener ael = mock(AgendaEventListener.class); - ksession.addEventListener(ael); + AgendaEventListener ael = mock( AgendaEventListener.class ); + ksession.addEventListener( ael ); - ksession.insert("x"); + ksession.insert( "x" ); ksession.fireAllRules(); - ArgumentCaptor<AfterMatchFiredEvent> ac = ArgumentCaptor.forClass(AfterMatchFiredEvent.class); - verify( ael ).afterMatchFired(ac.capture()); + ArgumentCaptor<AfterMatchFiredEvent> ac = ArgumentCaptor.forClass( AfterMatchFiredEvent.class ); + verify( ael ).afterMatchFired( ac.capture() ); - assertThat( (Integer) ac.getValue().getMatch().getDeclarationValue("$v"), is(Integer.valueOf(1)) ); + assertThat( (Integer) ac.getValue().getMatch().getDeclarationValue( "$v" ), is( Integer.valueOf( 1 ) ) ); } public static 
class TestFunction implements AccumulateFunction { @Override - public void writeExternal(ObjectOutput out) throws IOException { + public void writeExternal( ObjectOutput out ) throws IOException { } + @Override - public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { + public void readExternal( ObjectInput in ) throws IOException, ClassNotFoundException { } + @Override public Serializable createContext() { return null; } + @Override - public void init(Serializable context) throws Exception { + public void init( Serializable context ) throws Exception { } + @Override - public void accumulate(Serializable context, Object value) { + public void accumulate( Serializable context, Object value ) { } + @Override - public void reverse(Serializable context, Object value) throws Exception { + public void reverse( Serializable context, Object value ) throws Exception { } + @Override - public Object getResult(Serializable context) throws Exception { - return Integer.valueOf(1); + public Object getResult( Serializable context ) throws Exception { + return Integer.valueOf( 1 ); } + @Override public boolean supportsReverse() { return true; } + @Override public Class<?> getResultType() { return Number.class; @@ -2662,15 +2673,15 @@ public void testAccumulateWithSharedNode() throws Exception { KieSession ksession = helper.build().newKieSession(); List<String> list = new java.util.ArrayList(); - ksession.insert(list); + ksession.insert( list ); - ksession.insert(42.0); - ksession.insert(9000); - ksession.insert("a"); - ksession.insert("b"); + ksession.insert( 42.0 ); + ksession.insert( 9000 ); + ksession.insert( "a" ); + ksession.insert( "b" ); ksession.fireAllRules(); - assertEquals(1, list.size()); + assertEquals( 1, list.size() ); } @Test @@ -2689,17 +2700,17 @@ public void testEmptyAccumulateInSubnetwork() { "end"; KieHelper helper = new KieHelper(); - helper.addContent(drl, ResourceType.DRL); + helper.addContent( drl, ResourceType.DRL ); KieSession ksession = helper.build().newKieSession(); List<Long> list = new ArrayList<Long>(); - ksession.setGlobal("list", list); + ksession.setGlobal( "list", list ); - ksession.insert(1); + ksession.insert( 1 ); ksession.fireAllRules(); - assertEquals(1, list.size()); - assertEquals(0, (long)list.get(0)); + assertEquals( 1, list.size() ); + assertEquals( 0, (long) list.get( 0 ) ); } @Test @@ -2719,18 +2730,18 @@ public void testEmptyAccumulateInSubnetworkFollwedByPattern() { "end"; KieHelper helper = new KieHelper(); - helper.addContent(drl, ResourceType.DRL); + helper.addContent( drl, ResourceType.DRL ); KieSession ksession = helper.build().newKieSession(); List<Long> list = new ArrayList<Long>(); - ksession.setGlobal("list", list); + ksession.setGlobal( "list", list ); - ksession.insert(1); - ksession.insert(1L); + ksession.insert( 1 ); + ksession.insert( 1L ); ksession.fireAllRules(); - assertEquals(1, list.size()); - assertEquals(0, (long)list.get(0)); + assertEquals( 1, list.size() ); + assertEquals( 0, (long) list.get( 0 ) ); } @Test @@ -2754,7 +2765,7 @@ public void testAccumulateWithoutSeparator() throws Exception { KieServices ks = KieServices.Factory.get(); KieFileSystem kfs = ks.newKieFileSystem().write( "src/main/resources/r1.drl", str ); Results results = ks.newKieBuilder( kfs ).buildAll().getResults(); - assertFalse(results.getMessages().isEmpty()); + assertFalse( results.getMessages().isEmpty() ); } @Test @@ -2772,7 +2783,7 @@ public void testFromAccumulateWithoutSeparator() throws Exception { KieServices ks = 
KieServices.Factory.get(); KieFileSystem kfs = ks.newKieFileSystem().write( "src/main/resources/r1.drl", str ); Results results = ks.newKieBuilder( kfs ).buildAll().getResults(); - assertFalse(results.getMessages().isEmpty()); + assertFalse( results.getMessages().isEmpty() ); } @Test @@ -2835,9 +2846,9 @@ public void testAccFunctionOpaqueJoins() throws Exception { KieHelper helper = new KieHelper(); KieSession ks = helper.addContent( str, ResourceType.DRL ).build().newKieSession(); - List list = new ArrayList( ); + List list = new ArrayList(); ks.setGlobal( "list", list ); - List list2 = new ArrayList( ); + List list2 = new ArrayList(); ks.setGlobal( "list2", list2 ); // init data @@ -2858,9 +2869,11 @@ public void testAccFunctionOpaqueJoins() throws Exception { public static class ExpectedMessage { String type; - public ExpectedMessage(String type) { + + public ExpectedMessage( String type ) { this.type = type; } + public String getType() { return type; } @@ -2871,19 +2884,24 @@ public static class ExpectedMessageToRegister { String type; boolean registered = false; List<ExpectedMessage> msgs = new ArrayList<ExpectedMessage>(); - public ExpectedMessageToRegister(String type) { + + public ExpectedMessageToRegister( String type ) { this.type = type; } + public String getType() { return type; } + public List<ExpectedMessage> getExpectedMessages() { return msgs; } + public boolean isRegistered() { return registered; } - public void setRegistered(boolean registered) { + + public void setRegistered( boolean registered ) { this.registered = registered; } } @@ -2913,16 +2931,16 @@ public void testReaccumulateForLeftTuple() { + " java.lang.System.out.println( $l.size() );" + " end\n"; - KieSession ksession = new KieHelper().addContent(drl1, ResourceType.DRL) + KieSession ksession = new KieHelper().addContent( drl1, ResourceType.DRL ) .build() .newKieSession(); - ExpectedMessage psExpMsg1 = new ExpectedMessage("Index"); + ExpectedMessage psExpMsg1 = new ExpectedMessage( "Index" ); - ExpectedMessageToRegister etr1 = new ExpectedMessageToRegister("Index"); - etr1.msgs.add(psExpMsg1); + ExpectedMessageToRegister etr1 = new ExpectedMessageToRegister( "Index" ); + etr1.msgs.add( psExpMsg1 ); - ksession.insert(etr1); + ksession.insert( etr1 ); ksession.fireAllRules(); } @@ -2941,41 +2959,113 @@ public void testNoLoopAccumulate() { " modify($a) { set($val.intValue()) }\n" + "end"; - KieSession ksession = new KieHelper().addContent(drl1, ResourceType.DRL) + KieSession ksession = new KieHelper().addContent( drl1, ResourceType.DRL ) .build() .newKieSession(); - AtomicInteger counter = new AtomicInteger(0); - ksession.insert(counter); + AtomicInteger counter = new AtomicInteger( 0 ); + ksession.insert( counter ); - ksession.insert("1"); + ksession.insert( "1" ); ksession.fireAllRules(); - assertEquals(1, counter.get()); + assertEquals( 1, counter.get() ); - ksession.insert("2"); + ksession.insert( "2" ); ksession.fireAllRules(); - assertEquals(2, counter.get()); + assertEquals( 2, counter.get() ); } - private KieSession createKieSession(KieBase kbase) { + private KieSession createKieSession( KieBase kbase ) { return kbase.newKieSession(); } - private KieSession getKieSessionFromResources(String... classPathResources){ - KieBase kbase = loadKnowledgeBase(null, null, classPathResources); + private KieSession getKieSessionFromResources( String... 
classPathResources ) { + KieBase kbase = loadKnowledgeBase( null, null, classPathResources ); return kbase.newKieSession(); } - private KieBase loadKieBaseFromString(String... drlContentStrings) { - return loadKnowledgeBaseFromString(null, null, phreak, - drlContentStrings); + private KieBase loadKieBaseFromString( String... drlContentStrings ) { + return loadKnowledgeBaseFromString( null, null, phreak, + drlContentStrings ); } - private KieSession getKieSessionFromContentStrings(String... drlContentStrings) { - KieBase kbase = loadKnowledgeBaseFromString(null, null, phreak, - drlContentStrings); + private KieSession getKieSessionFromContentStrings( String... drlContentStrings ) { + KieBase kbase = loadKnowledgeBaseFromString( null, null, phreak, + drlContentStrings ); return kbase.newKieSession(); } + + @Test + public void testAccumulateWithOr() { + // DROOLS-839 + String drl1 = + "import " + Converter.class.getCanonicalName() + ";\n" + + "global java.util.List list;\n" + + "rule R when\n" + + " (or\n" + + " Integer (this == 1)\n" + + " Integer (this == 2)\n" + + " )\n" + + "String( $length : length )\n" + + "accumulate ( $c : Converter(), $result : sum( $c.convert($length) ) )\n" + + "then\n" + + " list.add($result);\n" + + "end"; + + KieSession ksession = new KieHelper().addContent( drl1, ResourceType.DRL ) + .build() + .newKieSession(); + + List<Double> list = new ArrayList<Double>(); + ksession.setGlobal( "list", list ); + + ksession.insert( 1 ); + ksession.insert( "hello" ); + ksession.insert( new Converter() ); + ksession.fireAllRules(); + + assertEquals( 1, list.size() ); + assertEquals( "hello".length(), (double)list.get(0), 0.01 ); + } + + @Test + public void testMvelAccumulateWithOr() { + // DROOLS-839 + String drl1 = + "import " + Converter.class.getCanonicalName() + ";\n" + + "global java.util.List list;\n" + + "rule R dialect \"mvel\" when\n" + + " (or\n" + + " Integer (this == 1)\n" + + " Integer (this == 2)\n" + + " )\n" + + "String( $length : length )\n" + + "accumulate ( $c : Converter(), $result : sum( $c.convert($length) ) )\n" + + "then\n" + + " list.add($result);\n" + + "end"; + + KieSession ksession = new KieHelper().addContent( drl1, ResourceType.DRL ) + .build() + .newKieSession(); + + List<Double> list = new ArrayList<Double>(); + ksession.setGlobal( "list", list ); + + ksession.insert( 1 ); + ksession.insert( "hello" ); + ksession.insert( new Converter() ); + ksession.fireAllRules(); + + assertEquals( 1, list.size() ); + assertEquals( "hello".length(), (double)list.get(0), 0.01 ); + } + + public static class Converter { + public static int convert(int i) { + return i; + } + } } diff --git a/drools-core/src/main/java/org/drools/core/base/accumulators/MVELAccumulatorFunctionExecutor.java b/drools-core/src/main/java/org/drools/core/base/accumulators/MVELAccumulatorFunctionExecutor.java index 2814dfb994c..9ab3a89c275 100644 --- a/drools-core/src/main/java/org/drools/core/base/accumulators/MVELAccumulatorFunctionExecutor.java +++ b/drools-core/src/main/java/org/drools/core/base/accumulators/MVELAccumulatorFunctionExecutor.java @@ -124,8 +124,7 @@ public void accumulate(Object workingMemoryContext, handle.getObject(), factory ); if ( this.function.supportsReverse() ) { - ((MVELAccumulatorFunctionContext) context).reverseSupport.put( Integer.valueOf( handle.getId() ), - value ); + ((MVELAccumulatorFunctionContext) context).reverseSupport.put( handle.getId(), value ); } this.function.accumulate( ((MVELAccumulatorFunctionContext) context).context, value ); @@ -138,7 
+137,7 @@ public void reverse(Object workingMemoryContext, Declaration[] declarations, Declaration[] innerDeclarations, WorkingMemory workingMemory) throws Exception { - final Object value = ((MVELAccumulatorFunctionContext) context).reverseSupport.remove( Integer.valueOf( handle.getId() ) ); + final Object value = ((MVELAccumulatorFunctionContext) context).reverseSupport.remove( handle.getId() ); this.function.reverse( ((MVELAccumulatorFunctionContext) context).context, value ); } @@ -167,6 +166,10 @@ public Declaration[] getRequiredDeclarations() { return unit.getPreviousDeclarations(); } + public void replaceDeclaration( Declaration declaration, Declaration resolved ) { + unit.replaceDeclaration( declaration, resolved ); + } + private static class MVELAccumulatorFunctionContext implements Externalizable { diff --git a/drools-core/src/main/java/org/drools/core/base/mvel/MVELCompilationUnit.java b/drools-core/src/main/java/org/drools/core/base/mvel/MVELCompilationUnit.java index 0c1f70f4601..aecb9203c9c 100644 --- a/drools-core/src/main/java/org/drools/core/base/mvel/MVELCompilationUnit.java +++ b/drools-core/src/main/java/org/drools/core/base/mvel/MVELCompilationUnit.java @@ -118,8 +118,6 @@ public class MVELCompilationUnit char.class ); } - public static final Object COMPILER_LOCK = new Object(); - public MVELCompilationUnit() { } @@ -240,10 +238,7 @@ public Serializable getCompiledExpression(MVELDialectRuntimeData runtimeData, Ob String[] varNames = parserContext.getIndexedVarNames(); - ExecutableStatement stmt = (ExecutableStatement) compile( expression, - runtimeData.getPackageClassLoader(), - parserContext, - languageLevel ); + ExecutableStatement stmt = (ExecutableStatement) compile( expression, parserContext ); Set<String> localNames = parserContext.getVariables().keySet(); @@ -415,10 +410,8 @@ public static InternalFactHandle getFactHandle( Declaration declaration, return handles.length > declaration.getPattern().getOffset() ? 
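 * After transformation, each returned GroupElement is a self-contained
 * top-level 'and' branch: splitOr( ... ) carves one branch per 'or' child and
 * fixClonedDeclarations( ... ) re-resolves declarations that were duplicated
 * while cloning the condition tree.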
handles[declaration.getPattern().getOffset()] : null; } - public static Serializable compile( final String text, - final ClassLoader classLoader, - final ParserContext parserContext, - final int languageLevel ) { + private static Serializable compile( final String text, + final ParserContext parserContext ) { MVEL.COMPILER_OPT_ALLOW_NAKED_METH_CALL = true; MVEL.COMPILER_OPT_ALLOW_OVERRIDE_ALL_PROPHANDLING = true; MVEL.COMPILER_OPT_ALLOW_RESOLVE_INNERCLASSES_WITH_DOTNOTATION = true; @@ -541,10 +534,6 @@ public static Map getInterceptors() { return primitivesMap; } - public static Object getCompilerLock() { - return COMPILER_LOCK; - } - public static class DroolsVarFactory implements VariableResolverFactory { private KnowledgeHelper knowledgeHelper; @@ -668,15 +657,6 @@ protected VariableResolver addResolver( String name, // return vr; } - private VariableResolver getResolver( String name ) { -// for ( int i = 0; i < indexedVariableNames.length; i++ ) { -// if ( indexedVariableNames[i].equals( name ) ) { -// return indexedVariableResolvers[i]; -// } -// } - return null; - } - public boolean isTarget( String name ) { // for ( String indexedVariableName : indexedVariableNames ) { // if ( indexedVariableName.equals( name ) ) { diff --git a/drools-core/src/main/java/org/drools/core/rule/Accumulate.java b/drools-core/src/main/java/org/drools/core/rule/Accumulate.java index 812e818db23..4f1cfce08ca 100755 --- a/drools-core/src/main/java/org/drools/core/rule/Accumulate.java +++ b/drools-core/src/main/java/org/drools/core/rule/Accumulate.java @@ -109,7 +109,6 @@ public abstract Object getResult(final Object workingMemoryContext, /** * Returns true if this accumulate supports reverse - * @return */ public abstract boolean supportsReverse(); @@ -138,7 +137,7 @@ public Map<String, Declaration> getOuterDeclarations() { * @inheritDoc */ public Declaration resolveDeclaration(final String identifier) { - return (Declaration) this.source.getInnerDeclarations().get( identifier ); + return this.source.getInnerDeclarations().get( identifier ); } public abstract Object createWorkingMemoryContext(); @@ -160,7 +159,11 @@ public void replaceDeclaration(Declaration declaration, this.requiredDeclarations[i] = resolved; } } + replaceAccumulatorDeclaration(declaration, resolved); } + + protected abstract void replaceAccumulatorDeclaration(Declaration declaration, + Declaration resolved); public void resetInnerDeclarationCache() { this.innerDeclarationCache = null; @@ -175,6 +178,10 @@ protected Declaration[] getInnerDeclarationCache() { return this.innerDeclarationCache; } + public Declaration[] getRequiredDeclarations() { + return requiredDeclarations; + } + public boolean hasRequiredDeclarations() { return requiredDeclarations != null && requiredDeclarations.length > 0; } diff --git a/drools-core/src/main/java/org/drools/core/rule/LogicTransformer.java b/drools-core/src/main/java/org/drools/core/rule/LogicTransformer.java index e1715654041..91b7332fe48 100644 --- a/drools-core/src/main/java/org/drools/core/rule/LogicTransformer.java +++ b/drools-core/src/main/java/org/drools/core/rule/LogicTransformer.java @@ -26,7 +26,6 @@ import java.util.ArrayList; import java.util.HashMap; -import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; @@ -40,7 +39,7 @@ * delegated to the Builder. 
*/ public class LogicTransformer { - private final Map orTransformations = new HashMap(); + private final Map<GroupElement.Type, Transformation> orTransformations = new HashMap<GroupElement.Type, Transformation>(); private static LogicTransformer INSTANCE = new LogicTransformer(); @@ -93,10 +92,10 @@ public GroupElement[] transform( final GroupElement cloned, Map<String, Class<?> ands = new GroupElement[]{wrapper}; } - for ( int i = 0; i < ands.length; i++ ) { + for ( GroupElement and : ands ) { // fix the cloned declarations - this.fixClonedDeclarations( ands[i], globals ); - ands[i].setRoot( true ); + this.fixClonedDeclarations( and, globals ); + and.setRoot( true ); } return hasNamedConsequenceAndIsStream ? processNamedConsequences(ands) : ands; @@ -129,9 +128,8 @@ private GroupElement[] processNamedConsequences(GroupElement[] ands) { protected GroupElement[] splitOr( final GroupElement cloned ) { GroupElement[] ands = new GroupElement[cloned.getChildren().size()]; int i = 0; - for ( final Iterator it = cloned.getChildren().iterator(); it.hasNext(); ) { - final RuleConditionElement branch = (RuleConditionElement) it.next(); - if ( (branch instanceof GroupElement) && (((GroupElement) branch).isAnd()) ) { + for ( final RuleConditionElement branch : cloned.getChildren() ) { + if ( ( branch instanceof GroupElement ) && ( ( (GroupElement) branch ).isAnd() ) ) { ands[i++] = (GroupElement) branch; } else { ands[i] = GroupElementFactory.newAndInstance(); @@ -146,11 +144,9 @@ protected GroupElement[] splitOr( final GroupElement cloned ) { * During the logic transformation, we eventually clone CEs, * specially patterns and corresponding declarations. So now * we need to fix any references to cloned declarations. - * @param and - * @param globals */ protected void fixClonedDeclarations( GroupElement and, Map<String, Class<?>> globals ) { - Stack contextStack = new Stack(); + Stack<RuleConditionElement> contextStack = new Stack<RuleConditionElement>(); DeclarationScopeResolver resolver = new DeclarationScopeResolver( globals, contextStack ); @@ -163,85 +159,53 @@ protected void fixClonedDeclarations( GroupElement and, Map<String, Class<?>> gl /** * recurse through the rule condition elements updating the declaration objecs - * @param resolver - * @param contextStack - * @param element */ private void processElement(final DeclarationScopeResolver resolver, - final Stack contextStack, + final Stack<RuleConditionElement> contextStack, final RuleConditionElement element) { if ( element instanceof Pattern ) { Pattern pattern = (Pattern) element; - for ( Iterator it = pattern.getNestedElements().iterator(); it.hasNext(); ) { + for ( RuleConditionElement ruleConditionElement : pattern.getNestedElements() ) { processElement( resolver, contextStack, - (RuleConditionElement) it.next() ); + ruleConditionElement ); } - for (Constraint next : pattern.getConstraints()) { - if (next instanceof Declaration) { + for (Constraint constraint : pattern.getConstraints()) { + if (constraint instanceof Declaration) { continue; } - Constraint constraint = (Constraint) next; - Declaration[] decl = constraint.getRequiredDeclarations(); - for (int i = 0; i < decl.length; i++) { - Declaration resolved = resolver.getDeclaration(null, - decl[i].getIdentifier()); - - if (constraint instanceof MvelConstraint && ((MvelConstraint) constraint).isUnification()) { - if (ClassObjectType.DroolsQuery_ObjectType.isAssignableFrom(resolved.getPattern().getObjectType())) { - Declaration redeclaredDeclr = new 
Declaration(resolved.getIdentifier(), ((MvelConstraint) constraint).getFieldExtractor(), pattern, false); - pattern.addDeclaration(redeclaredDeclr); - } else if ( resolved.getPattern() != pattern ) { - ((MvelConstraint) constraint).unsetUnification(); - } - } - - if (resolved != null && resolved != decl[i] && resolved.getPattern() != pattern) { - constraint.replaceDeclaration(decl[i], - resolved); - } else if (resolved == null) { - // it is probably an implicit declaration, so find the corresponding pattern - Pattern old = decl[i].getPattern(); - Pattern current = resolver.findPatternByIndex(old.getIndex()); - if (current != null && old != current) { - resolved = new Declaration(decl[i].getIdentifier(), decl[i].getExtractor(), - current); - constraint.replaceDeclaration(decl[i], resolved); - } - } - } + replaceDeclarations( resolver, pattern, constraint ); } + } else if ( element instanceof EvalCondition ) { processEvalCondition(resolver, (EvalCondition) element); + } else if ( element instanceof Accumulate ) { for ( RuleConditionElement rce : element.getNestedElements() ) { - processElement( resolver, - contextStack, - rce ); + processElement( resolver, contextStack, rce ); } - ((Accumulate)element).resetInnerDeclarationCache(); + Accumulate accumulate = (Accumulate)element; + replaceDeclarations( resolver, accumulate ); + accumulate.resetInnerDeclarationCache(); + } else if ( element instanceof From ) { DataProvider provider = ((From) element).getDataProvider(); Declaration[] decl = provider.getRequiredDeclarations(); for (Declaration aDecl : decl) { - Declaration resolved = resolver.getDeclaration(null, - aDecl.getIdentifier()); + Declaration resolved = resolver.getDeclaration(null, aDecl.getIdentifier()); if (resolved != null && resolved != aDecl) { - provider.replaceDeclaration(aDecl, - resolved); + provider.replaceDeclaration(aDecl, resolved); } else if (resolved == null) { // it is probably an implicit declaration, so find the corresponding pattern Pattern old = aDecl.getPattern(); Pattern current = resolver.findPatternByIndex(old.getIndex()); if (current != null && old != current) { - resolved = new Declaration(aDecl.getIdentifier(), - aDecl.getExtractor(), - current); - provider.replaceDeclaration(aDecl, - resolved); + resolved = new Declaration(aDecl.getIdentifier(), aDecl.getExtractor(), current); + provider.replaceDeclaration(aDecl, resolved); } } } + } else if ( element instanceof QueryElement ) { QueryElement qe = ( QueryElement ) element; Pattern pattern = qe.getResultPattern(); @@ -254,7 +218,6 @@ private void processElement(final DeclarationScopeResolver resolver, } } - List<Integer> varIndexes = asList( qe.getVariableIndexes() ); for ( int i = 0; i < qe.getDeclIndexes().length; i++ ) { Declaration declr = (Declaration) qe.getArgTemplate()[qe.getDeclIndexes()[i]]; @@ -272,15 +235,16 @@ private void processElement(final DeclarationScopeResolver resolver, ArrayElementReader reader = new ArrayElementReader( new SelfReferenceClassFieldReader(Object[].class, "this"), qe.getDeclIndexes()[i], resolved.getExtractor().getExtractToClass() ); - - declr.setReadAccessor( reader ); + declr.setReadAccessor( reader ); varIndexes.add( qe.getDeclIndexes()[i] ); } } qe.setVariableIndexes( toIntArray( varIndexes ) ); + } else if ( element instanceof ConditionalBranch ) { processBranch( resolver, (ConditionalBranch) element ); + } else { contextStack.push( element ); for (RuleConditionElement ruleConditionElement : element.getNestedElements()) { @@ -292,6 +256,59 @@ private void 
processElement(final DeclarationScopeResolver resolver, } } + private void replaceDeclarations( DeclarationScopeResolver resolver, Pattern pattern, Constraint constraint ) { + Declaration[] decl = constraint.getRequiredDeclarations(); + for ( Declaration aDecl : decl ) { + Declaration resolved = resolver.getDeclaration( null, + aDecl.getIdentifier() ); + + if ( constraint instanceof MvelConstraint && ( (MvelConstraint) constraint ).isUnification() ) { + if ( ClassObjectType.DroolsQuery_ObjectType.isAssignableFrom( resolved.getPattern().getObjectType() ) ) { + Declaration redeclaredDeclr = new Declaration( resolved.getIdentifier(), ( (MvelConstraint) constraint ).getFieldExtractor(), pattern, false ); + pattern.addDeclaration( redeclaredDeclr ); + } else if ( resolved.getPattern() != pattern ) { + ( (MvelConstraint) constraint ).unsetUnification(); + } + } + + if ( resolved != null && resolved != aDecl && resolved.getPattern() != pattern ) { + constraint.replaceDeclaration( aDecl, + resolved ); + } else if ( resolved == null ) { + // it is probably an implicit declaration, so find the corresponding pattern + Pattern old = aDecl.getPattern(); + Pattern current = resolver.findPatternByIndex( old.getIndex() ); + if ( current != null && old != current ) { + resolved = new Declaration( aDecl.getIdentifier(), aDecl.getExtractor(), + current ); + constraint.replaceDeclaration( aDecl, resolved ); + } + } + } + } + + private void replaceDeclarations( DeclarationScopeResolver resolver, Accumulate accumulate ) { + Declaration[] decl = accumulate.getRequiredDeclarations(); + for ( Declaration aDecl : decl ) { + Declaration resolved = resolver.getDeclaration( null, + aDecl.getIdentifier() ); + + if ( resolved != null && resolved != aDecl ) { + accumulate.replaceDeclaration( aDecl, + resolved ); + } else if ( resolved == null ) { + // it is probably an implicit declaration, so find the corresponding pattern + Pattern old = aDecl.getPattern(); + Pattern current = resolver.findPatternByIndex( old.getIndex() ); + if ( current != null && old != current ) { + resolved = new Declaration( aDecl.getIdentifier(), aDecl.getExtractor(), + current ); + accumulate.replaceDeclaration( aDecl, resolved ); + } + } + } + } + private static List<Integer> asList(int[] ints) { List<Integer> list = new ArrayList<Integer>(ints.length); for ( int i : ints ) { @@ -309,13 +326,12 @@ private static int[] toIntArray(List<Integer> list) { } private void processEvalCondition(DeclarationScopeResolver resolver, EvalCondition element) { - Declaration[] decl = ((EvalCondition) element).getRequiredDeclarations(); + Declaration[] decl = element.getRequiredDeclarations(); for (Declaration aDecl : decl) { Declaration resolved = resolver.getDeclaration(null, aDecl.getIdentifier()); if (resolved != null && resolved != aDecl) { - ((EvalCondition) element).replaceDeclaration(aDecl, - resolved); + element.replaceDeclaration( aDecl, resolved ); } } } @@ -351,8 +367,7 @@ private void processTree(final GroupElement ce, boolean[] result) throws Invalid // first we elimininate any redundancy ce.pack(); - Object[] children = (Object[]) ce.getChildren().toArray(); - for (Object child : children) { + for (Object child : ce.getChildren().toArray()) { if (child instanceof GroupElement) { final GroupElement group = (GroupElement) child; @@ -375,7 +390,7 @@ private void processTree(final GroupElement ce, boolean[] result) throws Invalid } void applyOrTransformation(final GroupElement parent) throws InvalidPatternException { - final Transformation 
transformation = (Transformation) this.orTransformations.get( parent.getType() ); + final Transformation transformation = this.orTransformations.get( parent.getType() ); if ( transformation == null ) { throw new RuntimeException( "applyOrTransformation could not find transformation for parent '" + parent.getType() + "' and child 'OR'" ); @@ -419,9 +434,9 @@ class AndOrTransformation Transformation { public void transform(final GroupElement parent) throws InvalidPatternException { - final List orsList = new ArrayList(); + final List<GroupElement> orsList = new ArrayList<GroupElement>(); // must keep order, so, using array - final Object[] others = new Object[parent.getChildren().size()]; + final RuleConditionElement[] others = new RuleConditionElement[parent.getChildren().size()]; // first we split children as OR or not OR int permutations = 1; @@ -429,7 +444,7 @@ public void transform(final GroupElement parent) throws InvalidPatternException for (final RuleConditionElement child : parent.getChildren()) { if ((child instanceof GroupElement) && ((GroupElement) child).isOr()) { permutations *= ((GroupElement) child).getChildren().size(); - orsList.add(child); + orsList.add((GroupElement)child); } else { others[index] = child; } @@ -441,8 +456,7 @@ public void transform(final GroupElement parent) throws InvalidPatternException parent.getChildren().clear(); // prepare arrays and indexes to calculate permutation - final GroupElement[] ors = (GroupElement[]) orsList.toArray( new GroupElement[orsList.size()] ); - final int[] indexes = new int[ors.length]; + final int[] indexes = new int[orsList.size()]; // now we know how many permutations we will have, so create it for ( int i = 1; i <= permutations; i++ ) { @@ -450,14 +464,15 @@ public void transform(final GroupElement parent) throws InvalidPatternException // create the actual permutations int mod = 1; - for ( int j = ors.length - 1; j >= 0; j-- ) { + for ( int j = orsList.size() - 1; j >= 0; j-- ) { + GroupElement or = orsList.get(j); // we must insert at the beginning to keep the order and.getChildren().add(0, - ors[j].getChildren().get(indexes[j]).clone()); + or.getChildren().get(indexes[j]).clone()); if ( (i % mod) == 0 ) { - indexes[j] = (indexes[j] + 1) % ors[j].getChildren().size(); + indexes[j] = (indexes[j] + 1) % or.getChildren().size(); } - mod *= ors[j].getChildren().size(); + mod *= or.getChildren().size(); } // elements originally outside OR will be in every permutation, so add them @@ -467,8 +482,7 @@ public void transform(final GroupElement parent) throws InvalidPatternException // always add clone of them to avoid offset conflicts in declarations // HERE IS THE MESSY PROBLEM: need to change further references to the appropriate cloned ref - and.getChildren().add( j, - (RuleConditionElement) ((RuleConditionElement) others[j]).clone() ); + and.getChildren().add( j, others[j].clone() ); } } parent.addChild( and ); diff --git a/drools-core/src/main/java/org/drools/core/rule/MultiAccumulate.java b/drools-core/src/main/java/org/drools/core/rule/MultiAccumulate.java index 857dc286b8c..5fe9e858f51 100644 --- a/drools-core/src/main/java/org/drools/core/rule/MultiAccumulate.java +++ b/drools-core/src/main/java/org/drools/core/rule/MultiAccumulate.java @@ -16,6 +16,7 @@ package org.drools.core.rule; import org.drools.core.WorkingMemory; +import org.drools.core.base.accumulators.MVELAccumulatorFunctionExecutor; import org.drools.core.common.InternalFactHandle; import org.drools.core.spi.Accumulator; import 
org.drools.core.spi.CompiledInvoker; @@ -165,6 +166,14 @@ public Object[] getResult(final Object workingMemoryContext, } } + protected void replaceAccumulatorDeclaration(Declaration declaration, Declaration resolved) { + for (Accumulator accumulator : accumulators) { + if ( accumulator instanceof MVELAccumulatorFunctionExecutor ) { + ( (MVELAccumulatorFunctionExecutor) accumulator ).replaceDeclaration( declaration, resolved ); + } + } + } + public MultiAccumulate clone() { RuleConditionElement clonedSource = source instanceof GroupElement ? ((GroupElement) source).cloneOnlyGroup() : source.clone(); MultiAccumulate clone = new MultiAccumulate( clonedSource, diff --git a/drools-core/src/main/java/org/drools/core/rule/Pattern.java b/drools-core/src/main/java/org/drools/core/rule/Pattern.java index 68b83f0df31..2ee21dc537d 100644 --- a/drools-core/src/main/java/org/drools/core/rule/Pattern.java +++ b/drools-core/src/main/java/org/drools/core/rule/Pattern.java @@ -191,9 +191,7 @@ public void setClassObjectType(ClassObjectType objectType) { public Declaration[] getRequiredDeclarations() { Set<Declaration> decl = new HashSet<Declaration>(); for( Constraint constr : this.constraints ) { - for( Declaration d : constr.getRequiredDeclarations() ) { - decl.add( d ); - } + Collections.addAll( decl, constr.getRequiredDeclarations() ); } return decl.toArray( new Declaration[decl.size()] ); } @@ -450,7 +448,7 @@ public boolean equals(final Object object) { return (this.source == null) ? other.source == null : this.source.equals( other.source ); } - public List getNestedElements() { + public List<RuleConditionElement> getNestedElements() { return this.source != null ? Collections.singletonList( this.source ) : Collections.EMPTY_LIST; } @@ -458,9 +456,6 @@ public boolean isPatternScopeDelimiter() { return true; } - /** - * @param constraint - */ private void setConstraintType(final MutableTypeConstraint constraint) { final Declaration[] declarations = constraint.getRequiredDeclarations(); diff --git a/drools-core/src/main/java/org/drools/core/rule/SingleAccumulate.java b/drools-core/src/main/java/org/drools/core/rule/SingleAccumulate.java index 65b0d935978..8ee99d5f915 100644 --- a/drools-core/src/main/java/org/drools/core/rule/SingleAccumulate.java +++ b/drools-core/src/main/java/org/drools/core/rule/SingleAccumulate.java @@ -16,6 +16,7 @@ package org.drools.core.rule; import org.drools.core.WorkingMemory; +import org.drools.core.base.accumulators.MVELAccumulatorFunctionExecutor; import org.drools.core.common.InternalFactHandle; import org.drools.core.spi.Accumulator; import org.drools.core.spi.CompiledInvoker; @@ -153,6 +154,12 @@ public SingleAccumulate clone() { return clone; } + protected void replaceAccumulatorDeclaration(Declaration declaration, Declaration resolved) { + if (accumulator instanceof MVELAccumulatorFunctionExecutor) { + ( (MVELAccumulatorFunctionExecutor) accumulator ).replaceDeclaration( declaration, resolved ); + } + } + public Object createWorkingMemoryContext() { return this.accumulator.createWorkingMemoryContext(); }
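Editor's note: the Accumulate changes in the diff above introduce a template-method hook — replaceDeclaration first rewrites the base class's requiredDeclarations, then delegates to the new abstract replaceAccumulatorDeclaration so SingleAccumulate and MultiAccumulate can forward the swap into their MVEL-backed accumulators. A minimal, self-contained sketch of that shape (the Declaration and accumulator types below are simplified stand-ins, not the real Drools classes):

// Simplified stand-in for org.drools.core.rule.Declaration.
class Declaration { }

abstract class AccumulateSketch {
    protected Declaration[] requiredDeclarations = new Declaration[0];

    // New hook from the diff: each subclass propagates the replacement to its accumulator(s).
    protected abstract void replaceAccumulatorDeclaration(Declaration declaration,
                                                          Declaration resolved);

    public void replaceDeclaration(Declaration declaration, Declaration resolved) {
        for (int i = 0; i < requiredDeclarations.length; i++) {
            if (requiredDeclarations[i] == declaration) {
                requiredDeclarations[i] = resolved;
            }
        }
        // Delegation added by the diff, after the base class has fixed its own array.
        replaceAccumulatorDeclaration(declaration, resolved);
    }
}

class SingleAccumulateSketch extends AccumulateSketch {
    private Object accumulator; // stands in for org.drools.core.spi.Accumulator

    @Override
    protected void replaceAccumulatorDeclaration(Declaration declaration, Declaration resolved) {
        // The real override guards with `instanceof MVELAccumulatorFunctionExecutor`
        // before forwarding, since only MVEL executors hold declaration references.
    }
}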
75ff5a4f93b0e817bf4ad4262421a25b0dac062b
drools
reducing error level to warn to avoid unnecessary warnings when session id cannot be found
p
https://github.com/kiegroup/drools
diff --git a/drools-persistence-jpa/src/main/java/org/drools/persistence/SingleSessionCommandService.java b/drools-persistence-jpa/src/main/java/org/drools/persistence/SingleSessionCommandService.java index 4cce7071b64..ee91b7c8b4d 100644 --- a/drools-persistence-jpa/src/main/java/org/drools/persistence/SingleSessionCommandService.java +++ b/drools-persistence-jpa/src/main/java/org/drools/persistence/SingleSessionCommandService.java @@ -400,7 +400,7 @@ public synchronized <T> T execute(Command<T> command) { private void rollbackTransaction(Exception t1, boolean transactionOwner) { try { - logger.error( "Could not commit session", + logger.warn( "Could not commit session", t1 ); txm.rollback( transactionOwner ); } catch ( Exception t2 ) {
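Editor's note: a minimal sketch of the logging pattern this one-line change applies, assuming SLF4J: a failure the service recovers from by rolling back (such as a missing session id) is reported at WARN, while a failure of the rollback itself remains ERROR. The TxManager interface is a hypothetical stand-in for the real transaction manager.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class RollbackLoggingSketch {
    private static final Logger logger = LoggerFactory.getLogger(RollbackLoggingSketch.class);

    // Hypothetical stand-in for the transaction manager used in the diff.
    interface TxManager { void rollback(boolean owner) throws Exception; }

    void rollbackTransaction(Exception t1, boolean transactionOwner, TxManager txm) {
        try {
            logger.warn("Could not commit session", t1); // was logger.error(...)
            txm.rollback(transactionOwner);
        } catch (Exception t2) {
            logger.error("Could not rollback", t2); // a failed rollback is still an error
        }
    }
}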
d1bdfa2c80fa011aef572a598e7a1e0671f70e25
intellij-community
cleanup
p
https://github.com/JetBrains/intellij-community
diff --git a/platform/platform-impl/src/com/intellij/openapi/editor/actions/ToggleShowWhitespacesAction.java b/platform/platform-impl/src/com/intellij/openapi/editor/actions/ToggleShowWhitespacesAction.java index d943dc8a18f39..a46af300fc8d7 100644 --- a/platform/platform-impl/src/com/intellij/openapi/editor/actions/ToggleShowWhitespacesAction.java +++ b/platform/platform-impl/src/com/intellij/openapi/editor/actions/ToggleShowWhitespacesAction.java @@ -12,11 +12,14 @@ import com.intellij.openapi.actionSystem.PlatformDataKeys; import com.intellij.openapi.actionSystem.ToggleAction; import com.intellij.openapi.editor.Editor; +import org.jetbrains.annotations.Nullable; public class ToggleShowWhitespacesAction extends ToggleAction { public void setSelected(AnActionEvent e, boolean state) { - getEditor(e).getSettings().setWhitespacesShown(state); - getEditor(e).getComponent().repaint(); + final Editor editor = getEditor(e); + assert editor != null; + editor.getSettings().setWhitespacesShown(state); + editor.getComponent().repaint(); } public boolean isSelected(AnActionEvent e) { @@ -24,6 +27,7 @@ public boolean isSelected(AnActionEvent e) { return editor != null && editor.getSettings().isWhitespacesShown(); } + @Nullable private static Editor getEditor(AnActionEvent e) { return e.getData(PlatformDataKeys.EDITOR); }
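Editor's note: a small sketch of the null-handling convention the cleanup above applies, assuming the usual IntelliJ style — annotate the possibly-null accessor with @Nullable, and in a caller that can prove non-null, fetch once into a local and assert rather than dereferencing repeated lookups. The class and method names here are placeholders.

import org.jetbrains.annotations.Nullable;

class NullableAccessorSketch {
    @Nullable
    private static String findValue(boolean present) {
        return present ? "value" : null;
    }

    static void useValue() {
        final String value = findValue(true);
        assert value != null; // this caller knows the path always yields a value
        System.out.println(value.length()); // one checked local, no second lookup
    }
}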
01327d7136472ba8b720f6c788b17f8527960d32
elasticsearch
Facets: deprecation. Users are encouraged to move to the new aggregation framework that was introduced in Elasticsearch 1.0. Close #6485
p
https://github.com/elastic/elasticsearch
diff --git a/docs/reference/search/aggregations.asciidoc b/docs/reference/search/aggregations.asciidoc index 5b8cedf0bf4a8..4253bf6f53001 100644 --- a/docs/reference/search/aggregations.asciidoc +++ b/docs/reference/search/aggregations.asciidoc @@ -10,14 +10,6 @@ This context is defined by the executed query in combination with the different (filtered queries, top-level filters, and facet level filters). While powerful, their implementation is not designed from the ground up to support complex aggregations and is thus limited. -.Are facets deprecated? -********************************** -As the functionality facets offer is a subset of the one offered by aggregations, over time, we would like to -see users move to aggregations for all realtime data analytics. That said, we are well aware that such -transitions/migrations take time, and for this reason we are keeping facets around for the time being. -Facets are not officially deprecated yet but are likely to be in the future. -********************************** - The aggregations module breaks the barriers the current facet implementation put in place. The new name ("Aggregations") also indicates the intention here - a generic yet extremely powerful framework for building aggregations - any types of aggregations. diff --git a/docs/reference/search/facets.asciidoc b/docs/reference/search/facets.asciidoc index a6529a4421085..774727073fe73 100644 --- a/docs/reference/search/facets.asciidoc +++ b/docs/reference/search/facets.asciidoc @@ -1,6 +1,8 @@ [[search-facets]] == Facets +include::facets/deprecated.asciidoc[] + The usual purpose of a full-text search engine is to return a small number of documents matching your query. diff --git a/docs/reference/search/facets/date-histogram-facet.asciidoc b/docs/reference/search/facets/date-histogram-facet.asciidoc index 47e8c1c094db5..b90ffc1ef0ef5 100644 --- a/docs/reference/search/facets/date-histogram-facet.asciidoc +++ b/docs/reference/search/facets/date-histogram-facet.asciidoc @@ -1,6 +1,8 @@ [[search-facets-date-histogram-facet]] === Date Histogram Facet +include::deprecated.asciidoc[] + A specific histogram facet that can work with `date` field types enhancing it over the regular <<search-facets-histogram-facet,histogram diff --git a/docs/reference/search/facets/deprecated.asciidoc b/docs/reference/search/facets/deprecated.asciidoc new file mode 100644 index 0000000000000..51a356179e784 --- /dev/null +++ b/docs/reference/search/facets/deprecated.asciidoc @@ -0,0 +1,3 @@ +[WARNING] +Facets are deprecated and will be removed in a future release. You are +encouraged to migrate to <<search-aggregations, aggregations>> instead. diff --git a/docs/reference/search/facets/filter-facet.asciidoc b/docs/reference/search/facets/filter-facet.asciidoc index 74dece4bb3a41..84af9bbcecf25 100644 --- a/docs/reference/search/facets/filter-facet.asciidoc +++ b/docs/reference/search/facets/filter-facet.asciidoc @@ -1,6 +1,8 @@ [[search-facets-filter-facet]] === Filter Facets +include::deprecated.asciidoc[] + A filter facet (not to be confused with a <<search-facets,facet filter>>) allows you to return a count of the hits matching the filter. 
The filter itself can be diff --git a/docs/reference/search/facets/geo-distance-facet.asciidoc b/docs/reference/search/facets/geo-distance-facet.asciidoc index e56f4bda73685..4094b46aa556f 100644 --- a/docs/reference/search/facets/geo-distance-facet.asciidoc +++ b/docs/reference/search/facets/geo-distance-facet.asciidoc @@ -1,6 +1,8 @@ [[search-facets-geo-distance-facet]] === Geo Distance Facets +include::deprecated.asciidoc[] + The geo_distance facet is a facet providing information for ranges of distances from a provided geo_point including count of the number of hits that fall within each range, and aggregation information (like diff --git a/docs/reference/search/facets/histogram-facet.asciidoc b/docs/reference/search/facets/histogram-facet.asciidoc index 284a058584d41..a90fd0cf97102 100644 --- a/docs/reference/search/facets/histogram-facet.asciidoc +++ b/docs/reference/search/facets/histogram-facet.asciidoc @@ -1,6 +1,8 @@ [[search-facets-histogram-facet]] === Histogram Facets +include::deprecated.asciidoc[] + The histogram facet works with numeric data by building a histogram across intervals of the field values. Each value is "rounded" into an interval (or placed in a bucket), and statistics are provided per diff --git a/docs/reference/search/facets/query-facet.asciidoc b/docs/reference/search/facets/query-facet.asciidoc index 3f360da4bdf65..acb624bc83ce8 100644 --- a/docs/reference/search/facets/query-facet.asciidoc +++ b/docs/reference/search/facets/query-facet.asciidoc @@ -1,6 +1,8 @@ [[search-facets-query-facet]] === Query Facets +include::deprecated.asciidoc[] + A facet query allows to return a count of the hits matching the facet query. The query itself can be expressed using the Query DSL. For example: diff --git a/docs/reference/search/facets/range-facet.asciidoc b/docs/reference/search/facets/range-facet.asciidoc index fa263ee55957d..b325e077b11b1 100644 --- a/docs/reference/search/facets/range-facet.asciidoc +++ b/docs/reference/search/facets/range-facet.asciidoc @@ -1,6 +1,8 @@ [[search-facets-range-facet]] === Range Facets +include::deprecated.asciidoc[] + `range` facet allows to specify a set of ranges and get both the number of docs (count) that fall within each range, and aggregated data either based on the field, or using another field. Here is a simple example: diff --git a/docs/reference/search/facets/statistical-facet.asciidoc b/docs/reference/search/facets/statistical-facet.asciidoc index dfa51dfe8d510..bfedcd59d753a 100644 --- a/docs/reference/search/facets/statistical-facet.asciidoc +++ b/docs/reference/search/facets/statistical-facet.asciidoc @@ -1,6 +1,8 @@ [[search-facets-statistical-facet]] === Statistical Facet +include::deprecated.asciidoc[] + Statistical facet allows to compute statistical data on a numeric fields. The statistical data include count, total, sum of squares, mean (average), minimum, maximum, variance, and standard deviation. Here is diff --git a/docs/reference/search/facets/terms-facet.asciidoc b/docs/reference/search/facets/terms-facet.asciidoc index 29b24dcdf684e..8c45907dfc85d 100644 --- a/docs/reference/search/facets/terms-facet.asciidoc +++ b/docs/reference/search/facets/terms-facet.asciidoc @@ -1,6 +1,8 @@ [[search-facets-terms-facet]] === Terms Facet +include::deprecated.asciidoc[] + Allow to specify field facets that return the N most frequent terms. 
For example: diff --git a/docs/reference/search/facets/terms-stats-facet.asciidoc b/docs/reference/search/facets/terms-stats-facet.asciidoc index cd4875789f5d4..e5f6ed73615d4 100644 --- a/docs/reference/search/facets/terms-stats-facet.asciidoc +++ b/docs/reference/search/facets/terms-stats-facet.asciidoc @@ -1,6 +1,8 @@ [[search-facets-terms-stats-facet]] === Terms Stats Facet +include::deprecated.asciidoc[] + The `terms_stats` facet combines both the <<search-facets-terms-facet,terms>> and <<search-facets-statistical-facet,statistical>> diff --git a/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java b/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java index 4a12b37a4c146..e51f33d3ecbb2 100644 --- a/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java +++ b/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java @@ -525,7 +525,9 @@ public SearchRequestBuilder addFields(String... fields) { /** * Adds a facet to the search operation. + * @deprecated Facets are deprecated and will be removed in a future release. Please use aggregations instead. */ + @Deprecated public SearchRequestBuilder addFacet(FacetBuilder facet) { sourceBuilder().facet(facet); return this; @@ -533,7 +535,9 @@ public SearchRequestBuilder addFacet(FacetBuilder facet) { /** * Sets a raw (xcontent) binary representation of facets to use. + * @deprecated Facets are deprecated and will be removed in a future release. Please use aggregations instead. */ + @Deprecated public SearchRequestBuilder setFacets(BytesReference facets) { sourceBuilder().facets(facets); return this; @@ -541,7 +545,9 @@ public SearchRequestBuilder setFacets(BytesReference facets) { /** * Sets a raw (xcontent) binary representation of facets to use. + * @deprecated Facets are deprecated and will be removed in a future release. Please use aggregations instead. */ + @Deprecated public SearchRequestBuilder setFacets(byte[] facets) { sourceBuilder().facets(facets); return this; @@ -549,7 +555,9 @@ public SearchRequestBuilder setFacets(byte[] facets) { /** * Sets a raw (xcontent) binary representation of facets to use. + * @deprecated Facets are deprecated and will be removed in a future release. Please use aggregations instead. */ + @Deprecated public SearchRequestBuilder setFacets(byte[] facets, int facetsOffset, int facetsLength) { sourceBuilder().facets(facets, facetsOffset, facetsLength); return this; @@ -557,7 +565,9 @@ public SearchRequestBuilder setFacets(byte[] facets, int facetsOffset, int facet /** * Sets a raw (xcontent) binary representation of facets to use. + * @deprecated Facets are deprecated and will be removed in a future release. Please use aggregations instead. */ + @Deprecated public SearchRequestBuilder setFacets(XContentBuilder facets) { sourceBuilder().facets(facets); return this; @@ -565,7 +575,9 @@ public SearchRequestBuilder setFacets(XContentBuilder facets) { /** * Sets a raw (xcontent) binary representation of facets to use. + * @deprecated Facets are deprecated and will be removed in a future release. Please use aggregations instead. 
*/ + @Deprecated public SearchRequestBuilder setFacets(Map facets) { sourceBuilder().facets(facets); return this; diff --git a/src/main/java/org/elasticsearch/search/facet/FacetBuilders.java b/src/main/java/org/elasticsearch/search/facet/FacetBuilders.java index dc32ccd83e13f..cb4de4fb13593 100644 --- a/src/main/java/org/elasticsearch/search/facet/FacetBuilders.java +++ b/src/main/java/org/elasticsearch/search/facet/FacetBuilders.java @@ -35,8 +35,9 @@ import org.elasticsearch.search.facet.termsstats.TermsStatsFacetBuilder; /** - * + * @deprecated Facets are deprecated and will be removed in a future release. Please use aggregations instead. */ +@Deprecated public class FacetBuilders { public static QueryFacetBuilder queryFacet(String facetName) {
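Editor's note: a hedged sketch of the migration this deprecation points at, using the 1.x Java client APIs visible in the diff (addFacet/FacetBuilders) and their aggregation counterparts (addAggregation/AggregationBuilders); the index and field names are made up for the example.

import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.client.Client;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.facet.FacetBuilders;

class FacetToAggregationSketch {
    @SuppressWarnings("deprecation")
    static SearchRequestBuilder withFacet(Client client) {
        // Deprecated style: a terms facet counting the most frequent tags.
        return client.prepareSearch("logs")
                .addFacet(FacetBuilders.termsFacet("tags").field("tag"));
    }

    static SearchRequestBuilder withAggregation(Client client) {
        // Replacement: the equivalent terms aggregation.
        return client.prepareSearch("logs")
                .addAggregation(AggregationBuilders.terms("tags").field("tag"));
    }
}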
175d222bfc03ad84023cefb40e48b27356148ec5
hadoop
YARN-2830. Add backwards compatible ContainerId.newInstance constructor. Contributed by Jonathan Eagles. (cherry picked from commit 43cd07b408c6613d2c9aa89203cfa3110d830538)
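Editor's note: the essence of this compatibility fix as a self-contained sketch (simplified types, not the real YARN records) — the widened long-based factory gets the new name newContainerId, and the old int-based newInstance signature is retained as a deprecated delegate so existing callers keep compiling.

final class ContainerIdSketch {
    private final String appAttemptId; // stand-in for ApplicationAttemptId
    private final long containerId;

    private ContainerIdSketch(String appAttemptId, long containerId) {
        this.appAttemptId = appAttemptId;
        this.containerId = containerId;
    }

    // New factory with the widened 64-bit container id.
    public static ContainerIdSketch newContainerId(String appAttemptId, long containerId) {
        return new ContainerIdSketch(appAttemptId, containerId);
    }

    /** @deprecated kept only so pre-existing int-based callers compile; use newContainerId. */
    @Deprecated
    public static ContainerIdSketch newInstance(String appAttemptId, int containerId) {
        return newContainerId(appAttemptId, containerId);
    }
}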
a
https://github.com/apache/hadoop
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java index 19efe171356e9..74dfb39af4966 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java @@ -140,7 +140,7 @@ public void handle(ContainerAllocatorEvent event) { LOG.info("Processing the event " + event.toString()); // Assign the same container ID as the AM ContainerId cID = - ContainerId.newInstance(getContext().getApplicationAttemptId(), + ContainerId.newContainerId(getContext().getApplicationAttemptId(), this.containerId.getContainerId()); Container container = recordFactory.newRecordInstance(Container.class); container.setId(cID); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java index 1edadb9742e07..de35d840b9473 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java @@ -716,7 +716,7 @@ private class TestParams { ApplicationId appId = ApplicationId.newInstance(200, 1); ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(appId, 1); - ContainerId containerId = ContainerId.newInstance(appAttemptId, 1); + ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1); TaskID taskID = TaskID.forName("task_200707121733_0003_m_000005"); TaskAttemptID taskAttemptID = new TaskAttemptID(taskID, 0); JobId jobId = MRBuilderUtils.newJobId(appId, 1); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java index 9885582d88a0d..3100d12ce1499 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java @@ -179,7 +179,7 @@ private static ContainerId getContainerId(ApplicationId applicationId, ApplicationAttemptId appAttemptId = getApplicationAttemptId(applicationId, startCount); ContainerId containerId = - ContainerId.newInstance(appAttemptId, startCount); + ContainerId.newContainerId(appAttemptId, startCount); return containerId; } @@ -565,7 +565,7 @@ protected class MRAppContainerAllocator @Override public void handle(ContainerAllocatorEvent event) { ContainerId cId = - ContainerId.newInstance(getContext().getApplicationAttemptId(), + 
ContainerId.newContainerId(getContext().getApplicationAttemptId(), containerCount++); NodeId nodeId = NodeId.newInstance(NM_HOST, NM_PORT); Resource resource = Resource.newInstance(1234, 2); @@ -773,7 +773,7 @@ public static ContainerId newContainerId(int appId, int appAttemptId, ApplicationId applicationId = ApplicationId.newInstance(timestamp, appId); ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.newInstance(applicationId, appAttemptId); - return ContainerId.newInstance(applicationAttemptId, containerId); + return ContainerId.newContainerId(applicationAttemptId, containerId); } public static ContainerTokenIdentifier newContainerTokenIdentifier( diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java index 160303289568e..744ca103affba 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java @@ -140,7 +140,7 @@ public void run() { if (concurrentRunningTasks < maxConcurrentRunningTasks) { event = eventQueue.take(); ContainerId cId = - ContainerId.newInstance(getContext() + ContainerId.newContainerId(getContext() .getApplicationAttemptId(), containerCount++); //System.out.println("Allocating " + containerCount); @@ -233,7 +233,7 @@ public AllocateResponse allocate(AllocateRequest request) int numContainers = req.getNumContainers(); for (int i = 0; i < numContainers; i++) { ContainerId containerId = - ContainerId.newInstance( + ContainerId.newContainerId( getContext().getApplicationAttemptId(), request.getResponseId() + i); containers.add(Container.newInstance(containerId, diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java index 19ac0db98dbb8..fd9c094901ae4 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java @@ -183,7 +183,7 @@ public static TaskReport newTaskReport(TaskId id) { public static TaskAttemptReport newTaskAttemptReport(TaskAttemptId id) { ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance( id.getTaskId().getJobId().getAppId(), 0); - ContainerId containerId = ContainerId.newInstance(appAttemptId, 0); + ContainerId containerId = ContainerId.newContainerId(appAttemptId, 0); TaskAttemptReport report = Records.newRecord(TaskAttemptReport.class); report.setTaskAttemptId(id); report @@ -315,7 +315,7 @@ public ContainerId getAssignedContainerID() { ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(taid.getTaskId().getJobId() .getAppId(), 0); - ContainerId id = ContainerId.newInstance(appAttemptId, 0); + ContainerId id = ContainerId.newContainerId(appAttemptId, 0); return id; } @@ -640,7 +640,7 @@ public void setQueueName(String queueName) { private static 
AMInfo createAMInfo(int attempt) { ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance( ApplicationId.newInstance(100, 1), attempt); - ContainerId containerId = ContainerId.newInstance(appAttemptId, 1); + ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1); return MRBuilderUtils.newAMInfo(appAttemptId, System.currentTimeMillis(), containerId, NM_HOST, NM_PORT, NM_HTTP_PORT); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java index d356eca623d2e..70437c1ba36cf 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java @@ -382,7 +382,7 @@ public void testMRAppMasterCredentials() throws Exception { ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.newInstance(appId, 1); ContainerId containerId = - ContainerId.newInstance(applicationAttemptId, 546); + ContainerId.newContainerId(applicationAttemptId, 546); String userName = UserGroupInformation.getCurrentUser().getShortUserName(); // Create staging dir, so MRAppMaster doesn't barf. diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java index 1037e7c2ba3dd..fc64996a8e676 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java @@ -253,7 +253,7 @@ private class TestMRApp extends MRAppMaster { public TestMRApp(ApplicationAttemptId applicationAttemptId, ContainerAllocator allocator) { - super(applicationAttemptId, ContainerId.newInstance( + super(applicationAttemptId, ContainerId.newContainerId( applicationAttemptId, 1), "testhost", 2222, 3333, System.currentTimeMillis()); this.allocator = allocator; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskAttempt.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskAttempt.java index 13303449c879e..1807c1c3e099f 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskAttempt.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskAttempt.java @@ -359,7 +359,7 @@ public void testLaunchFailedWhileKilling() throws Exception { new SystemClock(), null); NodeId nid = NodeId.newInstance("127.0.0.1", 0); - ContainerId contId = ContainerId.newInstance(appAttemptId, 3); + ContainerId contId = ContainerId.newContainerId(appAttemptId, 3); Container container 
= mock(Container.class); when(container.getId()).thenReturn(contId); when(container.getNodeId()).thenReturn(nid); @@ -415,7 +415,7 @@ public void testContainerCleanedWhileRunning() throws Exception { new SystemClock(), appCtx); NodeId nid = NodeId.newInstance("127.0.0.2", 0); - ContainerId contId = ContainerId.newInstance(appAttemptId, 3); + ContainerId contId = ContainerId.newContainerId(appAttemptId, 3); Container container = mock(Container.class); when(container.getId()).thenReturn(contId); when(container.getNodeId()).thenReturn(nid); @@ -472,7 +472,7 @@ public void testContainerCleanedWhileCommitting() throws Exception { new SystemClock(), appCtx); NodeId nid = NodeId.newInstance("127.0.0.1", 0); - ContainerId contId = ContainerId.newInstance(appAttemptId, 3); + ContainerId contId = ContainerId.newContainerId(appAttemptId, 3); Container container = mock(Container.class); when(container.getId()).thenReturn(contId); when(container.getNodeId()).thenReturn(nid); @@ -532,7 +532,7 @@ public void testDoubleTooManyFetchFailure() throws Exception { new SystemClock(), appCtx); NodeId nid = NodeId.newInstance("127.0.0.1", 0); - ContainerId contId = ContainerId.newInstance(appAttemptId, 3); + ContainerId contId = ContainerId.newContainerId(appAttemptId, 3); Container container = mock(Container.class); when(container.getId()).thenReturn(contId); when(container.getNodeId()).thenReturn(nid); @@ -599,7 +599,7 @@ public void testAppDiognosticEventOnUnassignedTask() throws Exception { new Token(), new Credentials(), new SystemClock(), appCtx); NodeId nid = NodeId.newInstance("127.0.0.1", 0); - ContainerId contId = ContainerId.newInstance(appAttemptId, 3); + ContainerId contId = ContainerId.newContainerId(appAttemptId, 3); Container container = mock(Container.class); when(container.getId()).thenReturn(contId); when(container.getNodeId()).thenReturn(nid); @@ -649,7 +649,7 @@ public void testTooManyFetchFailureAfterKill() throws Exception { new SystemClock(), appCtx); NodeId nid = NodeId.newInstance("127.0.0.1", 0); - ContainerId contId = ContainerId.newInstance(appAttemptId, 3); + ContainerId contId = ContainerId.newContainerId(appAttemptId, 3); Container container = mock(Container.class); when(container.getId()).thenReturn(contId); when(container.getNodeId()).thenReturn(nid); @@ -714,7 +714,7 @@ public void testAppDiognosticEventOnNewTask() throws Exception { new Token(), new Credentials(), new SystemClock(), appCtx); NodeId nid = NodeId.newInstance("127.0.0.1", 0); - ContainerId contId = ContainerId.newInstance(appAttemptId, 3); + ContainerId contId = ContainerId.newContainerId(appAttemptId, 3); Container container = mock(Container.class); when(container.getId()).thenReturn(contId); when(container.getNodeId()).thenReturn(nid); @@ -760,7 +760,7 @@ public void testFetchFailureAttemptFinishTime() throws Exception{ new SystemClock(), appCtx); NodeId nid = NodeId.newInstance("127.0.0.1", 0); - ContainerId contId = ContainerId.newInstance(appAttemptId, 3); + ContainerId contId = ContainerId.newContainerId(appAttemptId, 3); Container container = mock(Container.class); when(container.getId()).thenReturn(contId); when(container.getNodeId()).thenReturn(nid); @@ -830,7 +830,7 @@ public void testContainerKillAfterAssigned() throws Exception { new Credentials(), new SystemClock(), appCtx); NodeId nid = NodeId.newInstance("127.0.0.2", 0); - ContainerId contId = ContainerId.newInstance(appAttemptId, 3); + ContainerId contId = ContainerId.newContainerId(appAttemptId, 3); Container container = mock(Container.class); 
when(container.getId()).thenReturn(contId); when(container.getNodeId()).thenReturn(nid); @@ -884,7 +884,7 @@ public void testContainerKillWhileRunning() throws Exception { new Credentials(), new SystemClock(), appCtx); NodeId nid = NodeId.newInstance("127.0.0.2", 0); - ContainerId contId = ContainerId.newInstance(appAttemptId, 3); + ContainerId contId = ContainerId.newContainerId(appAttemptId, 3); Container container = mock(Container.class); when(container.getId()).thenReturn(contId); when(container.getNodeId()).thenReturn(nid); @@ -941,7 +941,7 @@ public void testContainerKillWhileCommitPending() throws Exception { new Credentials(), new SystemClock(), appCtx); NodeId nid = NodeId.newInstance("127.0.0.2", 0); - ContainerId contId = ContainerId.newInstance(appAttemptId, 3); + ContainerId contId = ContainerId.newContainerId(appAttemptId, 3); Container container = mock(Container.class); when(container.getId()).thenReturn(contId); when(container.getNodeId()).thenReturn(nid); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncher.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncher.java index f2c1841e1b0a2..dc1d72f89f0f7 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncher.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncher.java @@ -115,7 +115,7 @@ public void testPoolSize() throws InterruptedException { containerLauncher.expectedCorePoolSize = ContainerLauncherImpl.INITIAL_POOL_SIZE; for (int i = 0; i < 10; i++) { - ContainerId containerId = ContainerId.newInstance(appAttemptId, i); + ContainerId containerId = ContainerId.newContainerId(appAttemptId, i); TaskAttemptId taskAttemptId = MRBuilderUtils.newTaskAttemptId(taskId, i); containerLauncher.handle(new ContainerLauncherEvent(taskAttemptId, containerId, "host" + i + ":1234", null, @@ -137,7 +137,7 @@ public void testPoolSize() throws InterruptedException { Assert.assertEquals(10, containerLauncher.numEventsProcessed.get()); containerLauncher.finishEventHandling = false; for (int i = 0; i < 10; i++) { - ContainerId containerId = ContainerId.newInstance(appAttemptId, + ContainerId containerId = ContainerId.newContainerId(appAttemptId, i + 10); TaskAttemptId taskAttemptId = MRBuilderUtils.newTaskAttemptId(taskId, i + 10); @@ -154,7 +154,7 @@ public void testPoolSize() throws InterruptedException { // Core pool size should be 21 but the live pool size should be only 11. 
containerLauncher.expectedCorePoolSize = 11 + ContainerLauncherImpl.INITIAL_POOL_SIZE; containerLauncher.finishEventHandling = false; - ContainerId containerId = ContainerId.newInstance(appAttemptId, 21); + ContainerId containerId = ContainerId.newContainerId(appAttemptId, 21); TaskAttemptId taskAttemptId = MRBuilderUtils.newTaskAttemptId(taskId, 21); containerLauncher.handle(new ContainerLauncherEvent(taskAttemptId, containerId, "host11:1234", null, @@ -174,7 +174,7 @@ public void testPoolLimits() throws InterruptedException { JobId jobId = MRBuilderUtils.newJobId(appId, 8); TaskId taskId = MRBuilderUtils.newTaskId(jobId, 9, TaskType.MAP); TaskAttemptId taskAttemptId = MRBuilderUtils.newTaskAttemptId(taskId, 0); - ContainerId containerId = ContainerId.newInstance(appAttemptId, 10); + ContainerId containerId = ContainerId.newContainerId(appAttemptId, 10); AppContext context = mock(AppContext.class); CustomContainerLauncher containerLauncher = new CustomContainerLauncher( diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncherImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncherImpl.java index 74e532a2b4d46..184f1b244d549 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncherImpl.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncherImpl.java @@ -139,7 +139,7 @@ public void waitForPoolToIdle() throws InterruptedException { public static ContainerId makeContainerId(long ts, int appId, int attemptId, int id) { - return ContainerId.newInstance( + return ContainerId.newContainerId( ApplicationAttemptId.newInstance( ApplicationId.newInstance(ts, appId), attemptId), id); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/rm/TestRMContainerAllocator.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/rm/TestRMContainerAllocator.java index b8e13ff96bd57..3a7343c07b2d9 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/rm/TestRMContainerAllocator.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/rm/TestRMContainerAllocator.java @@ -688,7 +688,7 @@ public void testReportedAppProgress() throws Exception { rm.sendAMLaunched(appAttemptId); rmDispatcher.await(); - MRApp mrApp = new MRApp(appAttemptId, ContainerId.newInstance( + MRApp mrApp = new MRApp(appAttemptId, ContainerId.newContainerId( appAttemptId, 0), 10, 10, false, this.getClass().getName(), true, 1) { @Override protected Dispatcher createDispatcher() { @@ -840,7 +840,7 @@ public void testReportedAppProgressWithOnlyMaps() throws Exception { rm.sendAMLaunched(appAttemptId); rmDispatcher.await(); - MRApp mrApp = new MRApp(appAttemptId, ContainerId.newInstance( + MRApp mrApp = new MRApp(appAttemptId, ContainerId.newContainerId( appAttemptId, 0), 10, 0, false, this.getClass().getName(), true, 1) { @Override protected Dispatcher createDispatcher() { @@ -2021,7 
+2021,7 @@ public void testCompletedContainerEvent() { ApplicationId applicationId = ApplicationId.newInstance(1, 1); ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.newInstance( applicationId, 1); - ContainerId containerId = ContainerId.newInstance(applicationAttemptId, 1); + ContainerId containerId = ContainerId.newContainerId(applicationAttemptId, 1); ContainerStatus status = ContainerStatus.newInstance( containerId, ContainerState.RUNNING, "", 0); @@ -2038,7 +2038,7 @@ public void testCompletedContainerEvent() { abortedStatus, attemptId); Assert.assertEquals(TaskAttemptEventType.TA_KILL, abortedEvent.getType()); - ContainerId containerId2 = ContainerId.newInstance(applicationAttemptId, 2); + ContainerId containerId2 = ContainerId.newContainerId(applicationAttemptId, 2); ContainerStatus status2 = ContainerStatus.newInstance(containerId2, ContainerState.RUNNING, "", 0); @@ -2077,7 +2077,7 @@ public void testUnregistrationOnlyIfRegistered() throws Exception { rmDispatcher.await(); MRApp mrApp = - new MRApp(appAttemptId, ContainerId.newInstance(appAttemptId, 0), 10, + new MRApp(appAttemptId, ContainerId.newContainerId(appAttemptId, 0), 10, 0, false, this.getClass().getName(), true, 1) { @Override protected Dispatcher createDispatcher() { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestBlocks.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestBlocks.java index 82d578aa12a79..723136769e61f 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestBlocks.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestBlocks.java @@ -133,7 +133,7 @@ public void testAttemptsBlock() { ApplicationId appId = ApplicationIdPBImpl.newInstance(0, 5); ApplicationAttemptId appAttemptId = ApplicationAttemptIdPBImpl.newInstance(appId, 1); - ContainerId containerId = ContainerIdPBImpl.newInstance(appAttemptId, 1); + ContainerId containerId = ContainerIdPBImpl.newContainerId(appAttemptId, 1); when(attempt.getAssignedContainerID()).thenReturn(containerId); when(attempt.getAssignedContainerMgrAddress()).thenReturn( diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobsWithHistoryService.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobsWithHistoryService.java index 9fba91dbb1ac2..f9236a926ae83 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobsWithHistoryService.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobsWithHistoryService.java @@ -169,7 +169,7 @@ private void verifyJobReport(JobReport jobReport, JobId jobId) { Assert.assertEquals(1, amInfos.size()); AMInfo amInfo = amInfos.get(0); ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(jobId.getAppId(), 1); - ContainerId amContainerId = ContainerId.newInstance(appAttemptId, 1); + ContainerId amContainerId = ContainerId.newContainerId(appAttemptId, 1); 
Assert.assertEquals(appAttemptId, amInfo.getAppAttemptId()); Assert.assertEquals(amContainerId, amInfo.getContainerId()); Assert.assertTrue(jobReport.getSubmitTime() > 0); diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt index 2245a79531bba..78d501096b2ff 100644 --- a/hadoop-yarn-project/CHANGES.txt +++ b/hadoop-yarn-project/CHANGES.txt @@ -895,6 +895,9 @@ Release 2.6.0 - UNRELEASED YARN-2607. Fixed issues in TestDistributedShell. (Wangda Tan via vinodkv) + YARN-2830. Add backwords compatible ContainerId.newInstance constructor. + (jeagles via acmurthy) + Release 2.5.2 - UNRELEASED INCOMPATIBLE CHANGES diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java index 5499a19646e73..5d0d65a966a4d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java @@ -42,7 +42,7 @@ public abstract class ContainerId implements Comparable<ContainerId>{ @Private @Unstable - public static ContainerId newInstance(ApplicationAttemptId appAttemptId, + public static ContainerId newContainerId(ApplicationAttemptId appAttemptId, long containerId) { ContainerId id = Records.newRecord(ContainerId.class); id.setContainerId(containerId); @@ -51,6 +51,18 @@ public static ContainerId newInstance(ApplicationAttemptId appAttemptId, return id; } + @Private + @Deprecated + @Unstable + public static ContainerId newInstance(ApplicationAttemptId appAttemptId, + int containerId) { + ContainerId id = Records.newRecord(ContainerId.class); + id.setContainerId(containerId); + id.setApplicationAttemptId(appAttemptId); + id.build(); + return id; + } + /** * Get the <code>ApplicationAttemptId</code> of the application to which the * <code>Container</code> was assigned. 
@@ -214,7 +226,7 @@ public static ContainerId fromString(String containerIdStr) {
       }
       long id = Long.parseLong(it.next());
       long cid = (epoch << 40) | id;
-      ContainerId containerId = ContainerId.newInstance(appAttemptID, cid);
+      ContainerId containerId = ContainerId.newContainerId(appAttemptID, cid);
       return containerId;
     } catch (NumberFormatException n) {
       throw new IllegalArgumentException("Invalid ContainerId: "
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/src/main/java/org/apache/hadoop/yarn/applications/unmanagedamlauncher/UnmanagedAMLauncher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/src/main/java/org/apache/hadoop/yarn/applications/unmanagedamlauncher/UnmanagedAMLauncher.java
index 2414a6777f736..d41434e94dcae 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/src/main/java/org/apache/hadoop/yarn/applications/unmanagedamlauncher/UnmanagedAMLauncher.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/src/main/java/org/apache/hadoop/yarn/applications/unmanagedamlauncher/UnmanagedAMLauncher.java
@@ -214,7 +214,7 @@ public void launchAM(ApplicationAttemptId attemptId)
     if(!setClasspath && classpath!=null) {
       envAMList.add("CLASSPATH="+classpath);
     }
-    ContainerId containerId = ContainerId.newInstance(attemptId, 0);
+    ContainerId containerId = ContainerId.newContainerId(attemptId, 0);
 
     String hostname = InetAddress.getLocalHost().getHostName();
     envAMList.add(Environment.CONTAINER_ID.name() + "=" + containerId);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/ProtocolHATestBase.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/ProtocolHATestBase.java
index ec00d45a2f13d..da7d50529ac9f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/ProtocolHATestBase.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/ProtocolHATestBase.java
@@ -667,7 +667,7 @@ public ApplicationAttemptId createFakeApplicationAttemptId() {
   }
 
   public ContainerId createFakeContainerId() {
-    return ContainerId.newInstance(createFakeApplicationAttemptId(), 0);
+    return ContainerId.newContainerId(createFakeApplicationAttemptId(), 0);
   }
 
   public YarnClusterMetrics createFakeYarnClusterMetrics() {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/async/impl/TestAMRMClientAsync.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/async/impl/TestAMRMClientAsync.java
index b00598a5d2ec9..74d4aa47cbcde 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/async/impl/TestAMRMClientAsync.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/async/impl/TestAMRMClientAsync.java
@@ -402,7 +402,7 @@ public static ContainerId newContainerId(int appId, int appAttemptId,
     ApplicationId applicationId = ApplicationId.newInstance(timestamp, appId);
     ApplicationAttemptId applicationAttemptId =
         ApplicationAttemptId.newInstance(applicationId, appAttemptId);
-    return ContainerId.newInstance(applicationAttemptId, containerId);
+    return ContainerId.newContainerId(applicationAttemptId, containerId);
   }
 
   private class TestCallbackHandler implements AMRMClientAsync.CallbackHandler {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/async/impl/TestNMClientAsync.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/async/impl/TestNMClientAsync.java
index 0e059d7540d5e..6f9d41d8d5090 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/async/impl/TestNMClientAsync.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/async/impl/TestNMClientAsync.java
@@ -547,7 +547,7 @@ private Container mockContainer(int i) {
         ApplicationId.newInstance(System.currentTimeMillis(), 1);
     ApplicationAttemptId attemptId =
         ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId containerId = ContainerId.newInstance(attemptId, i);
+    ContainerId containerId = ContainerId.newContainerId(attemptId, i);
     nodeId = NodeId.newInstance("localhost", 0);
     // Create an empty record
     containerToken = recordFactory.newRecordInstance(Token.class);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestAHSClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestAHSClient.java
index d3c182b9cf423..a88189e5c0d84 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestAHSClient.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestAHSClient.java
@@ -157,9 +157,9 @@ public void testGetContainers() throws YarnException, IOException {
     List<ContainerReport> reports = client.getContainers(appAttemptId);
     Assert.assertNotNull(reports);
     Assert.assertEquals(reports.get(0).getContainerId(),
-      (ContainerId.newInstance(appAttemptId, 1)));
+      (ContainerId.newContainerId(appAttemptId, 1)));
     Assert.assertEquals(reports.get(1).getContainerId(),
-      (ContainerId.newInstance(appAttemptId, 2)));
+      (ContainerId.newContainerId(appAttemptId, 2)));
     client.stop();
   }
 
@@ -176,11 +176,11 @@ public void testGetContainerReport() throws YarnException, IOException {
     ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(applicationId, 1);
-    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
+    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
     ContainerReport report = client.getContainerReport(containerId);
     Assert.assertNotNull(report);
     Assert.assertEquals(report.getContainerId().toString(), (ContainerId
-      .newInstance(expectedReports.get(0).getCurrentApplicationAttemptId(), 1))
+      .newContainerId(expectedReports.get(0).getCurrentApplicationAttemptId(), 1))
       .toString());
     client.stop();
   }
@@ -349,7 +349,7 @@ private void createAppReports() {
           "oUrl",
           "diagnostics",
           YarnApplicationAttemptState.FINISHED,
-          ContainerId.newInstance(
+          ContainerId.newContainerId(
             newApplicationReport.getCurrentApplicationAttemptId(), 1));
     appAttempts.add(attempt);
     ApplicationAttemptReport attempt1 =
@@ -361,7 +361,7 @@ private void createAppReports() {
           "oUrl",
           "diagnostics",
           YarnApplicationAttemptState.FINISHED,
-          ContainerId.newInstance(
+          ContainerId.newContainerId(
             newApplicationReport.getCurrentApplicationAttemptId(), 2));
     appAttempts.add(attempt1);
     attempts.put(applicationId, appAttempts);
@@ -369,14 +369,14 @@ private void createAppReports() {
     List<ContainerReport> containerReports = new ArrayList<ContainerReport>();
     ContainerReport container =
         ContainerReport.newInstance(
-          ContainerId.newInstance(attempt.getApplicationAttemptId(), 1),
+          ContainerId.newContainerId(attempt.getApplicationAttemptId(), 1),
           null, NodeId.newInstance("host", 1234), Priority.UNDEFINED, 1234,
           5678, "diagnosticInfo", "logURL", 0, ContainerState.COMPLETE);
     containerReports.add(container);
     ContainerReport container1 =
         ContainerReport.newInstance(
-          ContainerId.newInstance(attempt.getApplicationAttemptId(), 2),
+          ContainerId.newContainerId(attempt.getApplicationAttemptId(), 2),
           null, NodeId.newInstance("host", 1234), Priority.UNDEFINED, 1234,
           5678, "diagnosticInfo", "logURL", 0, ContainerState.COMPLETE);
     containerReports.add(container1);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestAMRMClientOnRMRestart.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestAMRMClientOnRMRestart.java
index ce3086f57020c..108ad377c6b02 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestAMRMClientOnRMRestart.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestAMRMClientOnRMRestart.java
@@ -352,7 +352,7 @@ public void testAMRMClientForUnregisterAMOnRMRestart() throws Exception {
 
     // new NM to represent NM re-register
     nm1 = new MockNM("h1:1234", 10240, rm2.getResourceTrackerService());
-    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
+    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
     NMContainerStatus containerReport =
         NMContainerStatus.newInstance(containerId, ContainerState.RUNNING,
           Resource.newInstance(1024, 1), "recover container", 0,
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestYarnClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestYarnClient.java
index ca7c50a1270af..02f2882155541 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestYarnClient.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestYarnClient.java
@@ -348,9 +348,9 @@ public void testGetContainers() throws YarnException, IOException {
     List<ContainerReport> reports = client.getContainers(appAttemptId);
     Assert.assertNotNull(reports);
     Assert.assertEquals(reports.get(0).getContainerId(),
-      (ContainerId.newInstance(appAttemptId, 1)));
+      (ContainerId.newContainerId(appAttemptId, 1)));
     Assert.assertEquals(reports.get(1).getContainerId(),
-      (ContainerId.newInstance(appAttemptId, 2)));
+      (ContainerId.newContainerId(appAttemptId, 2)));
     client.stop();
   }
 
@@ -367,11 +367,11 @@ public void testGetContainerReport() throws YarnException, IOException {
     ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
     ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(
         applicationId, 1);
-    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
+    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
     ContainerReport report = client.getContainerReport(containerId);
     Assert.assertNotNull(report);
     Assert.assertEquals(report.getContainerId().toString(),
-      (ContainerId.newInstance(expectedReports.get(0)
+      (ContainerId.newContainerId(expectedReports.get(0)
         .getCurrentApplicationAttemptId(), 1)).toString());
     client.stop();
   }
@@ -481,7 +481,7 @@ private List<ApplicationReport> createAppReports() {
         "oUrl",
         "diagnostics",
         YarnApplicationAttemptState.FINISHED,
-        ContainerId.newInstance(
+        ContainerId.newContainerId(
           newApplicationReport.getCurrentApplicationAttemptId(), 1));
     appAttempts.add(attempt);
     ApplicationAttemptReport attempt1 = ApplicationAttemptReport.newInstance(
@@ -492,20 +492,20 @@ private List<ApplicationReport> createAppReports() {
         "oUrl",
         "diagnostics",
         YarnApplicationAttemptState.FINISHED,
-        ContainerId.newInstance(
+        ContainerId.newContainerId(
           newApplicationReport.getCurrentApplicationAttemptId(), 2));
     appAttempts.add(attempt1);
     attempts.put(applicationId, appAttempts);
 
     List<ContainerReport> containerReports = new ArrayList<ContainerReport>();
     ContainerReport container = ContainerReport.newInstance(
-        ContainerId.newInstance(attempt.getApplicationAttemptId(), 1), null,
+        ContainerId.newContainerId(attempt.getApplicationAttemptId(), 1), null,
         NodeId.newInstance("host", 1234), Priority.UNDEFINED, 1234, 5678,
         "diagnosticInfo", "logURL", 0, ContainerState.COMPLETE);
     containerReports.add(container);
     ContainerReport container1 = ContainerReport.newInstance(
-        ContainerId.newInstance(attempt.getApplicationAttemptId(), 2), null,
+        ContainerId.newContainerId(attempt.getApplicationAttemptId(), 2), null,
         NodeId.newInstance("host", 1234), Priority.UNDEFINED, 1234, 5678,
         "diagnosticInfo", "logURL", 0, ContainerState.COMPLETE);
     containerReports.add(container1);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestLogsCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestLogsCLI.java
index 5ed839847f06b..ef9439d1a77bc 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestLogsCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestLogsCLI.java
@@ -172,9 +172,9 @@ public void testFetchApplictionLogs() throws Exception {
     ApplicationId appId = ApplicationIdPBImpl.newInstance(0, 1);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptIdPBImpl.newInstance(appId, 1);
-    ContainerId containerId0 = ContainerIdPBImpl.newInstance(appAttemptId, 0);
-    ContainerId containerId1 = ContainerIdPBImpl.newInstance(appAttemptId, 1);
-    ContainerId containerId2 = ContainerIdPBImpl.newInstance(appAttemptId, 2);
+    ContainerId containerId0 = ContainerIdPBImpl.newContainerId(appAttemptId, 0);
+    ContainerId containerId1 = ContainerIdPBImpl.newContainerId(appAttemptId, 1);
+    ContainerId containerId2 = ContainerIdPBImpl.newContainerId(appAttemptId, 2);
     NodeId nodeId = NodeId.newInstance("localhost", 1234);
 
     // create local logs
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
index d87277a7a7ad9..9d9a86a310000 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
@@ -146,7 +146,7 @@ public void testGetApplicationAttemptReport() throws Exception {
         applicationId, 1);
     ApplicationAttemptReport attemptReport = ApplicationAttemptReport
         .newInstance(attemptId, "host", 124, "url", "oUrl", "diagnostics",
-          YarnApplicationAttemptState.FINISHED, ContainerId.newInstance(
+          YarnApplicationAttemptState.FINISHED, ContainerId.newContainerId(
             attemptId, 1));
     when(
       client
@@ -182,11 +182,11 @@ public void testGetApplicationAttempts() throws Exception {
         applicationId, 2);
     ApplicationAttemptReport attemptReport = ApplicationAttemptReport
         .newInstance(attemptId, "host", 124, "url", "oUrl", "diagnostics",
-          YarnApplicationAttemptState.FINISHED, ContainerId.newInstance(
+          YarnApplicationAttemptState.FINISHED, ContainerId.newContainerId(
             attemptId, 1));
     ApplicationAttemptReport attemptReport1 = ApplicationAttemptReport
         .newInstance(attemptId1, "host", 124, "url", "oUrl", "diagnostics",
-          YarnApplicationAttemptState.FINISHED, ContainerId.newInstance(
+          YarnApplicationAttemptState.FINISHED, ContainerId.newContainerId(
             attemptId1, 1));
     List<ApplicationAttemptReport> reports = new ArrayList<ApplicationAttemptReport>();
     reports.add(attemptReport);
@@ -223,7 +223,7 @@ public void testGetContainerReport() throws Exception {
     ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
     ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(
         applicationId, 1);
-    ContainerId containerId = ContainerId.newInstance(attemptId, 1);
+    ContainerId containerId = ContainerId.newContainerId(attemptId, 1);
     ContainerReport container = ContainerReport.newInstance(containerId, null,
         NodeId.newInstance("host", 1234), Priority.UNDEFINED, 1234, 5678,
         "diagnosticInfo", "logURL", 0, ContainerState.COMPLETE);
@@ -255,8 +255,8 @@ public void testGetContainers() throws Exception {
     ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
     ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(
         applicationId, 1);
-    ContainerId containerId = ContainerId.newInstance(attemptId, 1);
-    ContainerId containerId1 = ContainerId.newInstance(attemptId, 2);
+    ContainerId containerId = ContainerId.newContainerId(attemptId, 1);
+    ContainerId containerId1 = ContainerId.newContainerId(attemptId, 2);
     ContainerReport container = ContainerReport.newInstance(containerId, null,
         NodeId.newInstance("host", 1234), Priority.UNDEFINED, 1234, 5678,
         "diagnosticInfo", "logURL", 0, ContainerState.COMPLETE);
@@ -766,7 +766,7 @@ public void testContainersHelpCommand() throws Exception {
         sysOutStream.toString());
     sysOutStream.reset();
 
-    ContainerId containerId = ContainerId.newInstance(appAttemptId, 7);
+    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 7);
     result = cli.run(
         new String[] { "container", "-status", containerId.toString(), "args" });
     verify(spyCli).printUsage(any(String.class), any(Options.class));
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestContainerLaunchRPC.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestContainerLaunchRPC.java
index 45b2a06429a0b..e2071ddc494e1 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestContainerLaunchRPC.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestContainerLaunchRPC.java
@@ -97,7 +97,7 @@ private void testRPCTimeout(String rpcClass) throws Exception {
     ApplicationAttemptId applicationAttemptId =
         ApplicationAttemptId.newInstance(applicationId, 0);
     ContainerId containerId =
-        ContainerId.newInstance(applicationAttemptId, 100);
+        ContainerId.newContainerId(applicationAttemptId, 100);
     NodeId nodeId = NodeId.newInstance("localhost", 1234);
     Resource resource = Resource.newInstance(1234, 2);
     ContainerTokenIdentifier containerTokenIdentifier =
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java
index 8271713e26fad..39e616229de17 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java
@@ -124,7 +124,7 @@ private void test(String rpcClass) throws Exception {
     ApplicationAttemptId applicationAttemptId =
         ApplicationAttemptId.newInstance(applicationId, 0);
     ContainerId containerId =
-        ContainerId.newInstance(applicationAttemptId, 100);
+        ContainerId.newContainerId(applicationAttemptId, 100);
     NodeId nodeId = NodeId.newInstance("localhost", 1234);
     Resource resource = Resource.newInstance(1234, 2);
     ContainerTokenIdentifier containerTokenIdentifier =
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerId.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerId.java
index 2259294bc0fae..1643301072b81 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerId.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerId.java
@@ -79,6 +79,6 @@ public static ContainerId newContainerId(int appId, int appAttemptId,
     ApplicationId applicationId = ApplicationId.newInstance(timestamp, appId);
     ApplicationAttemptId applicationAttemptId =
         ApplicationAttemptId.newInstance(applicationId, appAttemptId);
-    return ContainerId.newInstance(applicationAttemptId, containerId);
+    return ContainerId.newContainerId(applicationAttemptId, containerId);
   }
 }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerResourceDecrease.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerResourceDecrease.java
index f497d27a0ca18..29b0ffe38f27d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerResourceDecrease.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerResourceDecrease.java
@@ -33,7 +33,7 @@ public class TestContainerResourceDecrease {
   @Test
   public void testResourceDecreaseContext() {
     ContainerId containerId = ContainerId
-        .newInstance(ApplicationAttemptId.newInstance(
+        .newContainerId(ApplicationAttemptId.newInstance(
             ApplicationId.newInstance(1234, 3), 3), 7);
     Resource resource = Resource.newInstance(1023, 3);
     ContainerResourceDecrease ctx = ContainerResourceDecrease.newInstance(
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerResourceIncrease.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerResourceIncrease.java
index d307e390afb37..932d5a7a87ccf 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerResourceIncrease.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerResourceIncrease.java
@@ -38,7 +38,7 @@ public void testResourceIncreaseContext() {
     byte[] identifier = new byte[] { 1, 2, 3, 4 };
     Token token = Token.newInstance(identifier, "", "".getBytes(), "");
     ContainerId containerId = ContainerId
-        .newInstance(ApplicationAttemptId.newInstance(
+        .newContainerId(ApplicationAttemptId.newInstance(
             ApplicationId.newInstance(1234, 3), 3), 7);
     Resource resource = Resource.newInstance(1023, 3);
     ContainerResourceIncrease ctx = ContainerResourceIncrease.newInstance(
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerResourceIncreaseRequest.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerResourceIncreaseRequest.java
index 0acad00d7e96f..cf4dabf71bede 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerResourceIncreaseRequest.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerResourceIncreaseRequest.java
@@ -33,7 +33,7 @@ public class TestContainerResourceIncreaseRequest {
   @Test
   public void ContainerResourceIncreaseRequest() {
     ContainerId containerId = ContainerId
-        .newInstance(ApplicationAttemptId.newInstance(
+        .newContainerId(ApplicationAttemptId.newInstance(
            ApplicationId.newInstance(1234, 3), 3), 7);
     Resource resource = Resource.newInstance(1023, 3);
     ContainerResourceIncreaseRequest context = ContainerResourceIncreaseRequest
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java
index 405cb3d52a500..4301bc9eee7d0 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java
@@ -295,7 +295,7 @@ public void testContainerLogsFileAccess() throws IOException {
     ApplicationAttemptId applicationAttemptId =
         ApplicationAttemptId.newInstance(applicationId, 1);
     ContainerId testContainerId1 =
-        ContainerId.newInstance(applicationAttemptId, 1);
+        ContainerId.newContainerId(applicationAttemptId, 1);
     Path appDir =
         new Path(srcFileRoot, testContainerId1.getApplicationAttemptId()
             .getApplicationId().toString());
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogsBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogsBlock.java
index 0a17433c44fca..2a5762c30228a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogsBlock.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogsBlock.java
@@ -207,7 +207,7 @@ private void writeLog(Configuration configuration, String user)
       throws Exception {
     ApplicationId appId = ApplicationIdPBImpl.newInstance(0, 1);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptIdPBImpl.newInstance(appId, 1);
-    ContainerId containerId = ContainerIdPBImpl.newInstance(appAttemptId, 1);
+    ContainerId containerId = ContainerIdPBImpl.newContainerId(appAttemptId, 1);
 
     String path = "target/logs/" + user
         + "/logs/application_0_0001/localhost_1234";
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/security/TestYARNTokenIdentifier.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/security/TestYARNTokenIdentifier.java
index dc4f9e2a41dd8..834dcf131c498 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/security/TestYARNTokenIdentifier.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/security/TestYARNTokenIdentifier.java
@@ -134,7 +134,7 @@ public void testClientToAMTokenIdentifier() throws IOException {
 
   @Test
   public void testContainerTokenIdentifier() throws IOException {
-    ContainerId containerID = ContainerId.newInstance(
+    ContainerId containerID = ContainerId.newContainerId(
         ApplicationAttemptId.newInstance(ApplicationId.newInstance(
             1, 1), 1), 1);
     String hostName = "host0";
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java
index 1708da250f901..de4051a494c4f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java
@@ -58,7 +58,7 @@ protected void writeApplicationAttemptStartData(
       ApplicationAttemptId appAttemptId) throws IOException {
     store.applicationAttemptStarted(ApplicationAttemptStartData.newInstance(
       appAttemptId, appAttemptId.toString(), 0,
-      ContainerId.newInstance(appAttemptId, 1)));
+      ContainerId.newContainerId(appAttemptId, 1)));
   }
 
   protected void writeApplicationAttemptFinishData(
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java
index 60027e9283a83..7c2593d9e0aca 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java
@@ -142,7 +142,7 @@ public void testContainerReport() throws IOException, YarnException {
     ApplicationId appId = ApplicationId.newInstance(0, 1);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
+    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
     GetContainerReportRequest request =
         GetContainerReportRequest.newInstance(containerId);
     GetContainerReportResponse response =
@@ -160,8 +160,8 @@ public void testContainers() throws IOException, YarnException {
     ApplicationId appId = ApplicationId.newInstance(0, 1);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
-    ContainerId containerId1 = ContainerId.newInstance(appAttemptId, 2);
+    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
+    ContainerId containerId1 = ContainerId.newContainerId(appAttemptId, 2);
     GetContainersRequest request =
         GetContainersRequest.newInstance(appAttemptId);
     GetContainersResponse response =
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryManagerOnTimelineStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryManagerOnTimelineStore.java
index 856b88d28e65c..a093f19f9eaf1 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryManagerOnTimelineStore.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryManagerOnTimelineStore.java
@@ -141,7 +141,7 @@ private static void prepareTimelineStore(TimelineStore store, int scale)
       store.put(entities);
       for (int k = 1; k <= scale; ++k) {
         entities = new TimelineEntities();
-        ContainerId containerId = ContainerId.newInstance(appAttemptId, k);
+        ContainerId containerId = ContainerId.newContainerId(appAttemptId, k);
         entities.addEntity(createContainerEntity(containerId));
         store.put(entities);
       }
@@ -238,7 +238,7 @@ public ApplicationAttemptReport run() throws Exception {
     }
     Assert.assertNotNull(appAttempt);
     Assert.assertEquals(appAttemptId, appAttempt.getApplicationAttemptId());
-    Assert.assertEquals(ContainerId.newInstance(appAttemptId, 1),
+    Assert.assertEquals(ContainerId.newContainerId(appAttemptId, 1),
        appAttempt.getAMContainerId());
     Assert.assertEquals("test host", appAttempt.getHost());
     Assert.assertEquals(100, appAttempt.getRpcPort());
@@ -253,7 +253,7 @@ public ApplicationAttemptReport run() throws Exception {
   @Test
   public void testGetContainerReport() throws Exception {
     final ContainerId containerId =
-        ContainerId.newInstance(ApplicationAttemptId.newInstance(
+        ContainerId.newContainerId(ApplicationAttemptId.newInstance(
             ApplicationId.newInstance(0, 1), 1), 1);
     ContainerReport container;
     if (callerUGI == null) {
@@ -466,7 +466,7 @@ private static TimelineEntity createAppAttemptTimelineEntity(
     eventInfo.put(AppAttemptMetricsConstants.HOST_EVENT_INFO, "test host");
     eventInfo.put(AppAttemptMetricsConstants.RPC_PORT_EVENT_INFO, 100);
     eventInfo.put(AppAttemptMetricsConstants.MASTER_CONTAINER_EVENT_INFO,
-        ContainerId.newInstance(appAttemptId, 1));
+        ContainerId.newContainerId(appAttemptId, 1));
     tEvent.setEventInfo(eventInfo);
     entity.addEvent(tEvent);
     tEvent = new TimelineEvent();
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java
index 3a75d9e275733..c91d9f5a6d5ad 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java
@@ -121,7 +121,7 @@ private void testWriteHistoryData(
     }
     // write container history data
     for (int k = 1; k <= num; ++k) {
-      ContainerId containerId = ContainerId.newInstance(appAttemptId, k);
+      ContainerId containerId = ContainerId.newContainerId(appAttemptId, k);
       writeContainerStartData(containerId);
       if (missingContainer && k == num) {
         continue;
@@ -172,7 +172,7 @@ private void testReadHistoryData(
     // read container history data
     Assert.assertEquals(num, store.getContainers(appAttemptId).size());
     for (int k = 1; k <= num; ++k) {
-      ContainerId containerId = ContainerId.newInstance(appAttemptId, k);
+      ContainerId containerId = ContainerId.newContainerId(appAttemptId, k);
       ContainerHistoryData containerData = store.getContainer(containerId);
       Assert.assertNotNull(containerData);
       Assert.assertEquals(Priority.newInstance(containerId.getId()),
@@ -187,7 +187,7 @@ private void testReadHistoryData(
       ContainerHistoryData masterContainer =
           store.getAMContainer(appAttemptId);
       Assert.assertNotNull(masterContainer);
-      Assert.assertEquals(ContainerId.newInstance(appAttemptId, 1),
+      Assert.assertEquals(ContainerId.newContainerId(appAttemptId, 1),
           masterContainer.getContainerId());
     }
   }
@@ -215,7 +215,7 @@ public void testWriteAfterApplicationFinish() throws IOException {
       Assert.assertTrue(e.getMessage().contains("is not opened"));
     }
     // write container history data
-    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
+    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
     try {
       writeContainerStartData(containerId);
       Assert.fail();
@@ -240,7 +240,7 @@ public void testMassiveWriteContainerHistoryData() throws IOException {
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
     for (int i = 1; i <= 100000; ++i) {
-      ContainerId containerId = ContainerId.newInstance(appAttemptId, i);
+      ContainerId containerId = ContainerId.newContainerId(appAttemptId, i);
       writeContainerStartData(containerId);
       writeContainerFinishData(containerId);
     }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
index 6e9e242637a17..556db2beaf4b8 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
@@ -137,7 +137,7 @@ public void testReadWriteContainerHistory() throws Exception {
     ApplicationId appId = ApplicationId.newInstance(0, 1);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
+    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
     try {
       writeContainerFinishData(containerId);
       Assert.fail();
@@ -149,14 +149,14 @@ public void testReadWriteContainerHistory() throws Exception {
     writeApplicationAttemptStartData(appAttemptId);
     int numContainers = 5;
     for (int i = 1; i <= numContainers; ++i) {
-      containerId = ContainerId.newInstance(appAttemptId, i);
+      containerId = ContainerId.newContainerId(appAttemptId, i);
       writeContainerStartData(containerId);
       writeContainerFinishData(containerId);
     }
     Assert
       .assertEquals(numContainers, store.getContainers(appAttemptId).size());
     for (int i = 1; i <= numContainers; ++i) {
-      containerId = ContainerId.newInstance(appAttemptId, i);
+      containerId = ContainerId.newContainerId(appAttemptId, i);
       ContainerHistoryData data = store.getContainer(containerId);
       Assert.assertNotNull(data);
       Assert.assertEquals(Priority.newInstance(containerId.getId()),
@@ -165,11 +165,11 @@ public void testReadWriteContainerHistory() throws Exception {
     }
     ContainerHistoryData masterContainer = store.getAMContainer(appAttemptId);
     Assert.assertNotNull(masterContainer);
-    Assert.assertEquals(ContainerId.newInstance(appAttemptId, 1),
+    Assert.assertEquals(ContainerId.newContainerId(appAttemptId, 1),
        masterContainer.getContainerId());
     writeApplicationAttemptFinishData(appAttemptId);
     // Write again
-    containerId = ContainerId.newInstance(appAttemptId, 1);
+    containerId = ContainerId.newContainerId(appAttemptId, 1);
     try {
       writeContainerStartData(containerId);
       Assert.fail();
@@ -194,7 +194,7 @@ public void testMassiveWriteContainerHistory() throws IOException {
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
     for (int i = 1; i <= numContainers; ++i) {
-      ContainerId containerId = ContainerId.newInstance(appAttemptId, i);
+      ContainerId containerId = ContainerId.newContainerId(appAttemptId, i);
       writeContainerStartData(containerId);
       writeContainerFinishData(containerId);
     }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebApp.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebApp.java
index 82c42766b6694..7bac6f265c256 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebApp.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebApp.java
@@ -134,7 +134,7 @@ public void testContainerPage() throws Exception {
     containerPageInstance.set(
       YarnWebParams.CONTAINER_ID,
       ContainerId
-        .newInstance(
+        .newContainerId(
           ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 1),
             1), 1).toString());
     containerPageInstance.render();
@@ -153,7 +153,7 @@ ApplicationHistoryManager mockApplicationHistoryManager(int numApps,
           ApplicationAttemptId.newInstance(appId, j);
       writeApplicationAttemptStartData(appAttemptId);
       for (int k = 1; k <= numContainers; ++k) {
-        ContainerId containerId = ContainerId.newInstance(appAttemptId, k);
+        ContainerId containerId = ContainerId.newContainerId(appAttemptId, k);
         writeContainerStartData(containerId);
         writeContainerFinishData(containerId);
       }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java
index da39ce3928bd0..76bf8c3c75594 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java
@@ -338,7 +338,7 @@ public void testSingleContainer() throws Exception {
     ApplicationId appId = ApplicationId.newInstance(0, 1);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
+    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
     WebResource r = resource();
     ClientResponse response =
         r.path("ws").path("v1").path("applicationhistory").path("apps")
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/utils/BuilderUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/utils/BuilderUtils.java
index 8f042a87aa36c..a7e5d9cd82081 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/utils/BuilderUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/utils/BuilderUtils.java
@@ -139,7 +139,7 @@ public static ApplicationId convert(long clustertimestamp, CharSequence id) {
 
   public static ContainerId newContainerId(ApplicationAttemptId appAttemptId,
       long containerId) {
-    return ContainerId.newInstance(appAttemptId, containerId);
+    return ContainerId.newContainerId(appAttemptId, containerId);
   }
 
   public static ContainerId newContainerId(int appId, int appAttemptId,
@@ -164,7 +164,7 @@ public static Token newContainerToken(ContainerId cId, String host,
   public static ContainerId newContainerId(RecordFactory recordFactory,
       ApplicationId appId, ApplicationAttemptId appAttemptId,
       int containerId) {
-    return ContainerId.newInstance(appAttemptId, containerId);
+    return ContainerId.newContainerId(appAttemptId, containerId);
   }
 
   public static NodeId newNodeId(String host, int port) {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestYarnServerApiClasses.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestYarnServerApiClasses.java
index da25aa275a543..20983b6109ffb 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestYarnServerApiClasses.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestYarnServerApiClasses.java
@@ -223,7 +223,7 @@ private ApplicationAttemptId getApplicationAttemptId(int appAttemptId) {
   }
 
   private ContainerId getContainerId(int containerID, int appAttemptId) {
-    ContainerId containerId = ContainerIdPBImpl.newInstance(
+    ContainerId containerId = ContainerIdPBImpl.newContainerId(
         getApplicationAttemptId(appAttemptId), containerID);
     return containerId;
   }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/api/protocolrecords/TestProtocolRecords.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/api/protocolrecords/TestProtocolRecords.java
index ed902baa7ead2..86e49f05e1de5 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/api/protocolrecords/TestProtocolRecords.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/api/protocolrecords/TestProtocolRecords.java
@@ -51,7 +51,7 @@ public class TestProtocolRecords {
   public void testNMContainerStatus() {
     ApplicationId appId = ApplicationId.newInstance(123456789, 1);
     ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId containerId = ContainerId.newInstance(attemptId, 1);
+    ContainerId containerId = ContainerId.newContainerId(attemptId, 1);
     Resource resource = Resource.newInstance(1000, 200);
 
     NMContainerStatus report =
@@ -76,7 +76,7 @@ public void testNMContainerStatus() {
   public void testRegisterNodeManagerRequest() {
     ApplicationId appId = ApplicationId.newInstance(123456789, 1);
     ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId containerId = ContainerId.newInstance(attemptId, 1);
+    ContainerId containerId = ContainerId.newContainerId(attemptId, 1);
 
     NMContainerStatus containerReport =
         NMContainerStatus.newInstance(containerId,
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/api/protocolrecords/TestRegisterNodeManagerRequest.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/api/protocolrecords/TestRegisterNodeManagerRequest.java
index fdacd924d9b1c..947dec19745f1 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/api/protocolrecords/TestRegisterNodeManagerRequest.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/api/protocolrecords/TestRegisterNodeManagerRequest.java
@@ -38,7 +38,7 @@ public void testRegisterNodeManagerRequest() {
         RegisterNodeManagerRequest.newInstance(
           NodeId.newInstance("host", 1234), 1234, Resource.newInstance(0, 0),
           "version", Arrays.asList(NMContainerStatus.newInstance(
-            ContainerId.newInstance(
+            ContainerId.newContainerId(
               ApplicationAttemptId.newInstance(
                 ApplicationId.newInstance(1234L, 1), 1), 1),
             ContainerState.RUNNING, Resource.newInstance(1024, 1), "good", -1,
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java
index fabb03bf3f6fb..d2caefe88fed7 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java
@@ -139,7 +139,7 @@ public long getRMIdentifier() {
     ApplicationId applicationId = ApplicationId.newInstance(0, 0);
     ApplicationAttemptId applicationAttemptId =
        ApplicationAttemptId.newInstance(applicationId, 0);
-    ContainerId cID = ContainerId.newInstance(applicationAttemptId, 0);
+    ContainerId cID = ContainerId.newContainerId(applicationAttemptId, 0);
 
     String user = "testing";
     StartContainerRequest scRequest =
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutor.java
index ff477a38ec704..f837bbc72d293 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutor.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutor.java
@@ -431,7 +431,7 @@ public void testPostExecuteAfterReacquisition() throws Exception {
     ApplicationId appId = ApplicationId.newInstance(12345, 67890);
     ApplicationAttemptId attemptId =
         ApplicationAttemptId.newInstance(appId, 54321);
-    ContainerId cid = ContainerId.newInstance(attemptId, 9876);
+    ContainerId cid = ContainerId.newContainerId(attemptId, 9876);
 
     Configuration conf = new YarnConfiguration();
     conf.setClass(YarnConfiguration.NM_LINUX_CONTAINER_RESOURCES_HANDLER,
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java
index e9aea0ef6c43d..41c16a9d8fc0d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java
@@ -290,7 +290,7 @@ private ContainerId createContainerId() {
     ApplicationId appId = ApplicationId.newInstance(0, 0);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId containerId = ContainerId.newInstance(appAttemptId, 0);
+    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 0);
     return containerId;
   }
 
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerResync.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerResync.java
index 85bafb3dee585..a58294fe48156 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerResync.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerResync.java
@@ -592,7 +592,7 @@ public static NMContainerStatus createNMContainerStatus(int id,
     ApplicationId applicationId = ApplicationId.newInstance(0, 1);
     ApplicationAttemptId applicationAttemptId =
         ApplicationAttemptId.newInstance(applicationId, 1);
-    ContainerId containerId = ContainerId.newInstance(applicationAttemptId, id);
+    ContainerId containerId = ContainerId.newContainerId(applicationAttemptId, id);
     NMContainerStatus containerReport =
         NMContainerStatus.newInstance(containerId, containerState,
           Resource.newInstance(1024, 1), "recover container", 0,
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java
index 11575b8373399..c079006ac10aa 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java
@@ -260,7 +260,7 @@ public static ContainerId createContainerId() {
     ApplicationId appId = ApplicationId.newInstance(0, 0);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId containerId = ContainerId.newInstance(appAttemptId, 0);
+    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 0);
     return containerId;
   }
 
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java
index 925a249ed1cbc..b34262b461d78 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java
@@ -224,7 +224,7 @@ public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request)
         ApplicationAttemptId appAttemptID =
             ApplicationAttemptId.newInstance(appId1, 0);
         ContainerId firstContainerID =
-            ContainerId.newInstance(appAttemptID, heartBeatID);
+            ContainerId.newContainerId(appAttemptID, heartBeatID);
         ContainerLaunchContext launchContext = recordFactory
             .newRecordInstance(ContainerLaunchContext.class);
         Resource resource = BuilderUtils.newResource(2, 1);
@@ -254,7 +254,7 @@ public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request)
         ApplicationAttemptId appAttemptID =
             ApplicationAttemptId.newInstance(appId2, 0);
         ContainerId secondContainerID =
-            ContainerId.newInstance(appAttemptID, heartBeatID);
+            ContainerId.newContainerId(appAttemptID, heartBeatID);
         ContainerLaunchContext launchContext = recordFactory
             .newRecordInstance(ContainerLaunchContext.class);
         long currentTime = System.currentTimeMillis();
@@ -818,7 +818,7 @@ public void testRecentlyFinishedContainers() throws Exception {
     ApplicationId appId = ApplicationId.newInstance(0, 0);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 0);
-    ContainerId cId = ContainerId.newInstance(appAttemptId, 0);
+    ContainerId cId = ContainerId.newContainerId(appAttemptId, 0);
     nm.getNMContext().getApplications().putIfAbsent(appId,
         mock(Application.class));
     nm.getNMContext().getContainers().putIfAbsent(cId, mock(Container.class));
@@ -855,7 +855,7 @@ public void testRemovePreviousCompletedContainersFromContext() throws Exception
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 0);
 
-    ContainerId cId = ContainerId.newInstance(appAttemptId, 1);
+    ContainerId cId = ContainerId.newContainerId(appAttemptId, 1);
     Token containerToken =
         BuilderUtils.newContainerToken(cId, "anyHost", 1234, "anyUser",
           BuilderUtils.newResource(1024, 1), 0, 123,
@@ -876,7 +876,7 @@ public org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Cont
       };
 
     ContainerId runningContainerId =
-        ContainerId.newInstance(appAttemptId, 3);
+        ContainerId.newContainerId(appAttemptId, 3);
     Token runningContainerToken =
         BuilderUtils.newContainerToken(runningContainerId, "anyHost", 1234,
           "anyUser", BuilderUtils.newResource(1024, 1), 0, 123,
@@ -936,7 +936,7 @@ public void testCleanedupApplicationContainerCleanup() throws IOException {
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 0);
 
-    ContainerId cId = ContainerId.newInstance(appAttemptId, 1);
+    ContainerId cId = ContainerId.newContainerId(appAttemptId, 1);
     Token containerToken =
         BuilderUtils.newContainerToken(cId, "anyHost", 1234, "anyUser",
           BuilderUtils.newResource(1024, 1), 0, 123,
@@ -1494,7 +1494,7 @@ public static ContainerStatus createContainerStatus(int id,
     ApplicationId applicationId = ApplicationId.newInstance(0, 1);
     ApplicationAttemptId applicationAttemptId =
         ApplicationAttemptId.newInstance(applicationId, 1);
-    ContainerId contaierId = ContainerId.newInstance(applicationAttemptId, id);
+    ContainerId contaierId = ContainerId.newContainerId(applicationAttemptId, id);
     ContainerStatus containerStatus =
         BuilderUtils.newContainerStatus(contaierId, containerState,
           "test_containerStatus: id=" + id + ", containerState: "
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestAuxServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestAuxServices.java
index 59cc947e3b5d0..757cdc8f3ee63 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestAuxServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestAuxServices.java
@@ -189,7 +189,7 @@ public void testAuxEventDispatch() {
     ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId1, 1);
     ContainerTokenIdentifier cti = new ContainerTokenIdentifier(
-        ContainerId.newInstance(attemptId, 1), "", "",
+        ContainerId.newContainerId(attemptId, 1), "", "",
         Resource.newInstance(1, 1), 0,0,0, Priority.newInstance(0), 0);
     Container container = new ContainerImpl(null, null, null, null, null,
         null, cti);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java
index 45d9925a61c87..86cc4dcedeb8b 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java
@@ -104,7 +104,7 @@ private ContainerId createContainerId(int id) {
     ApplicationId appId = ApplicationId.newInstance(0, 0);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId containerId = ContainerId.newInstance(appAttemptId, id);
+    ContainerId containerId = ContainerId.newContainerId(appAttemptId, id);
     return containerId;
   }
 
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManagerRecovery.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManagerRecovery.java
index 007fc36fcde75..a73d58341bbdf 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManagerRecovery.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManagerRecovery.java
@@ -111,7 +111,7 @@ public void testApplicationRecovery() throws Exception {
     ApplicationId appId = ApplicationId.newInstance(0, 1);
     ApplicationAttemptId attemptId =
         ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId cid = ContainerId.newInstance(attemptId, 1);
+    ContainerId cid = ContainerId.newContainerId(attemptId, 1);
     Map<String, LocalResource> localResources = Collections.emptyMap();
     Map<String, String> containerEnv = Collections.emptyMap();
     List<String> containerCmds = Collections.emptyList();
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
index 001643b434ed9..cbc41c411ed3e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
@@ -385,7 +385,7 @@ public void testContainerEnvVariables() throws Exception {
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
 
-    ContainerId cId = ContainerId.newInstance(appAttemptId, 0);
+    ContainerId cId = ContainerId.newContainerId(appAttemptId, 0);
     Map<String, String> userSetEnv = new HashMap<String, String>();
     userSetEnv.put(Environment.CONTAINER_ID.name(), "user_set_container_id");
     userSetEnv.put(Environment.NM_HOST.name(), "user_set_NM_HOST");
@@ -634,7 +634,7 @@ private void internalKillTest(boolean delayed) throws Exception {
     ApplicationId appId = ApplicationId.newInstance(1, 1);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId cId = ContainerId.newInstance(appAttemptId, 0);
+    ContainerId cId = ContainerId.newContainerId(appAttemptId, 0);
     File processStartFile =
         new File(tmpDir, "pid.txt").getAbsoluteFile();
 
@@ -771,7 +771,7 @@ public void testImmediateKill() throws Exception {
   @Test (timeout = 10000)
   public void testCallFailureWithNullLocalizedResources() {
     Container container = mock(Container.class);
-    when(container.getContainerId()).thenReturn(ContainerId.newInstance(
+    when(container.getContainerId()).thenReturn(ContainerId.newContainerId(
         ApplicationAttemptId.newInstance(ApplicationId.newInstance(
             System.currentTimeMillis(), 1), 1), 1));
     ContainerLaunchContext clc = mock(ContainerLaunchContext.class);
@@ -980,7 +980,7 @@ public void testKillProcessGroup() throws Exception {
     ApplicationId appId = ApplicationId.newInstance(2, 2);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId cId = ContainerId.newInstance(appAttemptId, 0);
+    ContainerId cId = ContainerId.newContainerId(appAttemptId, 0);
     File processStartFile =
         new File(tmpDir, "pid.txt").getAbsoluteFile();
     File childProcessStartFile =
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java
index 99d722f9c7a38..1f2d0677c5f7a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java
@@ -206,7 +206,7 @@ public void testContainerKillOnMemoryOverflow() throws IOException,
     // ////// Construct the Container-id
     ApplicationId appId = ApplicationId.newInstance(0, 0);
     ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId cId = ContainerId.newInstance(appAttemptId, 0);
+    ContainerId cId = ContainerId.newContainerId(appAttemptId, 0);
     int port = 12345;
 
     URL resource_alpha =
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/recovery/TestNMLeveldbStateStoreService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/recovery/TestNMLeveldbStateStoreService.java
index db377f5f0c7ca..438cec3a793a7 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/recovery/TestNMLeveldbStateStoreService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/recovery/TestNMLeveldbStateStoreService.java
@@ -226,7 +226,7 @@ public void testContainerStorage() throws IOException {
     ApplicationId appId = ApplicationId.newInstance(1234, 3);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 4);
-    ContainerId containerId = ContainerId.newInstance(appAttemptId, 5);
+    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 5);
     LocalResource lrsrc = LocalResource.newInstance(
         URL.newInstance("hdfs", "somehost", 12345, "/some/path/to/rsrc"),
         LocalResourceType.FILE, LocalResourceVisibility.APPLICATION, 123L,
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestApplicationCleanup.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestApplicationCleanup.java
index c07882da0ad8f..891130f0f7616 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestApplicationCleanup.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestApplicationCleanup.java
@@ -378,7 +378,7 @@ public void testAppCleanupWhenRMRestartedBeforeAppFinished() throws Exception {
     // nm1/nm2 register to rm2, and do a heartbeat
     nm1.setResourceTrackerService(rm2.getResourceTrackerService());
     nm1.registerNode(Arrays.asList(NMContainerStatus.newInstance(
-      ContainerId.newInstance(am0.getApplicationAttemptId(), 1),
+      ContainerId.newContainerId(am0.getApplicationAttemptId(), 1),
       ContainerState.COMPLETE, Resource.newInstance(1024, 1), "", 0,
       Priority.newInstance(0), 1234)), Arrays.asList(app0.getApplicationId()));
     nm2.setResourceTrackerService(rm2.getResourceTrackerService());
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java
index 5652b6ea68295..15aca428268e3 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java
@@ -404,7 +404,7 @@ public void testGetContainerReport() throws YarnException, IOException {
         .newRecordInstance(GetContainerReportRequest.class);
     ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(
         ApplicationId.newInstance(123456, 1), 1);
-    ContainerId containerId = ContainerId.newInstance(attemptId, 1);
+    ContainerId containerId = ContainerId.newContainerId(attemptId, 1);
     request.setContainerId(containerId);
 
     try {
@@ -425,7 +425,7 @@ public void testGetContainers() throws YarnException, IOException {
         .newRecordInstance(GetContainersRequest.class);
     ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(
         ApplicationId.newInstance(123456, 1), 1);
-    ContainerId containerId = ContainerId.newInstance(attemptId, 1);
+    ContainerId containerId = ContainerId.newContainerId(attemptId, 1);
     request.setApplicationAttemptId(attemptId);
     try {
       GetContainersResponse response = rmService.getContainers(request);
@@ -1213,7 +1213,7 @@ public ApplicationReport createAndGetApplicationReport(
     RMAppAttemptImpl rmAppAttemptImpl = spy(new RMAppAttemptImpl(attemptId,
         rmContext, yarnScheduler, null, asContext, config, false, null));
     Container container = Container.newInstance(
-        ContainerId.newInstance(attemptId, 1), null, "", null, null, null);
+        ContainerId.newContainerId(attemptId, 1), null, "", null, null, null);
     RMContainerImpl containerimpl = spy(new RMContainerImpl(container,
         attemptId, null, "", rmContext));
     Map<ApplicationAttemptId, RMAppAttempt> attempts =
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestContainerResourceUsage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestContainerResourceUsage.java
index c200df46c70fd..b9397bf070f20 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestContainerResourceUsage.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestContainerResourceUsage.java
@@ -207,7 +207,7 @@ public void testUsageWithMultipleContainersAndRMRestart() throws Exception {
     // usage metrics. This will cause the attempt to fail, and, since the max
    // attempt retries is 1, the app will also fail. This is intentional so
     // that all containers will complete prior to saving.
-    ContainerId cId = ContainerId.newInstance(attempt0.getAppAttemptId(), 1);
+    ContainerId cId = ContainerId.newContainerId(attempt0.getAppAttemptId(), 1);
     nm.nodeHeartbeat(attempt0.getAppAttemptId(), cId.getContainerId(),
         ContainerState.COMPLETE);
     rm0.waitForState(nm, cId, RMContainerState.COMPLETED);
@@ -289,7 +289,7 @@ private void amRestartTests(boolean keepRunningContainers)
 
     // launch the 2nd container.
     ContainerId containerId2 =
-        ContainerId.newInstance(am0.getApplicationAttemptId(), 2);
+        ContainerId.newContainerId(am0.getApplicationAttemptId(), 2);
     nm.nodeHeartbeat(am0.getApplicationAttemptId(),
         containerId2.getContainerId(), ContainerState.RUNNING);
     rm.waitForState(nm, containerId2, RMContainerState.RUNNING);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMRestart.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMRestart.java
index a9683f13dfc13..a0f86272b782d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMRestart.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMRestart.java
@@ -1963,7 +1963,7 @@ private void writeToHostsFile(String... hosts) throws IOException {
 
   public static NMContainerStatus createNMContainerStatus(
       ApplicationAttemptId appAttemptId, int id, ContainerState containerState) {
-    ContainerId containerId = ContainerId.newInstance(appAttemptId, id);
+    ContainerId containerId = ContainerId.newContainerId(appAttemptId, id);
     NMContainerStatus containerReport =
         NMContainerStatus.newInstance(containerId, containerState,
           Resource.newInstance(1024, 1), "recover container", 0,
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestResourceTrackerService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestResourceTrackerService.java
index 28d1d6383d37a..7c128481c73b8 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestResourceTrackerService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestResourceTrackerService.java
@@ -510,7 +510,7 @@ public void testHandleContainerStatusInvalidCompletions() throws Exception {
 
     // Case 1.1: AppAttemptId is null
     NMContainerStatus report = NMContainerStatus.newInstance(
-        ContainerId.newInstance(
+        ContainerId.newContainerId(
            ApplicationAttemptId.newInstance(app.getApplicationId(), 2), 1),
         ContainerState.COMPLETE, Resource.newInstance(1024, 1),
         "Dummy Completed", 0, Priority.newInstance(10), 1234);
@@ -522,7 +522,7 @@ public void testHandleContainerStatusInvalidCompletions() throws Exception {
         (RMAppAttemptImpl) app.getCurrentAppAttempt();
     currentAttempt.setMasterContainer(null);
     report = NMContainerStatus.newInstance(
-        ContainerId.newInstance(currentAttempt.getAppAttemptId(), 0),
+        ContainerId.newContainerId(currentAttempt.getAppAttemptId(), 0),
         ContainerState.COMPLETE, Resource.newInstance(1024, 1),
         "Dummy Completed", 0, Priority.newInstance(10), 1234);
     rm.getResourceTrackerService().handleNMContainerStatus(report, null);
@@ -533,7 +533,7 @@ public void testHandleContainerStatusInvalidCompletions() throws Exception {
 
     // Case 2.1: AppAttemptId is null
     report = NMContainerStatus.newInstance(
-        ContainerId.newInstance(
+        ContainerId.newContainerId(
ApplicationAttemptId.newInstance(app.getApplicationId(), 2), 1), ContainerState.COMPLETE, Resource.newInstance(1024, 1), "Dummy Completed", 0, Priority.newInstance(10), 1234); @@ -549,7 +549,7 @@ public void testHandleContainerStatusInvalidCompletions() throws Exception { (RMAppAttemptImpl) app.getCurrentAppAttempt(); currentAttempt.setMasterContainer(null); report = NMContainerStatus.newInstance( - ContainerId.newInstance(currentAttempt.getAppAttemptId(), 0), + ContainerId.newContainerId(currentAttempt.getAppAttemptId(), 0), ContainerState.COMPLETE, Resource.newInstance(1024, 1), "Dummy Completed", 0, Priority.newInstance(10), 1234); try { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestWorkPreservingRMRestart.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestWorkPreservingRMRestart.java index 536dbd77d318c..2f0a839e9e95c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestWorkPreservingRMRestart.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestWorkPreservingRMRestart.java @@ -849,7 +849,7 @@ public void testReleasedContainerNotRecovered() throws Exception { // try to release a container before the container is actually recovered. final ContainerId runningContainer = - ContainerId.newInstance(am1.getApplicationAttemptId(), 2); + ContainerId.newContainerId(am1.getApplicationAttemptId(), 2); am1.allocate(null, Arrays.asList(runningContainer)); // send container statuses to recover the containers diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/TestRMApplicationHistoryWriter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/TestRMApplicationHistoryWriter.java index 78077d4fa3416..f827bf4285d69 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/TestRMApplicationHistoryWriter.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/TestRMApplicationHistoryWriter.java @@ -153,7 +153,7 @@ private static RMAppAttempt createRMAppAttempt( when(appAttempt.getRpcPort()).thenReturn(-100); Container container = mock(Container.class); when(container.getId()) - .thenReturn(ContainerId.newInstance(appAttemptId, 1)); + .thenReturn(ContainerId.newContainerId(appAttemptId, 1)); when(appAttempt.getMasterContainer()).thenReturn(container); when(appAttempt.getDiagnostics()).thenReturn("test diagnostics info"); when(appAttempt.getTrackingUrl()).thenReturn("test url"); @@ -254,7 +254,7 @@ public void testWriteApplicationAttempt() throws Exception { Assert.assertNotNull(appAttemptHD); Assert.assertEquals("test host", appAttemptHD.getHost()); Assert.assertEquals(-100, appAttemptHD.getRPCPort()); - Assert.assertEquals(ContainerId.newInstance( + Assert.assertEquals(ContainerId.newContainerId( 
ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 1), 1), 1), appAttemptHD.getMasterContainerId()); @@ -281,14 +281,14 @@ public void testWriteApplicationAttempt() throws Exception { @Test public void testWriteContainer() throws Exception { RMContainer container = - createRMContainer(ContainerId.newInstance( + createRMContainer(ContainerId.newContainerId( ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 1), 1), 1)); writer.containerStarted(container); ContainerHistoryData containerHD = null; for (int i = 0; i < MAX_RETRIES; ++i) { containerHD = - store.getContainer(ContainerId.newInstance(ApplicationAttemptId + store.getContainer(ContainerId.newContainerId(ApplicationAttemptId .newInstance(ApplicationId.newInstance(0, 1), 1), 1)); if (containerHD != null) { break; @@ -307,7 +307,7 @@ public void testWriteContainer() throws Exception { writer.containerFinished(container); for (int i = 0; i < MAX_RETRIES; ++i) { containerHD = - store.getContainer(ContainerId.newInstance(ApplicationAttemptId + store.getContainer(ContainerId.newContainerId(ApplicationAttemptId .newInstance(ApplicationId.newInstance(0, 1), 1), 1)); if (containerHD.getContainerState() != null) { break; @@ -337,7 +337,7 @@ public void testParallelWrite() throws Exception { RMAppAttempt appAttempt = createRMAppAttempt(appAttemptId); writer.applicationAttemptStarted(appAttempt); for (int k = 1; k <= 10; ++k) { - ContainerId containerId = ContainerId.newInstance(appAttemptId, k); + ContainerId containerId = ContainerId.newContainerId(appAttemptId, k); RMContainer container = createRMContainer(containerId); writer.containerStarted(container); writer.containerFinished(container); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/MockAsm.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/MockAsm.java index 800f65baaae68..62e3e5c8b9d0d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/MockAsm.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/MockAsm.java @@ -189,7 +189,7 @@ public static RMApp newApplication(int i) { final ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(newAppID(i), 0); final Container masterContainer = Records.newRecord(Container.class); - ContainerId containerId = ContainerId.newInstance(appAttemptId, 0); + ContainerId containerId = ContainerId.newContainerId(appAttemptId, 0); masterContainer.setId(containerId); masterContainer.setNodeHttpAddress("node:port"); final String user = newUserName(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/TestAMRestart.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/TestAMRestart.java index fcb4e450b0890..a93123e91871d 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/TestAMRestart.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/TestAMRestart.java @@ -101,20 +101,20 @@ public void testAMRestartWithExistingContainers() throws Exception { // launch the 2nd container, for testing running container transferred. nm1.nodeHeartbeat(am1.getApplicationAttemptId(), 2, ContainerState.RUNNING); ContainerId containerId2 = - ContainerId.newInstance(am1.getApplicationAttemptId(), 2); + ContainerId.newContainerId(am1.getApplicationAttemptId(), 2); rm1.waitForState(nm1, containerId2, RMContainerState.RUNNING); // launch the 3rd container, for testing container allocated by previous // attempt is completed by the next new attempt/ nm1.nodeHeartbeat(am1.getApplicationAttemptId(), 3, ContainerState.RUNNING); ContainerId containerId3 = - ContainerId.newInstance(am1.getApplicationAttemptId(), 3); + ContainerId.newContainerId(am1.getApplicationAttemptId(), 3); rm1.waitForState(nm1, containerId3, RMContainerState.RUNNING); // 4th container still in AQUIRED state. for testing Acquired container is // always killed. ContainerId containerId4 = - ContainerId.newInstance(am1.getApplicationAttemptId(), 4); + ContainerId.newContainerId(am1.getApplicationAttemptId(), 4); rm1.waitForState(nm1, containerId4, RMContainerState.ACQUIRED); // 5th container is in Allocated state. for testing allocated container is @@ -122,14 +122,14 @@ public void testAMRestartWithExistingContainers() throws Exception { am1.allocate("127.0.0.1", 1024, 1, new ArrayList<ContainerId>()); nm1.nodeHeartbeat(true); ContainerId containerId5 = - ContainerId.newInstance(am1.getApplicationAttemptId(), 5); + ContainerId.newContainerId(am1.getApplicationAttemptId(), 5); rm1.waitForContainerAllocated(nm1, containerId5); rm1.waitForState(nm1, containerId5, RMContainerState.ALLOCATED); // 6th container is in Reserved state. 
am1.allocate("127.0.0.1", 6000, 1, new ArrayList<ContainerId>()); ContainerId containerId6 = - ContainerId.newInstance(am1.getApplicationAttemptId(), 6); + ContainerId.newContainerId(am1.getApplicationAttemptId(), 6); nm1.nodeHeartbeat(true); SchedulerApplicationAttempt schedulerAttempt = ((AbstractYarnScheduler) rm1.getResourceScheduler()) @@ -295,12 +295,12 @@ public void testNMTokensRebindOnAMRestart() throws Exception { // launch the container-2 nm1.nodeHeartbeat(am1.getApplicationAttemptId(), 2, ContainerState.RUNNING); ContainerId containerId2 = - ContainerId.newInstance(am1.getApplicationAttemptId(), 2); + ContainerId.newContainerId(am1.getApplicationAttemptId(), 2); rm1.waitForState(nm1, containerId2, RMContainerState.RUNNING); // launch the container-3 nm1.nodeHeartbeat(am1.getApplicationAttemptId(), 3, ContainerState.RUNNING); ContainerId containerId3 = - ContainerId.newInstance(am1.getApplicationAttemptId(), 3); + ContainerId.newContainerId(am1.getApplicationAttemptId(), 3); rm1.waitForState(nm1, containerId3, RMContainerState.RUNNING); // fail am1 @@ -335,7 +335,7 @@ public void testNMTokensRebindOnAMRestart() throws Exception { } nm1.nodeHeartbeat(am2.getApplicationAttemptId(), 2, ContainerState.RUNNING); ContainerId am2ContainerId2 = - ContainerId.newInstance(am2.getApplicationAttemptId(), 2); + ContainerId.newContainerId(am2.getApplicationAttemptId(), 2); rm1.waitForState(nm1, am2ContainerId2, RMContainerState.RUNNING); // fail am2. @@ -379,7 +379,7 @@ public void testShouldNotCountFailureToMaxAttemptRetry() throws Exception { CapacityScheduler scheduler = (CapacityScheduler) rm1.getResourceScheduler(); ContainerId amContainer = - ContainerId.newInstance(am1.getApplicationAttemptId(), 1); + ContainerId.newContainerId(am1.getApplicationAttemptId(), 1); // Preempt the first attempt; scheduler.killContainer(scheduler.getRMContainer(amContainer)); @@ -396,7 +396,7 @@ public void testShouldNotCountFailureToMaxAttemptRetry() throws Exception { // Preempt the second attempt. 
ContainerId amContainer2 = - ContainerId.newInstance(am2.getApplicationAttemptId(), 1); + ContainerId.newContainerId(am2.getApplicationAttemptId(), 1); scheduler.killContainer(scheduler.getRMContainer(amContainer2)); am2.waitForState(RMAppAttemptState.FAILED); @@ -487,7 +487,7 @@ public void testPreemptedAMRestartOnRMRestart() throws Exception { CapacityScheduler scheduler = (CapacityScheduler) rm1.getResourceScheduler(); ContainerId amContainer = - ContainerId.newInstance(am1.getApplicationAttemptId(), 1); + ContainerId.newContainerId(am1.getApplicationAttemptId(), 1); // Forcibly preempt the am container; scheduler.killContainer(scheduler.getRMContainer(amContainer)); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TestSystemMetricsPublisher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TestSystemMetricsPublisher.java index bc509a0e2508f..65c8547218097 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TestSystemMetricsPublisher.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TestSystemMetricsPublisher.java @@ -250,7 +250,7 @@ public void testPublishAppAttemptMetrics() throws Exception { @Test(timeout = 10000) public void testPublishContainerMetrics() throws Exception { ContainerId containerId = - ContainerId.newInstance(ApplicationAttemptId.newInstance( + ContainerId.newContainerId(ApplicationAttemptId.newInstance( ApplicationId.newInstance(0, 1), 1), 1); RMContainer container = createRMContainer(containerId); metricsPublisher.containerCreated(container, container.getCreationTime()); @@ -347,7 +347,7 @@ private static RMAppAttempt createRMAppAttempt( when(appAttempt.getRpcPort()).thenReturn(-100); Container container = mock(Container.class); when(container.getId()) - .thenReturn(ContainerId.newInstance(appAttemptId, 1)); + .thenReturn(ContainerId.newContainerId(appAttemptId, 1)); when(appAttempt.getMasterContainer()).thenReturn(container); when(appAttempt.getDiagnostics()).thenReturn("test diagnostics info"); when(appAttempt.getTrackingUrl()).thenReturn("test tracking url"); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/monitor/capacity/TestProportionalCapacityPreemptionPolicy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/monitor/capacity/TestProportionalCapacityPreemptionPolicy.java index a0c2b01607ba7..24e70bb4c9e08 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/monitor/capacity/TestProportionalCapacityPreemptionPolicy.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/monitor/capacity/TestProportionalCapacityPreemptionPolicy.java @@ -728,7 +728,7 @@ FiCaSchedulerApp mockApp(int qid, int id, int used, int pending, int reserved, RMContainer mockContainer(ApplicationAttemptId appAttId, int id, 
Resource r, int priority) { - ContainerId cId = ContainerId.newInstance(appAttId, id); + ContainerId cId = ContainerId.newContainerId(appAttId, id); Container c = mock(Container.class); when(c.getResource()).thenReturn(r); when(c.getPriority()).thenReturn(Priority.create(priority)); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/TestRMAppAttemptTransitions.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/TestRMAppAttemptTransitions.java index e5daf6fd320ad..2b5c2b882b9ed 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/TestRMAppAttemptTransitions.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/TestRMAppAttemptTransitions.java @@ -1395,7 +1395,7 @@ public void testFailedToFailed() { // failed attempt captured the container finished event. assertEquals(0, applicationAttempt.getJustFinishedContainers().size()); ContainerStatus cs2 = - ContainerStatus.newInstance(ContainerId.newInstance(appAttemptId, 2), + ContainerStatus.newInstance(ContainerId.newContainerId(appAttemptId, 2), ContainerState.COMPLETE, "", 0); applicationAttempt.handle(new RMAppAttemptContainerFinishedEvent( appAttemptId, cs2, anyNodeId)); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmcontainer/TestRMContainerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmcontainer/TestRMContainerImpl.java index 553587ed17447..76cdcaeb0b24a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmcontainer/TestRMContainerImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmcontainer/TestRMContainerImpl.java @@ -249,7 +249,7 @@ public void testExistenceOfResourceRequestInRMContainer() throws Exception { // request a container. 
am1.allocate("127.0.0.1", 1024, 1, new ArrayList<ContainerId>()); - ContainerId containerId2 = ContainerId.newInstance( + ContainerId containerId2 = ContainerId.newContainerId( am1.getApplicationAttemptId(), 2); rm1.waitForState(nm1, containerId2, RMContainerState.ALLOCATED); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/TestSchedulerApplicationAttempt.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/TestSchedulerApplicationAttempt.java index c168b955c1e73..c648b83ad4bf5 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/TestSchedulerApplicationAttempt.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/TestSchedulerApplicationAttempt.java @@ -138,7 +138,7 @@ private RMContainer createReservedRMContainer(ApplicationAttemptId appAttId, private RMContainer createRMContainer(ApplicationAttemptId appAttId, int id, Resource resource) { - ContainerId containerId = ContainerId.newInstance(appAttId, id); + ContainerId containerId = ContainerId.newContainerId(appAttId, id); RMContainer rmContainer = mock(RMContainer.class); Container container = mock(Container.class); when(container.getResource()).thenReturn(resource); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/TestSchedulerUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/TestSchedulerUtils.java index c3ae38c364a97..c9e81eebb9714 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/TestSchedulerUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/TestSchedulerUtils.java @@ -560,7 +560,7 @@ public void testComparePriorities(){ @Test public void testCreateAbnormalContainerStatus() { ContainerStatus cd = SchedulerUtils.createAbnormalContainerStatus( - ContainerId.newInstance(ApplicationAttemptId.newInstance( + ContainerId.newContainerId(ApplicationAttemptId.newInstance( ApplicationId.newInstance(System.currentTimeMillis(), 1), 1), 1), "x"); Assert.assertEquals(ContainerExitStatus.ABORTED, cd.getExitStatus()); } @@ -568,7 +568,7 @@ public void testCreateAbnormalContainerStatus() { @Test public void testCreatePreemptedContainerStatus() { ContainerStatus cd = SchedulerUtils.createPreemptedContainerStatus( - ContainerId.newInstance(ApplicationAttemptId.newInstance( + ContainerId.newContainerId(ApplicationAttemptId.newInstance( ApplicationId.newInstance(System.currentTimeMillis(), 1), 1), 1), "x"); Assert.assertEquals(ContainerExitStatus.PREEMPTED, cd.getExitStatus()); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacityScheduler.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacityScheduler.java index 98dc673da2563..2aa57a0d79524 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacityScheduler.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacityScheduler.java @@ -1085,7 +1085,7 @@ public void testRecoverRequestAfterPreemption() throws Exception { // request a container. am1.allocate("127.0.0.1", 1024, 1, new ArrayList<ContainerId>()); - ContainerId containerId1 = ContainerId.newInstance( + ContainerId containerId1 = ContainerId.newContainerId( am1.getApplicationAttemptId(), 2); rm1.waitForState(nm1, containerId1, RMContainerState.ALLOCATED); @@ -1122,7 +1122,7 @@ public void testRecoverRequestAfterPreemption() throws Exception { } // New container will be allocated and will move to ALLOCATED state - ContainerId containerId2 = ContainerId.newInstance( + ContainerId containerId2 = ContainerId.newContainerId( am1.getApplicationAttemptId(), 3); rm1.waitForState(nm1, containerId2, RMContainerState.ALLOCATED); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestContainerAllocation.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestContainerAllocation.java index 0c32c0cecf716..ad834ac1b3c39 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestContainerAllocation.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestContainerAllocation.java @@ -164,7 +164,7 @@ public void testContainerTokenGeneratedOnPullRequest() throws Exception { // request a container. am1.allocate("127.0.0.1", 1024, 1, new ArrayList<ContainerId>()); ContainerId containerId2 = - ContainerId.newInstance(am1.getApplicationAttemptId(), 2); + ContainerId.newContainerId(am1.getApplicationAttemptId(), 2); rm1.waitForState(nm1, containerId2, RMContainerState.ALLOCATED); RMContainer container = @@ -194,7 +194,7 @@ public void testNormalContainerAllocationWhenDNSUnavailable() throws Exception{ // request a container. am1.allocate("127.0.0.1", 1024, 1, new ArrayList<ContainerId>()); ContainerId containerId2 = - ContainerId.newInstance(am1.getApplicationAttemptId(), 2); + ContainerId.newContainerId(am1.getApplicationAttemptId(), 2); rm1.waitForState(nm1, containerId2, RMContainerState.ALLOCATED); // acquire the container. @@ -247,7 +247,7 @@ private LogAggregationContext getLogAggregationContextFromContainerToken( // request a container. am2.allocate("127.0.0.1", 512, 1, new ArrayList<ContainerId>()); ContainerId containerId = - ContainerId.newInstance(am2.getApplicationAttemptId(), 2); + ContainerId.newContainerId(am2.getApplicationAttemptId(), 2); rm1.waitForState(nm1, containerId, RMContainerState.ALLOCATED); // acquire the container. 
@@ -480,13 +480,13 @@ public RMNodeLabelsManager createNodeLabelManager() { // A has only 10% of x, so it can only allocate one container in label=empty ContainerId containerId = - ContainerId.newInstance(am1.getApplicationAttemptId(), 2); + ContainerId.newContainerId(am1.getApplicationAttemptId(), 2); am1.allocate("*", 1024, 1, new ArrayList<ContainerId>(), ""); Assert.assertTrue(rm1.waitForState(nm3, containerId, RMContainerState.ALLOCATED, 10 * 1000)); // Cannot allocate 2nd label=empty container containerId = - ContainerId.newInstance(am1.getApplicationAttemptId(), 3); + ContainerId.newContainerId(am1.getApplicationAttemptId(), 3); am1.allocate("*", 1024, 1, new ArrayList<ContainerId>(), ""); Assert.assertFalse(rm1.waitForState(nm3, containerId, RMContainerState.ALLOCATED, 10 * 1000)); @@ -495,7 +495,7 @@ public RMNodeLabelsManager createNodeLabelManager() { // We can allocate floor(8000 / 1024) = 7 containers for (int id = 3; id <= 8; id++) { containerId = - ContainerId.newInstance(am1.getApplicationAttemptId(), id); + ContainerId.newContainerId(am1.getApplicationAttemptId(), id); am1.allocate("*", 1024, 1, new ArrayList<ContainerId>(), "x"); Assert.assertTrue(rm1.waitForState(nm1, containerId, RMContainerState.ALLOCATED, 10 * 1000)); @@ -571,7 +571,7 @@ public RMNodeLabelsManager createNodeLabelManager() { // request a container (label = x && y). can only allocate on nm2 am1.allocate("*", 1024, 1, new ArrayList<ContainerId>(), "x && y"); containerId = - ContainerId.newInstance(am1.getApplicationAttemptId(), 2); + ContainerId.newContainerId(am1.getApplicationAttemptId(), 2); Assert.assertFalse(rm1.waitForState(nm1, containerId, RMContainerState.ALLOCATED, 10 * 1000)); Assert.assertTrue(rm1.waitForState(nm2, containerId, @@ -588,7 +588,7 @@ public RMNodeLabelsManager createNodeLabelManager() { // and now b1's queue capacity will be used, cannot allocate more containers // (Maximum capacity reached) am2.allocate("*", 1024, 1, new ArrayList<ContainerId>()); - containerId = ContainerId.newInstance(am2.getApplicationAttemptId(), 2); + containerId = ContainerId.newContainerId(am2.getApplicationAttemptId(), 2); Assert.assertFalse(rm1.waitForState(nm4, containerId, RMContainerState.ALLOCATED, 10 * 1000)); Assert.assertFalse(rm1.waitForState(nm5, containerId, @@ -601,7 +601,7 @@ public RMNodeLabelsManager createNodeLabelManager() { // request a container. try to allocate on nm1 (label = x) and nm3 (label = // y,z). Will successfully allocate on nm3 am3.allocate("*", 1024, 1, new ArrayList<ContainerId>(), "y"); - containerId = ContainerId.newInstance(am3.getApplicationAttemptId(), 2); + containerId = ContainerId.newContainerId(am3.getApplicationAttemptId(), 2); Assert.assertFalse(rm1.waitForState(nm1, containerId, RMContainerState.ALLOCATED, 10 * 1000)); Assert.assertTrue(rm1.waitForState(nm3, containerId, @@ -612,7 +612,7 @@ public RMNodeLabelsManager createNodeLabelManager() { // try to allocate container (request label = y && z) on nm3 (label = y) and // nm4 (label = y,z). Will sucessfully allocate on nm4 only. am3.allocate("*", 1024, 1, new ArrayList<ContainerId>(), "y && z"); - containerId = ContainerId.newInstance(am3.getApplicationAttemptId(), 3); + containerId = ContainerId.newContainerId(am3.getApplicationAttemptId(), 3); Assert.assertFalse(rm1.waitForState(nm3, containerId, RMContainerState.ALLOCATED, 10 * 1000)); Assert.assertTrue(rm1.waitForState(nm4, containerId, @@ -654,7 +654,7 @@ public RMNodeLabelsManager createNodeLabelManager() { // request a container. 
am1.allocate("*", 1024, 1, new ArrayList<ContainerId>(), "x"); containerId = - ContainerId.newInstance(am1.getApplicationAttemptId(), 2); + ContainerId.newContainerId(am1.getApplicationAttemptId(), 2); Assert.assertFalse(rm1.waitForState(nm2, containerId, RMContainerState.ALLOCATED, 10 * 1000)); Assert.assertTrue(rm1.waitForState(nm1, containerId, @@ -669,7 +669,7 @@ public RMNodeLabelsManager createNodeLabelManager() { // request a container. am2.allocate("*", 1024, 1, new ArrayList<ContainerId>(), "y"); - containerId = ContainerId.newInstance(am2.getApplicationAttemptId(), 2); + containerId = ContainerId.newContainerId(am2.getApplicationAttemptId(), 2); Assert.assertFalse(rm1.waitForState(nm1, containerId, RMContainerState.ALLOCATED, 10 * 1000)); Assert.assertTrue(rm1.waitForState(nm2, containerId, @@ -684,7 +684,7 @@ public RMNodeLabelsManager createNodeLabelManager() { // request a container. am3.allocate("*", 1024, 1, new ArrayList<ContainerId>()); - containerId = ContainerId.newInstance(am3.getApplicationAttemptId(), 2); + containerId = ContainerId.newContainerId(am3.getApplicationAttemptId(), 2); Assert.assertFalse(rm1.waitForState(nm2, containerId, RMContainerState.ALLOCATED, 10 * 1000)); Assert.assertTrue(rm1.waitForState(nm3, containerId, @@ -730,7 +730,7 @@ public RMNodeLabelsManager createNodeLabelManager() { // request a container. am1.allocate("*", 1024, 1, new ArrayList<ContainerId>()); containerId = - ContainerId.newInstance(am1.getApplicationAttemptId(), 2); + ContainerId.newContainerId(am1.getApplicationAttemptId(), 2); Assert.assertFalse(rm1.waitForState(nm3, containerId, RMContainerState.ALLOCATED, 10 * 1000)); Assert.assertTrue(rm1.waitForState(nm1, containerId, @@ -745,7 +745,7 @@ public RMNodeLabelsManager createNodeLabelManager() { // request a container. am2.allocate("*", 1024, 1, new ArrayList<ContainerId>()); - containerId = ContainerId.newInstance(am2.getApplicationAttemptId(), 2); + containerId = ContainerId.newContainerId(am2.getApplicationAttemptId(), 2); Assert.assertFalse(rm1.waitForState(nm3, containerId, RMContainerState.ALLOCATED, 10 * 1000)); Assert.assertTrue(rm1.waitForState(nm2, containerId, @@ -760,7 +760,7 @@ public RMNodeLabelsManager createNodeLabelManager() { // request a container. 
am3.allocate("*", 1024, 1, new ArrayList<ContainerId>()); - containerId = ContainerId.newInstance(am3.getApplicationAttemptId(), 2); + containerId = ContainerId.newContainerId(am3.getApplicationAttemptId(), 2); Assert.assertFalse(rm1.waitForState(nm2, containerId, RMContainerState.ALLOCATED, 10 * 1000)); Assert.assertTrue(rm1.waitForState(nm3, containerId, diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairScheduler.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairScheduler.java index 519425145a89e..ca0e954e7290c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairScheduler.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairScheduler.java @@ -3530,7 +3530,7 @@ public void testRecoverRequestAfterPreemption() throws Exception { // ResourceRequest will be empty once NodeUpdate is completed Assert.assertNull(app.getResourceRequest(priority, host)); - ContainerId containerId1 = ContainerId.newInstance(appAttemptId, 1); + ContainerId containerId1 = ContainerId.newContainerId(appAttemptId, 1); RMContainer rmContainer = app.getRMContainer(containerId1); // Create a preempt event and register for preemption diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java index de8d3029778d2..f0dcb562a234c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java @@ -231,7 +231,7 @@ private void testNMTokens(Configuration conf) throws Exception { ApplicationAttemptId.newInstance(appId, 1); ContainerId validContainerId = - ContainerId.newInstance(validAppAttemptId, 0); + ContainerId.newContainerId(validAppAttemptId, 0); NodeId validNode = yarnCluster.getNodeManager(0).getNMContext().getNodeId(); NodeId invalidNode = NodeId.newInstance("InvalidHost", 1234); @@ -311,7 +311,7 @@ private void testNMTokens(Configuration conf) throws Exception { ApplicationAttemptId.newInstance(appId, 2); ContainerId validContainerId2 = - ContainerId.newInstance(validAppAttemptId2, 0); + ContainerId.newContainerId(validAppAttemptId2, 0); org.apache.hadoop.yarn.api.records.Token validContainerToken2 = containerTokenSecretManager.createContainerToken(validContainerId2, @@ -401,7 +401,7 @@ private void testNMTokens(Configuration conf) throws Exception { .createNMToken(validAppAttemptId, validNode, user); org.apache.hadoop.yarn.api.records.Token newContainerToken = containerTokenSecretManager.createContainerToken( - ContainerId.newInstance(attempt2, 1), validNode, user, r, + ContainerId.newContainerId(attempt2, 1), validNode, user, r, Priority.newInstance(0), 0); 
Assert.assertTrue(testStartContainer(rpc, attempt2, validNode, newContainerToken, attempt1NMToken, false).isEmpty()); @@ -638,7 +638,7 @@ private void testContainerToken(Configuration conf) throws IOException, ApplicationId appId = ApplicationId.newInstance(1, 1); ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(appId, 0); - ContainerId cId = ContainerId.newInstance(appAttemptId, 0); + ContainerId cId = ContainerId.newContainerId(appAttemptId, 0); NodeManager nm = yarnCluster.getNodeManager(0); NMTokenSecretManagerInNM nmTokenSecretManagerInNM = nm.getNMContext().getNMTokenSecretManager(); @@ -691,7 +691,7 @@ private void testContainerToken(Configuration conf) throws IOException, } while (containerTokenSecretManager.getCurrentKey().getKeyId() == tamperedContainerTokenSecretManager.getCurrentKey().getKeyId()); - ContainerId cId2 = ContainerId.newInstance(appAttemptId, 1); + ContainerId cId2 = ContainerId.newContainerId(appAttemptId, 1); // Creating modified containerToken Token containerToken2 = tamperedContainerTokenSecretManager.createContainerToken(cId2, nodeId, @@ -733,7 +733,7 @@ private void testContainerTokenWithEpoch(Configuration conf) ApplicationId appId = ApplicationId.newInstance(1, 1); ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(appId, 0); - ContainerId cId = ContainerId.newInstance(appAttemptId, (5L << 40) | 3L); + ContainerId cId = ContainerId.newContainerId(appAttemptId, (5L << 40) | 3L); NodeManager nm = yarnCluster.getNodeManager(0); NMTokenSecretManagerInNM nmTokenSecretManagerInNM = nm.getNMContext().getNMTokenSecretManager();
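The change running through the diff above is mechanical: every ContainerId.newInstance(appAttemptId, id) in the tests becomes ContainerId.newContainerId(appAttemptId, id). A minimal sketch of why the new factory exists, assuming the Hadoop 2.6-era YARN records API; the epoch-in-high-bits layout is inferred from the (5L << 40) | 3L case in the diff:

import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;

public class NewContainerIdSketch {
    public static void main(String[] args) {
        ApplicationId appId = ApplicationId.newInstance(System.currentTimeMillis(), 1);
        ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);

        // newContainerId takes a long, so an RM restart epoch can be packed
        // into the bits above the 40-bit sequence number (here epoch 5,
        // sequence 3); the older int-based newInstance cannot express this.
        ContainerId cId = ContainerId.newContainerId(attemptId, (5L << 40) | 3L);
        System.out.println(cId);
    }
}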
ae6a2d38200dfe98755abfedf645621fe21ecf00
kotlin
Base class for surrounders for statements
p
https://github.com/JetBrains/kotlin
diff --git a/idea/src/org/jetbrains/jet/plugin/codeInsight/surroundWith/statement/KotlinIfSurrounderBase.java b/idea/src/org/jetbrains/jet/plugin/codeInsight/surroundWith/statement/KotlinIfSurrounderBase.java index 44851a6119c68..cc8467b3ef236 100644 --- a/idea/src/org/jetbrains/jet/plugin/codeInsight/surroundWith/statement/KotlinIfSurrounderBase.java +++ b/idea/src/org/jetbrains/jet/plugin/codeInsight/surroundWith/statement/KotlinIfSurrounderBase.java @@ -18,12 +18,10 @@ import com.intellij.codeInsight.CodeInsightUtilBase; -import com.intellij.lang.surroundWith.Surrounder; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.TextRange; import com.intellij.psi.PsiElement; -import com.intellij.util.IncorrectOperationException; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.jet.lang.psi.JetBlockExpression; @@ -34,30 +32,13 @@ import java.lang.String; -public abstract class KotlinIfSurrounderBase implements Surrounder { - - @Override - public boolean isApplicable(@NotNull PsiElement[] elements) { - return true; - } +public abstract class KotlinIfSurrounderBase extends KotlinStatementsSurrounder { @Nullable @Override - public TextRange surroundElements( - @NotNull Project project, @NotNull Editor editor, @NotNull PsiElement[] elements - ) throws IncorrectOperationException { - PsiElement container = elements[0].getParent(); - if (container == null) return null; - return surroundStatements (project, editor, container, elements); - } - - public TextRange surroundStatements(Project project, Editor editor, PsiElement container, PsiElement[] statements) throws IncorrectOperationException{ + protected TextRange surroundStatements(Project project, Editor editor, PsiElement container, PsiElement[] statements) { // TODO extract variables declaration - if (statements.length == 0){ - return null; - } - JetIfExpression ifExpression = (JetIfExpression) JetPsiFactory.createExpression(project, getCodeTemplate()); ifExpression = (JetIfExpression) container.addAfter(ifExpression, statements[statements.length - 1]); diff --git a/idea/src/org/jetbrains/jet/plugin/codeInsight/surroundWith/statement/KotlinStatementsSurrounder.java b/idea/src/org/jetbrains/jet/plugin/codeInsight/surroundWith/statement/KotlinStatementsSurrounder.java new file mode 100644 index 0000000000000..93dd50169c15c --- /dev/null +++ b/idea/src/org/jetbrains/jet/plugin/codeInsight/surroundWith/statement/KotlinStatementsSurrounder.java @@ -0,0 +1,54 @@ +/* + * Copyright 2010-2013 JetBrains s.r.o. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.jetbrains.jet.plugin.codeInsight.surroundWith.statement; + +import com.intellij.lang.surroundWith.Surrounder; +import com.intellij.openapi.editor.Editor; +import com.intellij.openapi.project.Project; +import com.intellij.openapi.util.TextRange; +import com.intellij.psi.PsiElement; +import com.intellij.util.IncorrectOperationException; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; + +public abstract class KotlinStatementsSurrounder implements Surrounder { + + @Override + public boolean isApplicable(@NotNull PsiElement[] elements) { + return elements.length > 0; + } + + @Override + @Nullable + public TextRange surroundElements( + @NotNull Project project, + @NotNull Editor editor, + @NotNull PsiElement[] elements + ) throws IncorrectOperationException { + PsiElement container = elements[0].getParent(); + if (container == null) return null; + return surroundStatements(project, editor, container, elements); + } + + @Nullable + protected abstract TextRange surroundStatements( + final Project project, + final Editor editor, + final PsiElement container, + final PsiElement[] statements + ); +}
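With the null-container and empty-selection handling hoisted into KotlinStatementsSurrounder, a concrete surrounder only implements surroundStatements. A hypothetical subclass sketch; the class name, description, and body are illustrative, not part of the commit:

import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.PsiElement;
import org.jetbrains.annotations.Nullable;

public class KotlinExampleSurrounder extends KotlinStatementsSurrounder {
    @Override
    public String getTemplateDescription() {
        return "example"; // shown in the Surround With popup
    }

    @Nullable
    @Override
    protected TextRange surroundStatements(Project project, Editor editor,
                                           PsiElement container, PsiElement[] statements) {
        // The base class has already checked that the selection is non-empty
        // and that the first element has a parent, so a subclass can go
        // straight to building the wrapper expression and moving the
        // statements into its block.
        return null; // sketch only: return the range to select afterwards
    }
}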
1e84c439e593999ae148a459e560380f59ca7124
elasticsearch
Shard Allocation: Closed indices are not properly taken into account when rebalancing, closes #858.
c
https://github.com/elastic/elasticsearch
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java b/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java
index 6f51e058b99a3..5c84673400c97 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java
@@ -20,6 +20,7 @@
 package org.elasticsearch.cluster.routing;
 
 import org.elasticsearch.cluster.block.ClusterBlocks;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.common.util.concurrent.NotThreadSafe;
 
@@ -113,7 +114,14 @@ public ClusterBlocks getBlocks() {
     }
 
     public int requiredAverageNumberOfShardsPerNode() {
-        return metaData.totalNumberOfShards() / nodesToShards.size();
+        int totalNumberOfShards = 0;
+        // we need to recompute to take closed shards into account
+        for (IndexMetaData indexMetaData : metaData.indices().values()) {
+            if (indexMetaData.state() == IndexMetaData.State.OPEN) {
+                totalNumberOfShards += indexMetaData.totalNumberOfShards();
+            }
+        }
+        return totalNumberOfShards / nodesToShards.size();
     }
 
     public boolean hasUnassigned() {
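The fix replaces metaData.totalNumberOfShards() with a sum over OPEN indices only, so shards of closed indices no longer inflate the per-node average used for rebalancing. A self-contained sketch of the arithmetic; IndexInfo below is a hypothetical stand-in for IndexMetaData, not the elasticsearch class:

import java.util.Map;

public class AverageShardsSketch {

    static final class IndexInfo {
        final boolean open;
        final int totalNumberOfShards;
        IndexInfo(boolean open, int totalNumberOfShards) {
            this.open = open;
            this.totalNumberOfShards = totalNumberOfShards;
        }
    }

    static int requiredAverageNumberOfShardsPerNode(Map<String, IndexInfo> indices, int nodes) {
        int totalNumberOfShards = 0;
        for (IndexInfo index : indices.values()) {
            if (index.open) { // closed indices are skipped, mirroring the fix
                totalNumberOfShards += index.totalNumberOfShards;
            }
        }
        return totalNumberOfShards / nodes;
    }

    public static void main(String[] args) {
        // Two open indices with 10 shards each plus one closed index with 50:
        // counting only open indices gives 20 / 4 = 5 shards per node, where
        // the old computation would have reported 70 / 4 = 17.
        Map<String, IndexInfo> indices = Map.of(
                "a", new IndexInfo(true, 10),
                "b", new IndexInfo(true, 10),
                "c", new IndexInfo(false, 50));
        System.out.println(requiredAverageNumberOfShardsPerNode(indices, 4));
    }
}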
af183a16193c1b8743e609c713c61a7c0ce0ee49
elasticsearch
Fix `indices.recovery.concurrent_small_file_streams` not being dynamically settable. Fixes #4094
c
https://github.com/elastic/elasticsearch
diff --git a/src/main/java/org/elasticsearch/cluster/settings/ClusterDynamicSettingsModule.java b/src/main/java/org/elasticsearch/cluster/settings/ClusterDynamicSettingsModule.java
index 0f9d1060e5407..e46fbf3fb320a 100644
--- a/src/main/java/org/elasticsearch/cluster/settings/ClusterDynamicSettingsModule.java
+++ b/src/main/java/org/elasticsearch/cluster/settings/ClusterDynamicSettingsModule.java
@@ -64,6 +64,7 @@ public ClusterDynamicSettingsModule() {
         clusterDynamicSettings.addDynamicSetting(RecoverySettings.INDICES_RECOVERY_TRANSLOG_SIZE, Validator.BYTES_SIZE);
         clusterDynamicSettings.addDynamicSetting(RecoverySettings.INDICES_RECOVERY_COMPRESS);
         clusterDynamicSettings.addDynamicSetting(RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS, Validator.POSITIVE_INTEGER);
+        clusterDynamicSettings.addDynamicSetting(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, Validator.POSITIVE_INTEGER);
         clusterDynamicSettings.addDynamicSetting(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC, Validator.BYTES_SIZE);
         clusterDynamicSettings.addDynamicSetting(RecoverySettings.INDICES_RECOVERY_MAX_SIZE_PER_SEC, Validator.BYTES_SIZE);
         clusterDynamicSettings.addDynamicSetting(ThreadPool.THREADPOOL_GROUP + "*");
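Once registered with the POSITIVE_INTEGER validator, the setting can be changed at runtime through the cluster update settings API. A sketch against the 1.x Java client, assuming an already-connected Client instance; the value 4 is an arbitrary example:

import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.ImmutableSettings;

public class UpdateSmallFileStreamsSketch {
    static void raiseSmallFileStreams(Client client) {
        // Transient settings take effect immediately and last until a full
        // cluster restart; the validator rejects non-positive values.
        client.admin().cluster().prepareUpdateSettings()
                .setTransientSettings(ImmutableSettings.settingsBuilder()
                        .put("indices.recovery.concurrent_small_file_streams", 4)
                        .build())
                .execute().actionGet();
    }
}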
62b7a6dfa2676ee4386cc8b4e81ce8a5d08bc8f3
camel
Fixed tests. git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@1062115 13f79535-47bb-0310-9956-ffa450edef68
c
https://github.com/apache/camel
diff --git a/components/camel-script/pom.xml b/components/camel-script/pom.xml index ba5507fae016b..4f8f0b4bf3ee5 100644 --- a/components/camel-script/pom.xml +++ b/components/camel-script/pom.xml @@ -105,6 +105,7 @@ </dependency> <!-- testing --> + <!-- TODO: use by language test, which we should refactor into camel-test JAR --> <dependency> <groupId>org.apache.camel</groupId> <artifactId>camel-core</artifactId> @@ -115,9 +116,8 @@ <dependency> <groupId>org.apache.camel</groupId> - <artifactId>camel-spring</artifactId> + <artifactId>camel-test</artifactId> <scope>test</scope> - <optional>true</optional> </dependency> <dependency> diff --git a/components/camel-script/src/test/java/org/apache/camel/builder/script/BeanShellScriptRouteTest.java b/components/camel-script/src/test/java/org/apache/camel/builder/script/BeanShellScriptRouteTest.java index 64894c37996a4..715f17f3432b4 100644 --- a/components/camel-script/src/test/java/org/apache/camel/builder/script/BeanShellScriptRouteTest.java +++ b/components/camel-script/src/test/java/org/apache/camel/builder/script/BeanShellScriptRouteTest.java @@ -19,17 +19,19 @@ import java.util.HashMap; import java.util.Map; -import org.apache.camel.ContextTestSupport; import org.apache.camel.ScriptTestHelper; import org.apache.camel.builder.RouteBuilder; +import org.apache.camel.test.junit4.CamelTestSupport; +import org.junit.Test; import static org.apache.camel.builder.script.ScriptBuilder.script; /** * Unit test for a BeanSheel script */ -public class BeanShellScriptRouteTest extends ContextTestSupport { +public class BeanShellScriptRouteTest extends CamelTestSupport { + @Test public void testSendMatchingMessage() throws Exception { if (!ScriptTestHelper.canRunTestOnThisPlatform()) { return; @@ -45,6 +47,7 @@ public void testSendMatchingMessage() throws Exception { assertMockEndpointsSatisfied(); } + @Test public void testSendNonMatchingMessage() throws Exception { if (!ScriptTestHelper.canRunTestOnThisPlatform()) { return; diff --git a/components/camel-script/src/test/java/org/apache/camel/builder/script/GroovyScriptRouteTest.java b/components/camel-script/src/test/java/org/apache/camel/builder/script/GroovyScriptRouteTest.java index 8101931071682..3dfa21e06e9ba 100644 --- a/components/camel-script/src/test/java/org/apache/camel/builder/script/GroovyScriptRouteTest.java +++ b/components/camel-script/src/test/java/org/apache/camel/builder/script/GroovyScriptRouteTest.java @@ -16,29 +16,32 @@ */ package org.apache.camel.builder.script; -import org.apache.camel.ContextTestSupport; import org.apache.camel.ScriptTestHelper; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.component.mock.MockEndpoint; +import org.apache.camel.test.junit4.CamelTestSupport; +import org.junit.Ignore; +import org.junit.Test; /** * Unit test for a Groovy script based on end-user question. 
*/ -public class GroovyScriptRouteTest extends ContextTestSupport { +@Ignore("May fail on CI server on JDK 1.6") +public class GroovyScriptRouteTest extends CamelTestSupport { + @Test public void testGroovyScript() throws Exception { if (!ScriptTestHelper.canRunTestOnThisPlatform()) { return; } - // TODO: fails on some JDL1.6 boxes -// MockEndpoint mock = getMockEndpoint("mock:result"); -// mock.expectedBodiesReceived("Hello World"); -// mock.expectedHeaderReceived("foo", "Hello World"); -// -// template.sendBodyAndHeader("seda:a", "Hello World", "foo", "London"); -// -// mock.assertIsSatisfied(); + MockEndpoint mock = getMockEndpoint("mock:result"); + mock.expectedBodiesReceived("Hello World"); + mock.expectedHeaderReceived("foo", "Hello World"); + + template.sendBodyAndHeader("seda:a", "Hello World", "foo", "London"); + + mock.assertIsSatisfied(); } protected RouteBuilder createRouteBuilder() throws Exception { diff --git a/components/camel-script/src/test/java/org/apache/camel/builder/script/JavaScriptExpressionTest.java b/components/camel-script/src/test/java/org/apache/camel/builder/script/JavaScriptExpressionTest.java index 6624a9b34a753..7b1fb1c441dd7 100644 --- a/components/camel-script/src/test/java/org/apache/camel/builder/script/JavaScriptExpressionTest.java +++ b/components/camel-script/src/test/java/org/apache/camel/builder/script/JavaScriptExpressionTest.java @@ -19,24 +19,23 @@ import java.util.HashMap; import java.util.Map; -import org.apache.camel.ContextTestSupport; import org.apache.camel.ScriptTestHelper; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.component.mock.MockEndpoint; +import org.apache.camel.test.junit4.CamelTestSupport; +import org.junit.Test; /** * Tests a routing expression using JavaScript */ -public class JavaScriptExpressionTest extends ContextTestSupport { +public class JavaScriptExpressionTest extends CamelTestSupport { + @Test public void testSendMatchingMessage() throws Exception { if (!ScriptTestHelper.canRunTestOnThisPlatform()) { return; } - // TODO Currently, this test fails because the JavaScript expression in createRouteBuilder - // below returns false - // To fix that, we need to figure out how to get the expression to return the right value MockEndpoint mock = getMockEndpoint("mock:result"); mock.expectedMessageCount(1); @@ -50,6 +49,7 @@ public void testSendMatchingMessage() throws Exception { assertMockEndpointsSatisfied(); } + @Test public void testSendNonMatchingMessage() throws Exception { if (!ScriptTestHelper.canRunTestOnThisPlatform()) { return; diff --git a/components/camel-script/src/test/java/org/apache/camel/builder/script/Jsr223Test.java b/components/camel-script/src/test/java/org/apache/camel/builder/script/Jsr223Test.java index 894a133fe4440..627f33278263b 100644 --- a/components/camel-script/src/test/java/org/apache/camel/builder/script/Jsr223Test.java +++ b/components/camel-script/src/test/java/org/apache/camel/builder/script/Jsr223Test.java @@ -20,8 +20,8 @@ import javax.script.ScriptEngineManager; import junit.framework.TestCase; - import org.apache.camel.ScriptTestHelper; +import org.junit.Test; /** * @version $Revision$ @@ -29,6 +29,7 @@ public class Jsr223Test extends TestCase { private String [] scriptNames = {"beanshell", "groovy", "js", "python", "ruby", "javascript"}; + @Test public void testLanguageNames() throws Exception { if (!ScriptTestHelper.canRunTestOnThisPlatform()) { return; diff --git a/components/camel-script/src/test/java/org/apache/camel/builder/script/PythonExpressionTest.java 
b/components/camel-script/src/test/java/org/apache/camel/builder/script/PythonExpressionTest.java index ca03ebdb4ddcc..fdbd802caf16d 100644 --- a/components/camel-script/src/test/java/org/apache/camel/builder/script/PythonExpressionTest.java +++ b/components/camel-script/src/test/java/org/apache/camel/builder/script/PythonExpressionTest.java @@ -19,15 +19,17 @@ import java.util.HashMap; import java.util.Map; -import org.apache.camel.ContextTestSupport; import org.apache.camel.ScriptTestHelper; import org.apache.camel.builder.RouteBuilder; +import org.apache.camel.test.junit4.CamelTestSupport; +import org.junit.Test; /** * Tests a routing expression using Python */ -public class PythonExpressionTest extends ContextTestSupport { - +public class PythonExpressionTest extends CamelTestSupport { + + @Test public void testSendMatchingMessage() throws Exception { if (!ScriptTestHelper.canRunTestOnThisPlatform()) { return; @@ -43,6 +45,7 @@ public void testSendMatchingMessage() throws Exception { assertMockEndpointsSatisfied(); } + @Test public void testSendNonMatchingMessage() throws Exception { if (!ScriptTestHelper.canRunTestOnThisPlatform()) { return; diff --git a/components/camel-script/src/test/java/org/apache/camel/builder/script/RubyExpressionTest.java b/components/camel-script/src/test/java/org/apache/camel/builder/script/RubyExpressionTest.java index edd2d93519136..aaeb97d18d601 100644 --- a/components/camel-script/src/test/java/org/apache/camel/builder/script/RubyExpressionTest.java +++ b/components/camel-script/src/test/java/org/apache/camel/builder/script/RubyExpressionTest.java @@ -19,15 +19,19 @@ import java.util.HashMap; import java.util.Map; -import org.apache.camel.ContextTestSupport; import org.apache.camel.ScriptTestHelper; import org.apache.camel.builder.RouteBuilder; +import org.apache.camel.test.junit4.CamelTestSupport; +import org.junit.Ignore; +import org.junit.Test; /** * Tests a routing expression using Ruby */ -public class RubyExpressionTest extends ContextTestSupport { +@Ignore("May fail on CI server on JDK 1.6") +public class RubyExpressionTest extends CamelTestSupport { + @Test public void testSendMatchingMessage() throws Exception { if (!ScriptTestHelper.canRunTestOnThisPlatform()) { return; @@ -43,6 +47,7 @@ public void testSendMatchingMessage() throws Exception { assertMockEndpointsSatisfied(); } + @Test public void testSendNonMatchingMessage() throws Exception { if (!ScriptTestHelper.canRunTestOnThisPlatform()) { return; diff --git a/components/camel-script/src/test/java/org/apache/camel/language/script/JavaScriptLanguageTest.java b/components/camel-script/src/test/java/org/apache/camel/language/script/JavaScriptLanguageTest.java index 7a6a2ec66f22a..a7358aa1d9bc0 100644 --- a/components/camel-script/src/test/java/org/apache/camel/language/script/JavaScriptLanguageTest.java +++ b/components/camel-script/src/test/java/org/apache/camel/language/script/JavaScriptLanguageTest.java @@ -18,12 +18,14 @@ import org.apache.camel.LanguageTestSupport; import org.apache.camel.ScriptTestHelper; +import org.junit.Test; /** * @version $Revision$ */ public class JavaScriptLanguageTest extends LanguageTestSupport { - + + @Test public void testLanguageExpressions() throws Exception { if (!ScriptTestHelper.canRunTestOnThisPlatform()) { return; diff --git a/components/camel-script/src/test/java/org/apache/camel/language/script/PythonLanguageTest.java b/components/camel-script/src/test/java/org/apache/camel/language/script/PythonLanguageTest.java index cfbce63899845..ee8e04a57cae9 
100644 --- a/components/camel-script/src/test/java/org/apache/camel/language/script/PythonLanguageTest.java +++ b/components/camel-script/src/test/java/org/apache/camel/language/script/PythonLanguageTest.java @@ -18,12 +18,14 @@ import org.apache.camel.LanguageTestSupport; import org.apache.camel.ScriptTestHelper; +import org.junit.Test; /** * @version $Revision$ */ public class PythonLanguageTest extends LanguageTestSupport { - + + @Test public void testLanguageExpressions() throws Exception { if (!ScriptTestHelper.canRunTestOnThisPlatform()) { return; diff --git a/components/camel-spring/src/test/java/org/apache/camel/spring/DefaultJMXAgentTest.java b/components/camel-spring/src/test/java/org/apache/camel/spring/DefaultJMXAgentTest.java index c75fb27e666d6..28d748ea9242f 100644 --- a/components/camel-spring/src/test/java/org/apache/camel/spring/DefaultJMXAgentTest.java +++ b/components/camel-spring/src/test/java/org/apache/camel/spring/DefaultJMXAgentTest.java @@ -18,7 +18,6 @@ import java.lang.management.ManagementFactory; import java.util.List; - import javax.management.MBeanServer; import javax.management.MBeanServerConnection; import javax.management.MBeanServerFactory; @@ -61,11 +60,15 @@ protected void releaseMBeanServers() { } public void testQueryMbeans() throws Exception { - int routes = mbsc.queryNames(new ObjectName("org.apache.camel" + ":type=routes,*"), null).size(); - int processors = mbsc.queryNames(new ObjectName("org.apache.camel" + ":type=processors,*"), null).size(); + // whats the numbers before, because the JVM can have left overs when unit testing + int before = mbsc.queryNames(new ObjectName("org.apache.camel" + ":type=consumers,*"), null).size(); + + // start route should enlist the consumer to JMX + context.startRoute("foo"); + + int after = mbsc.queryNames(new ObjectName("org.apache.camel" + ":type=consumers,*"), null).size(); - assertTrue("Should contain routes", routes > 0); - assertTrue("Should contain processors", processors > 0); + assertTrue("Should have added consumer to JMX, before: " + before + ", after: " + after, after > before); } @Override diff --git a/components/camel-spring/src/test/java/org/apache/camel/spring/DisableJmxAgentTest.java b/components/camel-spring/src/test/java/org/apache/camel/spring/DisableJmxAgentTest.java index da146df47b06a..bb38e1dbd43cd 100644 --- a/components/camel-spring/src/test/java/org/apache/camel/spring/DisableJmxAgentTest.java +++ b/components/camel-spring/src/test/java/org/apache/camel/spring/DisableJmxAgentTest.java @@ -36,8 +36,15 @@ protected AbstractXmlApplicationContext createApplicationContext() { @Override public void testQueryMbeans() throws Exception { - assertEquals(0, mbsc.queryNames(new ObjectName("org.apache.camel" + ":type=routes,*"), null).size()); - assertEquals(0, mbsc.queryNames(new ObjectName("org.apache.camel" + ":type=processors,*"), null).size()); + // whats the numbers before, because the JVM can have left overs when unit testing + int before = mbsc.queryNames(new ObjectName("org.apache.camel" + ":type=consumers,*"), null).size(); + + // start route should enlist the consumer to JMX if JMX was enabled + context.startRoute("foo"); + + int after = mbsc.queryNames(new ObjectName("org.apache.camel" + ":type=consumers,*"), null).size(); + + assertEquals("Should not have added consumer to JMX", before, after); } } diff --git a/components/camel-spring/src/test/resources/org/apache/camel/spring/defaultJmxConfig.xml b/components/camel-spring/src/test/resources/org/apache/camel/spring/defaultJmxConfig.xml 
index a17858b5d3c8c..252998f2de2a6 100644 --- a/components/camel-spring/src/test/resources/org/apache/camel/spring/defaultJmxConfig.xml +++ b/components/camel-spring/src/test/resources/org/apache/camel/spring/defaultJmxConfig.xml @@ -24,7 +24,7 @@ <!-- START SNIPPET: example --> <camelContext xmlns="http://camel.apache.org/schema/spring"> - <route> + <route id="foo" autoStartup="false"> <from uri="seda:start"/> <to uri="mock:result"/> </route> diff --git a/components/camel-spring/src/test/resources/org/apache/camel/spring/disableJmxConfig.xml b/components/camel-spring/src/test/resources/org/apache/camel/spring/disableJmxConfig.xml index d5758c1b83f62..3a3a1c663b448 100644 --- a/components/camel-spring/src/test/resources/org/apache/camel/spring/disableJmxConfig.xml +++ b/components/camel-spring/src/test/resources/org/apache/camel/spring/disableJmxConfig.xml @@ -26,7 +26,7 @@ <camelContext xmlns="http://camel.apache.org/schema/spring"> <jmxAgent id="agent" disabled="true"/> - <route> + <route id="foo" autoStartup="false"> <from uri="seda:start"/> <to uri="mock:result"/> </route>
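A sketch of the delta-style JMX assertion the tests above move to: absolute MBean counts are unreliable because earlier tests in the same JVM can leave beans registered, so count, act, then compare. The domain pattern follows the Camel one in the diff; starting the route is elided since it needs a live CamelContext.

import java.lang.management.ManagementFactory;
import javax.management.MBeanServer;
import javax.management.ObjectName;

public class JmxDeltaAssertDemo {
    public static void main(String[] args) throws Exception {
        MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
        ObjectName pattern = new ObjectName("org.apache.camel:type=consumers,*");

        // Whatever is already registered does not matter; only the delta does.
        int before = mbs.queryNames(pattern, null).size();
        // ... start the route here, e.g. context.startRoute("foo") ...
        int after = mbs.queryNames(pattern, null).size();

        if (after < before) {
            throw new AssertionError("consumer MBeans should not disappear");
        }
        System.out.println("before=" + before + ", after=" + after);
    }
}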
8091ea54d5e5c47da95ac1b1d921134399a5f066
hadoop
YARN-2730. DefaultContainerExecutor runs only one localizer at a time. Contributed by Siqi Li (cherry picked from commit 6157ace5475fff8d2513fd3cd99134b532b0b406)
c
https://github.com/apache/hadoop
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt index 9ae57e4905d51..56359e5b00736 100644 --- a/hadoop-yarn-project/CHANGES.txt +++ b/hadoop-yarn-project/CHANGES.txt @@ -809,6 +809,9 @@ Release 2.6.0 - UNRELEASED YARN-2785. Fixed intermittent TestContainerResourceUsage failure. (Varun Vasudev via zjshen) + YARN-2730. DefaultContainerExecutor runs only one localizer at a time + (Siqi Li via jlowe) + Release 2.5.1 - 2014-09-05 INCOMPATIBLE CHANGES diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java index 834b138fcd272..cc2de999bc838 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java @@ -94,7 +94,7 @@ public void init() throws IOException { } @Override - public synchronized void startLocalizer(Path nmPrivateContainerTokensPath, + public void startLocalizer(Path nmPrivateContainerTokensPath, InetSocketAddress nmAddr, String user, String appId, String locId, LocalDirsHandlerService dirsHandler) throws IOException, InterruptedException { @@ -102,10 +102,6 @@ public synchronized void startLocalizer(Path nmPrivateContainerTokensPath, List<String> localDirs = dirsHandler.getLocalDirs(); List<String> logDirs = dirsHandler.getLogDirs(); - ContainerLocalizer localizer = - new ContainerLocalizer(lfs, user, appId, locId, getPaths(localDirs), - RecordFactoryProvider.getRecordFactory(getConf())); - createUserLocalDirs(localDirs, user); createUserCacheDirs(localDirs, user); createAppDirs(localDirs, user, appId); @@ -118,8 +114,17 @@ public synchronized void startLocalizer(Path nmPrivateContainerTokensPath, Path tokenDst = new Path(appStorageDir, tokenFn); copyFile(nmPrivateContainerTokensPath, tokenDst, user); LOG.info("Copying from " + nmPrivateContainerTokensPath + " to " + tokenDst); - lfs.setWorkingDirectory(appStorageDir); - LOG.info("CWD set to " + appStorageDir + " = " + lfs.getWorkingDirectory()); + + + FileContext localizerFc = FileContext.getFileContext( + lfs.getDefaultFileSystem(), getConf()); + localizerFc.setUMask(lfs.getUMask()); + localizerFc.setWorkingDirectory(appStorageDir); + LOG.info("Localizer CWD set to " + appStorageDir + " = " + + localizerFc.getWorkingDirectory()); + ContainerLocalizer localizer = + new ContainerLocalizer(localizerFc, user, appId, locId, + getPaths(localDirs), RecordFactoryProvider.getRecordFactory(getConf())); // TODO: DO it over RPC for maintaining similarity? localizer.runLocalization(nmAddr); }
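A minimal sketch (not Hadoop code) of the pattern behind this fix: the old code mutated shared executor state via lfs.setWorkingDirectory, which is why startLocalizer had to be synchronized; giving each call its own context object removes the lock and lets localizers for different containers run in parallel. The TaskContext class is a hypothetical stand-in for the per-localizer FileContext in the diff.

import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class PerTaskContextDemo {
    // Hypothetical stand-in for FileContext: holds a per-task working dir.
    static final class TaskContext {
        final Path workingDir;
        TaskContext(Path workingDir) { this.workingDir = workingDir; }
        Path resolve(String name) { return workingDir.resolve(name); }
    }

    public static void main(String[] args) {
        ExecutorService pool = Executors.newFixedThreadPool(4);
        for (int i = 0; i < 4; i++) {
            final int id = i;
            pool.submit(() -> {
                // Each localizer owns its context; no synchronized needed.
                TaskContext ctx = new TaskContext(Paths.get("/tmp/app_" + id));
                System.out.println("localizing into " + ctx.resolve("container_tokens"));
            });
        }
        pool.shutdown();
    }
}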
1e6f2e79b9e0162f8a111acbe7f2876d0d8eeebe
spring-framework
Adapted getXmlAsSource implementation to avoid compilation failure in IntelliJ IDEA
p
https://github.com/spring-projects/spring-framework
diff --git a/spring-jdbc/src/main/java/org/springframework/jdbc/support/xml/Jdbc4SqlXmlHandler.java b/spring-jdbc/src/main/java/org/springframework/jdbc/support/xml/Jdbc4SqlXmlHandler.java index 0234cf59b25f..247e53681467 100644 --- a/spring-jdbc/src/main/java/org/springframework/jdbc/support/xml/Jdbc4SqlXmlHandler.java +++ b/spring-jdbc/src/main/java/org/springframework/jdbc/support/xml/Jdbc4SqlXmlHandler.java @@ -1,5 +1,5 @@ /* - * Copyright 2002-2012 the original author or authors. + * Copyright 2002-2013 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -23,15 +23,15 @@ import java.sql.ResultSet; import java.sql.SQLException; import java.sql.SQLXML; - import javax.xml.transform.Result; import javax.xml.transform.Source; import javax.xml.transform.dom.DOMResult; import javax.xml.transform.dom.DOMSource; -import org.springframework.dao.DataAccessResourceFailureException; import org.w3c.dom.Document; +import org.springframework.dao.DataAccessResourceFailureException; + /** * Default implementation of the {@link SqlXmlHandler} interface. * Provides database-specific implementations for storing and @@ -83,12 +83,14 @@ public Reader getXmlAsCharacterStream(ResultSet rs, int columnIndex) throws SQLE @Override public Source getXmlAsSource(ResultSet rs, String columnName, Class<? extends Source> sourceClass) throws SQLException { - return rs.getSQLXML(columnName).getSource(sourceClass != null ? sourceClass : DOMSource.class); + SQLXML xmlObject = rs.getSQLXML(columnName); + return (sourceClass != null ? xmlObject.getSource(sourceClass) : xmlObject.getSource(DOMSource.class)); } @Override public Source getXmlAsSource(ResultSet rs, int columnIndex, Class<? extends Source> sourceClass) throws SQLException { - return rs.getSQLXML(columnIndex).getSource(sourceClass != null ? sourceClass : DOMSource.class); + SQLXML xmlObject = rs.getSQLXML(columnIndex); + return (sourceClass != null ? xmlObject.getSource(sourceClass) : xmlObject.getSource(DOMSource.class)); }
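A sketch of the generics wrinkle the change sidesteps. In the ternary form the compiler has to infer a single type from Class<? extends Source> and Class<DOMSource>, and IDEA's compiler rejected the result against the signature <T extends Source> T getSource(Class<T>); branching explicitly gives each call an unambiguous type argument. The getSource method below is a hypothetical stand-in for SQLXML.getSource.

import javax.xml.transform.Source;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.sax.SAXSource;

public class TernaryInferenceDemo {
    // Hypothetical stand-in for SQLXML.getSource(Class<T>).
    static <T extends Source> T getSource(Class<T> sourceClass) {
        System.out.println("requested " + sourceClass.getSimpleName());
        return null; // a real implementation would build the Source
    }

    static Source lookup(Class<? extends Source> sourceClass) {
        // Problematic form (may not compile in every toolchain):
        // return getSource(sourceClass != null ? sourceClass : DOMSource.class);

        // Portable form, mirroring the committed fix:
        return (sourceClass != null ? getSource(sourceClass) : getSource(DOMSource.class));
    }

    public static void main(String[] args) {
        lookup(SAXSource.class);
        lookup(null);
    }
}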
2d7af3e17d5c33f79890cc7993b37da1b7d60fc9
camel
tidied up some of the default JMS configurations; for more detail see http://cwiki.apache.org/CAMEL/jms.html git-svn-id: https://svn.apache.org/repos/asf/activemq/camel/trunk@545124 13f79535-47bb-0310-9956-ffa450edef68
p
https://github.com/apache/camel
diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsConfiguration.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsConfiguration.java index 40a58d7552d12..1961d6a8d8fb3 100644 --- a/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsConfiguration.java +++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsConfiguration.java @@ -43,39 +43,42 @@ public class JmsConfiguration implements Cloneable { protected static final String TRANSACTED = "TRANSACTED"; protected static final String CLIENT_ACKNOWLEDGE = "CLIENT_ACKNOWLEDGE"; + protected static final String AUTO_ACKNOWLEDGE = "AUTO_ACKNOWLEDGE"; + protected static final String DUPS_OK_ACKNOWLEDGE = "DUPS_OK_ACKNOWLEDGE"; + private ConnectionFactory connectionFactory; private ConnectionFactory templateConnectionFactory; private ConnectionFactory listenerConnectionFactory; private int acknowledgementMode = -1; - private String acknowledgementModeName; + private String acknowledgementModeName = AUTO_ACKNOWLEDGE; // Used to configure the spring Container private ExceptionListener exceptionListener; private ConsumerType consumerType = ConsumerType.Default; - private boolean autoStartup; + private boolean autoStartup = true; private boolean acceptMessagesWhileStopping; private String clientId; private String durableSubscriptionName; private boolean subscriptionDurable; - private boolean exposeListenerSession; + private boolean exposeListenerSession = true; private TaskExecutor taskExecutor; private boolean pubSubNoLocal; - private int concurrentConsumers = -1; - private int maxMessagesPerTask = -1; + private int concurrentConsumers = 1; + private int maxMessagesPerTask = 1; private ServerSessionFactory serverSessionFactory; private int cacheLevel = -1; - private String cacheLevelName; + private String cacheLevelName = "CACHE_CONSUMER"; private long recoveryInterval = -1; private long receiveTimeout = -1; - private int idleTaskExecutionLimit = -1; - private int maxConcurrentConsumers = -1; + private int idleTaskExecutionLimit = 1; + private int maxConcurrentConsumers = 1; // JmsTemplate only - private boolean useVersion102; - private boolean explicitQosEnabled; + private boolean useVersion102 = false; + private boolean explicitQosEnabled = false; private boolean deliveryPersistent = true; private long timeToLive = -1; private MessageConverter messageConverter; private boolean messageIdEnabled = true; - private boolean messageTimestampEnabled; + private boolean messageTimestampEnabled = true; private int priority = -1; // Transaction related configuration private boolean transacted;
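With the tightened defaults above (AUTO_ACKNOWLEDGE, one concurrent consumer, CACHE_CONSUMER caching, autoStartup on), a bare JMS endpoint needs no options. A minimal route sketch; the endpoint names are illustrative and the URI parameters mirror the JmsConfiguration properties changed in the diff.

import org.apache.camel.builder.RouteBuilder;

public class JmsDefaultsRoute extends RouteBuilder {
    @Override
    public void configure() {
        // Relies entirely on the defaults set in JmsConfiguration.
        from("jms:queue:orders").to("log:orders");

        // Overrides two of those defaults for a busier queue.
        from("jms:queue:audit?concurrentConsumers=5&acknowledgementModeName=CLIENT_ACKNOWLEDGE")
            .to("log:audit");
    }
}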
0995565b24eeac3652ecef1e5ffbe5b872ac8a5c
restlet-framework-java
EvaluatePrecondition* to Request* renamed
p
https://github.com/restlet/restlet-framework-java
diff --git a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/EvaluatePreconditionService.java b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/RequestService.java similarity index 94% rename from modules/org.restlet.test/src/org/restlet/test/jaxrs/services/EvaluatePreconditionService.java rename to modules/org.restlet.test/src/org/restlet/test/jaxrs/services/RequestService.java index 326513ab69..24a7e6cee7 100644 --- a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/EvaluatePreconditionService.java +++ b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/RequestService.java @@ -12,7 +12,7 @@ import javax.ws.rs.core.Response.ResponseBuilder; @Path("/requestTestService") -public class EvaluatePreconditionService { +public class RequestService { @GET @Path("date") diff --git a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/AllServiceTests.java b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/AllServiceTests.java index 1bb0862940..aa03901c50 100644 --- a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/AllServiceTests.java +++ b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/AllServiceTests.java @@ -31,7 +31,7 @@ public static Test suite() { mySuite.addTestSuite(SimpleHouseTest.class); mySuite.addTestSuite(MultipleResourcesTest.class); mySuite.addTestSuite(DoublePathTest.class); - mySuite.addTestSuite(EvaluatePreconditionTest.class); + mySuite.addTestSuite(RequestTest.class); return mySuite; } } diff --git a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/EvaluatePreconditionTest.java b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/RequestTest.java similarity index 75% rename from modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/EvaluatePreconditionTest.java rename to modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/RequestTest.java index 96aacb6616..01c403267e 100644 --- a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/EvaluatePreconditionTest.java +++ b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/RequestTest.java @@ -32,7 +32,7 @@ import org.restlet.data.Status; import org.restlet.data.Tag; import org.restlet.ext.jaxrs.util.Util; -import org.restlet.test.jaxrs.services.EvaluatePreconditionService; +import org.restlet.test.jaxrs.services.RequestService; /** * This test class checks if the Request.evaluatePreconditions methods works @@ -40,7 +40,7 @@ * * @author Stephan Koops */ -public class EvaluatePreconditionTest extends JaxRsTestCase { +public class RequestTest extends JaxRsTestCase { private static final Status PREC_FAILED = Status.CLIENT_ERROR_PRECONDITION_FAILED; /** @@ -79,7 +79,7 @@ private static Response accessServer(Class<?> klasse, String subPath, @Override @SuppressWarnings("unchecked") protected Collection createRootResourceColl() { - return Collections.singleton(EvaluatePreconditionService.class); + return Collections.singleton(RequestService.class); } @Override @@ -91,23 +91,23 @@ public void testGetDateAndEntityTag() throws Exception { Conditions conditions = new Conditions(); conditions.setModifiedSince(BEFORE); conditions.setMatch(Util.createList(getEntityTagFromDatastore())); - Response response = accessServer(EvaluatePreconditionService.class, - "date", Method.GET, conditions); + Response response = accessServer(RequestService.class, "date", + Method.GET, conditions); assertEquals(Status.SUCCESS_OK, response.getStatus()); - 
response = accessServer(EvaluatePreconditionService.class, "date", - Method.PUT, conditions); + response = accessServer(RequestService.class, "date", Method.PUT, + conditions); assertEquals(Status.SUCCESS_OK, response.getStatus()); conditions = new Conditions(); conditions.setModifiedSince(AFTER); conditions.setMatch(Util.createList(getEntityTagFromDatastore())); - response = accessServer(EvaluatePreconditionService.class, "date", - Method.GET, conditions); + response = accessServer(RequestService.class, "date", Method.GET, + conditions); assertEquals(Status.REDIRECTION_NOT_MODIFIED, response.getStatus()); - response = accessServer(EvaluatePreconditionService.class, "date", - Method.PUT, conditions); + response = accessServer(RequestService.class, "date", Method.PUT, + conditions); assertEquals(PREC_FAILED, response.getStatus()); assertTrue("Entity must contain \"was not modified\"", response .getEntity().getText().contains( @@ -116,8 +116,8 @@ public void testGetDateAndEntityTag() throws Exception { conditions = new Conditions(); conditions.setModifiedSince(BEFORE); conditions.setMatch(Util.createList(new Tag("shkhsdk"))); - response = accessServer(EvaluatePreconditionService.class, "date", - Method.GET, conditions); + response = accessServer(RequestService.class, "date", Method.GET, + conditions); assertEquals(PREC_FAILED, response.getStatus()); String entityText = response.getEntity().getText(); assertTrue( @@ -127,8 +127,8 @@ public void testGetDateAndEntityTag() throws Exception { || entityText .contains("The entity does not match Entity Tag")); - response = accessServer(EvaluatePreconditionService.class, "date", - Method.PUT, conditions); + response = accessServer(RequestService.class, "date", Method.PUT, + conditions); assertEquals(PREC_FAILED, response.getStatus()); assertTrue( "Entity must contain \"was not modified\" or \"does not match Entity Tag\", but is \"" @@ -140,8 +140,8 @@ public void testGetDateAndEntityTag() throws Exception { conditions = new Conditions(); conditions.setModifiedSince(AFTER); conditions.setMatch(Util.createList(new Tag("shkhsdk"))); - response = accessServer(EvaluatePreconditionService.class, "date", - Method.GET, conditions); + response = accessServer(RequestService.class, "date", Method.GET, + conditions); assertEquals(PREC_FAILED, response.getStatus()); entityText = response.getEntity().getText(); assertTrue( @@ -151,8 +151,8 @@ public void testGetDateAndEntityTag() throws Exception { || entityText .contains("The entity does not match Entity Tag")); - response = accessServer(EvaluatePreconditionService.class, "date", - Method.PUT, conditions); + response = accessServer(RequestService.class, "date", Method.PUT, + conditions); assertEquals(PREC_FAILED, response.getStatus()); assertTrue( "Entity must contain \"was not modified\" or \"does not match Entity Tag\", but is \"" @@ -166,8 +166,8 @@ public void testGetDateAndEntityTag() throws Exception { public void testGetDateNotModified() throws Exception { Conditions conditions = new Conditions(); conditions.setModifiedSince(AFTER); - Response response = accessServer(EvaluatePreconditionService.class, - "date", Method.GET, conditions); + Response response = accessServer(RequestService.class, "date", + Method.GET, conditions); assertEquals(Status.REDIRECTION_NOT_MODIFIED, response.getStatus()); assertEquals(0, response.getEntity().getText().length()); // from RFC 2616, Section 10.3.5 @@ -183,20 +183,19 @@ public void testGetDateNotModified() throws Exception { public void testGetEntityTagMatch() throws 
Exception { Conditions conditions = new Conditions(); conditions.setMatch(Util.createList(getEntityTagFromDatastore())); - Response response = accessServer(EvaluatePreconditionService.class, - "date", Method.GET, conditions); + Response response = accessServer(RequestService.class, "date", + Method.GET, conditions); assertEquals(Status.SUCCESS_OK, response.getStatus()); - assertEquals(EvaluatePreconditionService - .getLastModificationDateFromDatastore(), response.getEntity() - .getModificationDate()); + assertEquals(RequestService.getLastModificationDateFromDatastore(), + response.getEntity().getModificationDate()); assertEquals(getEntityTagFromDatastore(), response.getEntity().getTag()); assertNotNull(response.getEntity().getText()); assertTrue(response.getEntity().getSize() > 0); conditions = new Conditions(); conditions.setMatch(Util.createList(new Tag("affer"))); - response = accessServer(EvaluatePreconditionService.class, "date", - Method.GET, conditions); + response = accessServer(RequestService.class, "date", Method.GET, + conditions); assertEquals(PREC_FAILED, response.getStatus()); assertTrue("Entity must contain \"does not match Entity Tag\"", response.getEntity().getText().contains( @@ -206,8 +205,8 @@ public void testGetEntityTagMatch() throws Exception { public void testGetEntityTagNoneMatch() throws Exception { Conditions conditions = new Conditions(); conditions.setNoneMatch(Util.createList(getEntityTagFromDatastore())); - Response response = accessServer(EvaluatePreconditionService.class, - "date", Method.GET, conditions); + Response response = accessServer(RequestService.class, "date", + Method.GET, conditions); assertEquals(PREC_FAILED, response.getStatus()); assertTrue("Entity must contain \"matches Entity Tag\"", response .getEntity().getText() @@ -215,36 +214,34 @@ public void testGetEntityTagNoneMatch() throws Exception { conditions = new Conditions(); conditions.setNoneMatch(Util.createList(new Tag("affer"))); - response = accessServer(EvaluatePreconditionService.class, "date", - Method.GET, conditions); + response = accessServer(RequestService.class, "date", Method.GET, + conditions); assertEquals(Status.SUCCESS_OK, response.getStatus()); } /** - * @see EvaluatePreconditionService#getLastModificationDateFromDatastore() + * @see RequestService#getLastModificationDateFromDatastore() * @throws Exception */ public void testGetModifiedSince() throws Exception { Conditions conditions = new Conditions(); conditions.setModifiedSince(BEFORE); - Response response = accessServer(EvaluatePreconditionService.class, - "date", Method.GET, conditions); + Response response = accessServer(RequestService.class, "date", + Method.GET, conditions); assertEquals(Status.SUCCESS_OK, response.getStatus()); - assertEquals(EvaluatePreconditionService - .getLastModificationDateFromDatastore(), response.getEntity() - .getModificationDate()); + assertEquals(RequestService.getLastModificationDateFromDatastore(), + response.getEntity().getModificationDate()); assertEquals(getEntityTagFromDatastore(), response.getEntity().getTag()); assertNotNull(response.getEntity().getText()); assertTrue(response.getEntity().getSize() > 0); conditions = new Conditions(); conditions.setModifiedSince(AFTER); - response = accessServer(EvaluatePreconditionService.class, "date", - Method.GET, conditions); + response = accessServer(RequestService.class, "date", Method.GET, + conditions); assertEquals(Status.REDIRECTION_NOT_MODIFIED, response.getStatus()); - assertEquals(EvaluatePreconditionService - 
.getLastModificationDateFromDatastore(), response.getEntity() - .getModificationDate()); + assertEquals(RequestService.getLastModificationDateFromDatastore(), + response.getEntity().getModificationDate()); assertEquals(getEntityTagFromDatastore(), response.getEntity().getTag()); assertEquals(0, response.getEntity().getSize()); @@ -256,27 +253,26 @@ public void testGetModifiedSince() throws Exception { * @return */ private Tag getEntityTagFromDatastore() { - return Util.convertEntityTag(EvaluatePreconditionService - .getEntityTagFromDatastore()); + return Util + .convertEntityTag(RequestService.getEntityTagFromDatastore()); } public void testGetUnmodifiedSince() throws Exception { Conditions conditions = new Conditions(); conditions.setUnmodifiedSince(AFTER); - Response response = accessServer(EvaluatePreconditionService.class, - "date", Method.GET, conditions); + Response response = accessServer(RequestService.class, "date", + Method.GET, conditions); assertEquals(Status.SUCCESS_OK, response.getStatus()); - assertEquals(EvaluatePreconditionService - .getLastModificationDateFromDatastore(), response.getEntity() - .getModificationDate()); + assertEquals(RequestService.getLastModificationDateFromDatastore(), + response.getEntity().getModificationDate()); assertEquals(getEntityTagFromDatastore(), response.getEntity().getTag()); assertNotNull(response.getEntity().getText()); assertTrue(response.getEntity().getSize() > 0); conditions = new Conditions(); conditions.setUnmodifiedSince(BEFORE); - response = accessServer(EvaluatePreconditionService.class, "date", - Method.GET, conditions); + response = accessServer(RequestService.class, "date", Method.GET, + conditions); assertEquals(PREC_FAILED, response.getStatus()); assertTrue("Entity must contain \"was modified\"", response.getEntity() .getText().contains("The entity was modified since")); @@ -286,20 +282,20 @@ public void testGetUnmodifiedSince() throws Exception { } /** - * @see EvaluatePreconditionService#getLastModificationDateFromDatastore() + * @see RequestService#getLastModificationDateFromDatastore() * @throws Exception */ public void testPutModifiedSince() throws Exception { Conditions conditions = new Conditions(); conditions.setModifiedSince(BEFORE); - Response response = accessServer(EvaluatePreconditionService.class, - "date", Method.PUT, conditions); + Response response = accessServer(RequestService.class, "date", + Method.PUT, conditions); assertEquals(Status.SUCCESS_OK, response.getStatus()); conditions = new Conditions(); conditions.setModifiedSince(AFTER); - response = accessServer(EvaluatePreconditionService.class, "date", - Method.PUT, conditions); + response = accessServer(RequestService.class, "date", Method.PUT, + conditions); assertEquals(PREC_FAILED, response.getStatus()); assertTrue("Entity must contain \"was not modified\"", response .getEntity().getText().contains( @@ -309,14 +305,14 @@ public void testPutModifiedSince() throws Exception { public void testPutUnmodifiedSince() throws Exception { Conditions conditions = new Conditions(); conditions.setUnmodifiedSince(AFTER); - Response response = accessServer(EvaluatePreconditionService.class, - "date", Method.PUT, conditions); + Response response = accessServer(RequestService.class, "date", + Method.PUT, conditions); assertEquals(Status.SUCCESS_OK, response.getStatus()); conditions = new Conditions(); conditions.setUnmodifiedSince(BEFORE); - response = accessServer(EvaluatePreconditionService.class, "date", - Method.PUT, conditions); + response = 
accessServer(RequestService.class, "date", Method.PUT, + conditions); assertEquals(PREC_FAILED, response.getStatus()); assertTrue("Entity must contain \"was not modified\"", response .getEntity().getText() @@ -327,8 +323,7 @@ public void testPutUnmodifiedSince() throws Exception { } public void testOptions() { - Response response = accessServer(EvaluatePreconditionService.class, - Method.OPTIONS); + Response response = accessServer(RequestService.class, Method.OPTIONS); Set<Method> allowedMethods = response.getAllowedMethods(); assertTrue("allowedOptions must contain ABC", allowedMethods .contains(Method.valueOf("ABC")));
063b8da4cfd78758f2eef5244008dc65f0557a4b
orientdb
Patch by Luca Molino: handle JSON documents where the @type field is not necessarily the first attribute
a
https://github.com/orientechnologies/orientdb
diff --git a/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerJSON.java b/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerJSON.java index 7ad53982e4b..1fa83610f70 100644 --- a/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerJSON.java +++ b/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerJSON.java @@ -217,7 +217,7 @@ private Object getValue(final ODocument iRecord, String iFieldName, String iFiel // EMPTY, RETURN an EMPTY HASHMAP return new HashMap<String, Object>(); - if (fields[0].equals("\"@type\"")) + if (hasTypeField(fields)) // OBJECT return fromString(iRecord.getDatabase(), iFieldValue, null); else { @@ -335,6 +335,8 @@ else if (iFieldValueAsString.startsWith("{") && iFieldValueAsString.endsWith("}" return fromString(iRecord.getDatabase(), iFieldValueAsString); case DATE: + if (iFieldValueAsString == null || iFieldValueAsString.equals("")) + return null; try { // TRY TO PARSE AS LONG return Long.parseLong(iFieldValueAsString); @@ -703,6 +705,15 @@ private Integer getDepthLevel(final ORecordSchemaAware<?> record, final Map<Stri return depthLevel; } + private boolean hasTypeField(String[] fields) { + for (int i = 0; i < fields.length; i = i + 2) { + if (fields[i].equals("\"@type\"")) { + return true; + } + } + return false; + } + @Override public String toString() { return NAME;
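A self-contained sketch of the scan the patch introduces: field names sit at the even indices of the flattened key/value array the parser produces, so every key position is checked for the "@type" marker instead of only fields[0].

public class TypeFieldScanDemo {
    static boolean hasTypeField(String[] fields) {
        for (int i = 0; i < fields.length; i += 2) {
            if ("\"@type\"".equals(fields[i])) {
                return true;
            }
        }
        return false;
    }

    public static void main(String[] args) {
        // Flattened key/value pairs as the parser produces them.
        String[] typeFirst = {"\"@type\"", "\"d\"", "\"name\"", "\"Luca\""};
        String[] typeLater = {"\"name\"", "\"Luca\"", "\"@type\"", "\"d\""};
        System.out.println(hasTypeField(typeFirst)); // true, old behavior too
        System.out.println(hasTypeField(typeLater)); // true, the fixed case
    }
}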
079039e5531a152388776c637884c891974008e5
restlet-framework-java
Fixed generation of cache directives in the Cache-Control header.
c
https://github.com/restlet/restlet-framework-java
diff --git a/modules/org.restlet/src/org/restlet/data/CacheDirective.java b/modules/org.restlet/src/org/restlet/data/CacheDirective.java index ea761871b3..d76af1d136 100644 --- a/modules/org.restlet/src/org/restlet/data/CacheDirective.java +++ b/modules/org.restlet/src/org/restlet/data/CacheDirective.java @@ -65,7 +65,7 @@ public final class CacheDirective extends Parameter { */ public static CacheDirective maxAge(int maxAge) { return new CacheDirective(HttpConstants.CACHE_MAX_AGE, Integer - .toString(maxAge)); + .toString(maxAge), true); } /** @@ -100,7 +100,7 @@ public static CacheDirective maxStale() { */ public static CacheDirective maxStale(int maxStale) { return new CacheDirective(HttpConstants.CACHE_MAX_STALE, Integer - .toString(maxStale)); + .toString(maxStale), true); } /** @@ -121,7 +121,7 @@ public static CacheDirective maxStale(int maxStale) { */ public static CacheDirective minFresh(int minFresh) { return new CacheDirective(HttpConstants.CACHE_MIN_FRESH, Integer - .toString(minFresh)); + .toString(minFresh), true); } /** @@ -366,9 +366,12 @@ public static CacheDirective publicInfo() { */ public static CacheDirective sharedMaxAge(int sharedMaxAge) { return new CacheDirective(HttpConstants.CACHE_SHARED_MAX_AGE, Integer - .toString(sharedMaxAge)); + .toString(sharedMaxAge), true); } + /** Indicates if the directive is a digit value. */ + private boolean digit; + /** * Constructor for directives with no value. * @@ -388,7 +391,40 @@ public CacheDirective(String name) { * The directive value. */ public CacheDirective(String name, String value) { + this(name, value, false); + } + + /** + * Constructor for directives with a value. + * + * @param name + * The directive name. + * @param value + * The directive value. + * @param digit + * The kind of value (true for a digit value, false otherwise). + */ + public CacheDirective(String name, String value, boolean digit) { super(name, value); + this.digit = digit; + } + + /** + * Returns true if the directive contains a digit value. + * + * @return True if the directive contains a digit value. + */ + public boolean isDigit() { + return digit; } + /** + * Indicates if the directive is a digit value. + * + * @param digit + * True if the directive contains a digit value. + */ + public void setDigit(boolean digit) { + this.digit = digit; + } } \ No newline at end of file diff --git a/modules/org.restlet/src/org/restlet/engine/http/CacheControlUtils.java b/modules/org.restlet/src/org/restlet/engine/http/CacheControlUtils.java index 2cb24cb23c..fa03ab607a 100644 --- a/modules/org.restlet/src/org/restlet/engine/http/CacheControlUtils.java +++ b/modules/org.restlet/src/org/restlet/engine/http/CacheControlUtils.java @@ -84,7 +84,12 @@ public static void format(CacheDirective directive, Appendable destination) destination.append(directive.getName()); if ((directive.getValue() != null) && (directive.getValue().length() > 0)) { - destination.append("=\"").append(directive.getValue()).append('\"'); + if (directive.isDigit()) { + destination.append("=").append(directive.getValue()); + } else { + destination.append("=\"").append(directive.getValue()).append( + '\"'); + } } } }
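The rule the fix enforces, per HTTP/1.1: numeric cache-control directives such as max-age are emitted unquoted, while string-valued ones keep their quotes. A minimal sketch of that formatting decision; the method below is illustrative, not the Restlet API.

public class CacheDirectiveFormatDemo {
    static String format(String name, String value, boolean digit) {
        if (value == null || value.isEmpty()) {
            return name;
        }
        // Digit values go bare; anything else stays quoted.
        return digit ? name + "=" + value : name + "=\"" + value + "\"";
    }

    public static void main(String[] args) {
        System.out.println(format("max-age", "60", true));        // max-age=60
        System.out.println(format("private", "X-Session", false)); // private="X-Session"
        System.out.println(format("no-store", null, false));       // no-store
    }
}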
b28007ee8f236ffbad6d1fc21ff5867e14e3d75b
ReactiveX-RxJava
Fix termination race condition in OperatorPublish.dispatch
c
https://github.com/ReactiveX/RxJava
diff --git a/src/main/java/rx/internal/operators/OperatorPublish.java b/src/main/java/rx/internal/operators/OperatorPublish.java index 0fca3d6c64..662d093ebf 100644 --- a/src/main/java/rx/internal/operators/OperatorPublish.java +++ b/src/main/java/rx/internal/operators/OperatorPublish.java @@ -458,6 +458,15 @@ void dispatch() { boolean skipFinal = false; try { for (;;) { + /* + * We need to read terminalEvent before checking the queue for emptyness because + * all enqueue happens before setting the terminal event. + * If it were the other way around, when the emission is paused between + * checking isEmpty and checking terminalEvent, some other thread might + * have produced elements and set the terminalEvent and we'd quit emitting + * prematurely. + */ + Object term = terminalEvent; /* * See if the queue is empty; since we need this information multiple * times later on, we read it one. @@ -468,7 +477,7 @@ void dispatch() { // if the queue is empty and the terminal event was received, quit // and don't bother restoring emitting to false: no further activity is // possible at this point - if (checkTerminated(terminalEvent, empty)) { + if (checkTerminated(term, empty)) { skipFinal = true; return; } @@ -508,10 +517,11 @@ void dispatch() { // it may happen everyone has unsubscribed between here and producers.get() // or we have no subscribers at all to begin with if (len == unsubscribed) { + term = terminalEvent; // so let's consume a value from the queue Object v = queue.poll(); // or terminate if there was a terminal event and the queue is empty - if (checkTerminated(terminalEvent, v == null)) { + if (checkTerminated(term, v == null)) { skipFinal = true; return; } @@ -748,4 +758,4 @@ public void unsubscribe() { } } } -} +} \ No newline at end of file
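The comment added in the diff is the whole story: producers enqueue before setting the terminal event, so the drain loop must read the terminal flag before checking emptiness, or a pause between the two checks can make it quit while elements are in flight. A standalone sketch of that ordering rule, not RxJava code:

import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicBoolean;

public class TerminationOrderDemo {
    final Queue<Integer> queue = new ConcurrentLinkedQueue<>();
    final AtomicBoolean done = new AtomicBoolean();

    void produce(int v) {
        queue.offer(v);   // 1) enqueue first...
    }

    void finish() {
        done.set(true);   // 2) ...then publish the terminal event
    }

    void drain() {
        for (;;) {
            boolean d = done.get();      // read the terminal flag FIRST
            Integer v = queue.poll();
            if (v == null) {
                if (d) {
                    return;              // flag preceded the empty check: safe to quit
                }
                continue;                // busy-wait; a real drain would reschedule
            }
            System.out.println("emit " + v);
        }
    }

    public static void main(String[] args) throws InterruptedException {
        TerminationOrderDemo demo = new TerminationOrderDemo();
        Thread producer = new Thread(() -> { demo.produce(1); demo.produce(2); demo.finish(); });
        producer.start();
        demo.drain();
        producer.join();
    }
}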
dd6b2b0860bba3c0fac44092cf388c607b908df1
intellij-community
IDEA-139609 Gradle: New Module Wizard: "Select Gradle Project" dialog: selection is not confirmed by double-clicking or pressing Enter
c
https://github.com/JetBrains/intellij-community
diff --git a/platform/external-system-impl/src/com/intellij/openapi/externalSystem/view/ExternalProjectsView.java b/platform/external-system-impl/src/com/intellij/openapi/externalSystem/view/ExternalProjectsView.java index 56e1c622d2d0c..b10f41f615d61 100644 --- a/platform/external-system-impl/src/com/intellij/openapi/externalSystem/view/ExternalProjectsView.java +++ b/platform/external-system-impl/src/com/intellij/openapi/externalSystem/view/ExternalProjectsView.java @@ -24,6 +24,7 @@ import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; +import java.awt.event.InputEvent; import java.util.List; /** @@ -51,4 +52,12 @@ public interface ExternalProjectsView { boolean getGroupTasks(); ProjectSystemId getSystemId(); + + void handleDoubleClickOrEnter(@NotNull ExternalSystemNode node, @Nullable String actionId, InputEvent inputEvent); + + void addListener(@NotNull ExternalProjectsView.Listener listener); + + interface Listener { + void onDoubleClickOrEnter(@NotNull ExternalSystemNode node, InputEvent inputEvent); + } } diff --git a/platform/external-system-impl/src/com/intellij/openapi/externalSystem/view/ExternalProjectsViewAdapter.java b/platform/external-system-impl/src/com/intellij/openapi/externalSystem/view/ExternalProjectsViewAdapter.java index 207ec6bbf6e60..9d8d0c409dde5 100644 --- a/platform/external-system-impl/src/com/intellij/openapi/externalSystem/view/ExternalProjectsViewAdapter.java +++ b/platform/external-system-impl/src/com/intellij/openapi/externalSystem/view/ExternalProjectsViewAdapter.java @@ -24,6 +24,7 @@ import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; +import java.awt.event.InputEvent; import java.util.List; /** @@ -90,4 +91,14 @@ public boolean getGroupTasks() { public ProjectSystemId getSystemId() { return delegate.getSystemId(); } + + @Override + public void handleDoubleClickOrEnter(@NotNull ExternalSystemNode node, @Nullable String actionId, InputEvent inputEvent) { + delegate.handleDoubleClickOrEnter(node, actionId, inputEvent); + } + + @Override + public void addListener(@NotNull Listener listener) { + delegate.addListener(listener); + } } diff --git a/platform/external-system-impl/src/com/intellij/openapi/externalSystem/view/ExternalProjectsViewImpl.java b/platform/external-system-impl/src/com/intellij/openapi/externalSystem/view/ExternalProjectsViewImpl.java index b1ad9adf29d16..716116a0229a9 100644 --- a/platform/external-system-impl/src/com/intellij/openapi/externalSystem/view/ExternalProjectsViewImpl.java +++ b/platform/external-system-impl/src/com/intellij/openapi/externalSystem/view/ExternalProjectsViewImpl.java @@ -24,6 +24,7 @@ import com.intellij.openapi.application.ModalityState; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.externalSystem.ExternalSystemUiAware; +import com.intellij.openapi.externalSystem.action.ExternalSystemActionUtil; import com.intellij.openapi.externalSystem.action.ExternalSystemViewGearAction; import com.intellij.openapi.externalSystem.model.*; import com.intellij.openapi.externalSystem.model.execution.ExternalTaskExecutionInfo; @@ -69,6 +70,7 @@ import javax.swing.tree.TreeSelectionModel; import java.awt.*; +import java.awt.event.InputEvent; import java.util.*; import java.util.List; @@ -89,6 +91,8 @@ public class ExternalProjectsViewImpl extends SimpleToolWindowPanel implements D private final ProjectSystemId myExternalSystemId; @NotNull private final ExternalSystemUiAware myUiAware; + @NotNull + private final Set<Listener> listeners = 
ContainerUtil.newHashSet(); @Nullable private ExternalProjectsStructure myStructure; @@ -277,6 +281,21 @@ public void run() { scheduleStructureUpdate(); } + @Override + public void handleDoubleClickOrEnter(@NotNull ExternalSystemNode node, @Nullable String actionId, InputEvent inputEvent) { + if (actionId != null) { + ExternalSystemActionUtil.executeAction(actionId, inputEvent); + } + for (Listener listener : listeners) { + listener.onDoubleClickOrEnter(node, inputEvent); + } + } + + @Override + public void addListener(@NotNull Listener listener) { + listeners.add(listener); + } + private ActionGroup createAdditionalGearActionsGroup() { ActionManager actionManager = ActionManager.getInstance(); DefaultActionGroup group = new DefaultActionGroup(); diff --git a/platform/external-system-impl/src/com/intellij/openapi/externalSystem/view/ExternalSystemNode.java b/platform/external-system-impl/src/com/intellij/openapi/externalSystem/view/ExternalSystemNode.java index 33da6647dda0c..43b9b4c67d4d1 100644 --- a/platform/external-system-impl/src/com/intellij/openapi/externalSystem/view/ExternalSystemNode.java +++ b/platform/external-system-impl/src/com/intellij/openapi/externalSystem/view/ExternalSystemNode.java @@ -373,9 +373,7 @@ public Navigatable getNavigatable() { @Override public void handleDoubleClickOrEnter(SimpleTree tree, InputEvent inputEvent) { String actionId = getActionId(); - if (actionId != null) { - ExternalSystemActionUtil.executeAction(actionId, inputEvent); - } + getExternalProjectsView().handleDoubleClickOrEnter(this, actionId, inputEvent); } @Override diff --git a/plugins/gradle/src/org/jetbrains/plugins/gradle/service/project/wizard/SelectExternalProjectDialog.java b/plugins/gradle/src/org/jetbrains/plugins/gradle/service/project/wizard/SelectExternalProjectDialog.java index 53349141be809..3d04e11ff9dba 100644 --- a/plugins/gradle/src/org/jetbrains/plugins/gradle/service/project/wizard/SelectExternalProjectDialog.java +++ b/plugins/gradle/src/org/jetbrains/plugins/gradle/service/project/wizard/SelectExternalProjectDialog.java @@ -16,15 +16,18 @@ package org.jetbrains.plugins.gradle.service.project.wizard; import com.intellij.openapi.externalSystem.model.project.ProjectData; +import com.intellij.openapi.externalSystem.view.ExternalSystemNode; import com.intellij.openapi.externalSystem.view.ProjectNode; import com.intellij.openapi.project.Project; import com.intellij.ui.treeStructure.NullNode; import com.intellij.ui.treeStructure.SimpleNode; import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; import org.jetbrains.plugins.gradle.util.GradleConstants; import javax.swing.*; import java.awt.event.ActionEvent; +import java.awt.event.InputEvent; /** * @author Vladislav.Soroka @@ -69,6 +72,13 @@ protected void doOKAction() { super.doOKAction(); } + @Override + protected void handleDoubleClickOrEnter(@NotNull ExternalSystemNode node, @Nullable String actionId, InputEvent inputEvent) { + if(node instanceof ProjectNode ) { + doOKAction(); + } + } + public ProjectData getResult() { return myResult; } diff --git a/plugins/gradle/src/org/jetbrains/plugins/gradle/service/project/wizard/SelectExternalSystemNodeDialog.java b/plugins/gradle/src/org/jetbrains/plugins/gradle/service/project/wizard/SelectExternalSystemNodeDialog.java index e54483a1f7d96..5cb6a0558ec5c 100644 --- a/plugins/gradle/src/org/jetbrains/plugins/gradle/service/project/wizard/SelectExternalSystemNodeDialog.java +++ 
b/plugins/gradle/src/org/jetbrains/plugins/gradle/service/project/wizard/SelectExternalSystemNodeDialog.java @@ -20,7 +20,10 @@ import com.intellij.openapi.externalSystem.model.project.ProjectData; import com.intellij.openapi.externalSystem.service.project.manage.ExternalProjectsManager; import com.intellij.openapi.externalSystem.service.project.manage.ProjectDataManager; -import com.intellij.openapi.externalSystem.view.*; +import com.intellij.openapi.externalSystem.view.ExternalProjectsStructure; +import com.intellij.openapi.externalSystem.view.ExternalProjectsView; +import com.intellij.openapi.externalSystem.view.ExternalProjectsViewAdapter; +import com.intellij.openapi.externalSystem.view.ExternalSystemNode; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.DialogWrapper; import com.intellij.ui.ScrollPaneFactory; @@ -30,11 +33,13 @@ import com.intellij.util.Function; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.ui.JBUI; +import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.plugins.gradle.util.GradleConstants; import javax.swing.*; import javax.swing.tree.TreeSelectionModel; +import java.awt.event.InputEvent; import java.util.Collection; import java.util.List; @@ -77,6 +82,11 @@ public ExternalProjectsStructure getStructure() { public void updateUpTo(ExternalSystemNode node) { treeStructure.updateUpTo(node); } + + @Override + public void handleDoubleClickOrEnter(@NotNull ExternalSystemNode node, @Nullable String actionId, InputEvent inputEvent) { + SelectExternalSystemNodeDialog.this.handleDoubleClickOrEnter(node, actionId, inputEvent); + } }); final Collection<ExternalProjectInfo> projectsData = @@ -107,6 +117,9 @@ public boolean accept(SimpleNode each) { init(); } + protected void handleDoubleClickOrEnter(@NotNull ExternalSystemNode node, @Nullable String actionId, InputEvent inputEvent) { + } + protected SimpleNode getSelectedNode() { return myTree.getNodeFor(myTree.getSelectionPath()); }
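A stripped-down sketch of the hook this commit threads through the view interfaces: the tree no longer executes an action directly on double-click or Enter, it notifies registered listeners so an embedding dialog (like SelectExternalProjectDialog) can treat the gesture as OK. The types below are simplified stand-ins for the IDE interfaces.

import java.util.HashSet;
import java.util.Set;

public class DoubleClickHookDemo {
    interface Listener { void onDoubleClickOrEnter(Object node); }

    private final Set<Listener> listeners = new HashSet<>();

    void addListener(Listener l) { listeners.add(l); }

    void handleDoubleClickOrEnter(Object node) {
        // Fan out to whoever embeds the view; the dialog closes itself here.
        for (Listener l : listeners) {
            l.onDoubleClickOrEnter(node);
        }
    }

    public static void main(String[] args) {
        DoubleClickHookDemo view = new DoubleClickHookDemo();
        view.addListener(node -> System.out.println("confirm selection: " + node));
        view.handleDoubleClickOrEnter("ProjectNode[foo]");
    }
}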
05ee5877e9c07e4dce972f84e0741741fefc8efb
drools
Fixed bug with "disconnected" factHandles (JBRULES-3187) and added a test to verify that I fixed the bug!!
c
https://github.com/kiegroup/drools
diff --git a/drools-core/src/main/java/org/drools/command/runtime/rule/GetFactHandlesCommand.java b/drools-core/src/main/java/org/drools/command/runtime/rule/GetFactHandlesCommand.java index 39a1dd554be..4c3d4916378 100644 --- a/drools-core/src/main/java/org/drools/command/runtime/rule/GetFactHandlesCommand.java +++ b/drools-core/src/main/java/org/drools/command/runtime/rule/GetFactHandlesCommand.java @@ -45,6 +45,9 @@ public GetFactHandlesCommand(ObjectFilter filter, boolean disconnected) { this.filter = filter; this.disconnected = disconnected; } + public GetFactHandlesCommand(boolean disconnected) { + this.disconnected = disconnected; + } public Collection<FactHandle> execute(Context context) { StatefulKnowledgeSession ksession = ((KnowledgeCommandContext) context).getStatefulKnowledgesession(); @@ -57,8 +60,11 @@ public Collection<FactHandle> execute(Context context) { handle.disconnect(); disconnectedFactHandles.add(handle); } + return disconnectedFactHandles; + } + else { + return ksession.getFactHandles( this.filter ); } - return disconnectedFactHandles; } else { Collection<InternalFactHandle> factHandles = ksession.getFactHandles( ); if(factHandles != null && disconnected){ @@ -67,8 +73,11 @@ public Collection<FactHandle> execute(Context context) { handle.disconnect(); disconnectedFactHandles.add(handle); } + return disconnectedFactHandles; + } + else { + return ksession.getFactHandles(); } - return disconnectedFactHandles; } } diff --git a/drools-core/src/test/java/org/drools/comand/runtime/rule/GetFactHandlesCommandTest.java b/drools-core/src/test/java/org/drools/comand/runtime/rule/GetFactHandlesCommandTest.java new file mode 100644 index 00000000000..a77e722bf39 --- /dev/null +++ b/drools-core/src/test/java/org/drools/comand/runtime/rule/GetFactHandlesCommandTest.java @@ -0,0 +1,223 @@ +package org.drools.comand.runtime.rule; + +import static org.junit.Assert.*; + +import java.lang.ref.Reference; +import java.util.Collection; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Random; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; + +import org.drools.KnowledgeBase; +import org.drools.KnowledgeBaseFactory; +import org.drools.command.impl.ContextImpl; +import org.drools.command.impl.DefaultCommandService; +import org.drools.command.impl.KnowledgeCommandContext; +import org.drools.command.runtime.rule.GetFactHandlesCommand; +import org.drools.common.InternalFactHandle; +import org.drools.runtime.StatefulKnowledgeSession; +import org.drools.runtime.rule.FactHandle; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +@SuppressWarnings("unchecked") +public class GetFactHandlesCommandTest { + + private StatefulKnowledgeSession ksession; + private DefaultCommandService commandService; + private Random random = new Random(); + + @Before + public void setup() { + KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(); + ksession = kbase.newStatefulKnowledgeSession(); + KnowledgeCommandContext kContext + = new KnowledgeCommandContext( new ContextImpl( "ksession", null ), null, null, this.ksession, null ); + commandService = new DefaultCommandService(kContext); + + } + + @After + public void cleanUp() { + ksession.dispose(); + } + + @Test + public void getEmptyFactHandlesTest() { + GetFactHandlesCommand command = new GetFactHandlesCommand(); + Object result = commandService.execute(command); + if( result instanceof Collection<?> ) { + assertNotNull(result); + 
assertTrue(((Collection<?>) result).isEmpty()); + } + else { + fail("result of command was NOT a collection of FactHandles"); + } + } + + @Test + public void getOneFactHandleTest() { + String randomFact = "" + random.nextLong(); + ksession.insert(randomFact); + GetFactHandlesCommand command = new GetFactHandlesCommand(); + Object result = commandService.execute(command); + + verifyThatCollectionContains1FactHandleWithThisFact(randomFact, result); + } + + @Test + public void getMultipleFactHandleTest() { + HashSet<String> factSet = new HashSet<String>(); + int numFacts = 4; + for( int i = 0; i < numFacts; ++i ) { + factSet.add("" + random.nextInt()); + } + for( String fact : factSet ) { + ksession.insert(fact); + } + + GetFactHandlesCommand command = new GetFactHandlesCommand(); + Object result = commandService.execute(command); + + verifyThatCollectionContainsTheseFactHandle(factSet, result); + } + + @Test + public void getEmptyDisconnectedFactHandlesTest() { + GetFactHandlesCommand command = new GetFactHandlesCommand(true); + Object result = commandService.execute(command); + if( result instanceof Collection<?> ) { + assertNotNull(result); + assertTrue(((Collection<?>) result).isEmpty()); + } + else { + fail("result of command was NOT a collection of FactHandles"); + } + } + + @Test + public void getOneDisconnectedFactHandleTest() { + System.out.println( Thread.currentThread().getStackTrace()[1].getMethodName() ); + String randomFact = "" + random.nextLong(); + ksession.insert(randomFact); + + // Retrieve and verify fact handle collections + GetFactHandlesCommand command = new GetFactHandlesCommand(false); + Object result = commandService.execute(command); + verifyThatCollectionContains1FactHandleWithThisFact(randomFact, result); + FactHandle factHandle = (FactHandle) ((Collection<FactHandle>) result).toArray()[0]; + + command = new GetFactHandlesCommand(false); + result = commandService.execute(command); + verifyThatCollectionContains1FactHandleWithThisFact(randomFact, result); + FactHandle connectedFactHandle = (FactHandle) ((Collection<FactHandle>) result).toArray()[0]; + + command = new GetFactHandlesCommand(true); + result = commandService.execute(command); + verifyThatCollectionContains1FactHandleWithThisFact(randomFact, result); + FactHandle disconnectedFactHandle = (FactHandle) ((Collection<FactHandle>) result).toArray()[0]; + + // Test fact handle collections + assertTrue( factHandle == connectedFactHandle ); + assertTrue( ! 
(factHandle == disconnectedFactHandle) ); + } + + @Test + public void getMultipleDisconnectedFactHandleTest() { + System.out.println( "\nTest: " + Thread.currentThread().getStackTrace()[1].getMethodName() ); + HashSet<String> factSet = new HashSet<String>(); + int numFacts = 4; + for( int i = 0; i < numFacts; ++i ) { + factSet.add("" + random.nextInt()); + } + for( String fact : factSet ) { + ksession.insert(fact); + } + + GetFactHandlesCommand command = new GetFactHandlesCommand(false); + Object result = commandService.execute(command); + verifyThatCollectionContainsTheseFactHandle(factSet, result); + Collection<FactHandle> factHandles = ((Collection<FactHandle>) result); + + command = new GetFactHandlesCommand(false); + result = commandService.execute(command); + verifyThatCollectionContainsTheseFactHandle(factSet, result); + Collection<FactHandle> connectedFactHandles = ((Collection<FactHandle>) result); + + command = new GetFactHandlesCommand(true); + result = commandService.execute(command); + verifyThatCollectionContainsTheseFactHandle(factSet, result); + Collection<FactHandle> disconnectedFactHandles = ((Collection<FactHandle>) result); + + // Test fact handle collections + HashSet<FactHandle> factHandlesCopy = new HashSet<FactHandle>(factHandles); + for( int i = 0; i < connectedFactHandles.size(); ++i ) { + for( Object connectedFact : connectedFactHandles ) { + Iterator<FactHandle> iter = factHandlesCopy.iterator(); + while(iter.hasNext() ) { + Object fact = iter.next(); + if( fact == connectedFact ) { + iter.remove(); + } + } + } + } + assertTrue( factHandlesCopy.isEmpty() ); + + for( int i = 0; i < disconnectedFactHandles.size(); ++i ) { + for( Object disconnectedFact : disconnectedFactHandles ) { + for( Object fact : factHandles ) { + assertTrue( ! (fact == disconnectedFact) ); + } + } + } + assertTrue( factHandles.size() == disconnectedFactHandles.size() ); + + } + + /** + * Helper methods + */ + private void verifyThatCollectionContains1FactHandleWithThisFact(String fact, Object collection) { + if( collection instanceof Collection<?> ) { + Collection<FactHandle> factHandles = null; + try { + factHandles = (Collection<FactHandle>) collection; + } + catch( Exception e ) { + fail( "Collection was not a Colleciton<FactHandle> " + e.getMessage()); + } + + assertTrue(! factHandles.isEmpty()); + assertTrue(factHandles.size() == 1); + InternalFactHandle factHandle = (InternalFactHandle) factHandles.toArray()[0]; + assertTrue(fact.equals(factHandle.getObject())); + } + else { + fail("result of command was NOT a collection of FactHandles"); + } + } + + private void verifyThatCollectionContainsTheseFactHandle(HashSet<String> factSet, Object collection) { + factSet = (HashSet<String>) factSet.clone(); + if( collection instanceof Collection<?> ) { + Collection<FactHandle> factHandles = (Collection<FactHandle>) collection; + assertTrue(! factHandles.isEmpty()); + assertTrue(factSet.size() + "inserted but only " + factHandles.size() + " facts retrieved", factHandles.size() == factSet.size()); + Object [] internalFactHandles = factHandles.toArray(); + for( int i = 0; i < internalFactHandles.length; ++i ) { + Object factObject = ((InternalFactHandle) internalFactHandles[i]).getObject(); + assertTrue(factSet.contains(factObject)); + factSet.remove(factObject); + } + assertTrue( "Additional facts found that weren't inserted.", factSet.isEmpty() ); + } + else { + fail("result of command was NOT a collection of FactHandles"); + } + } + +}
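A usage sketch of the fixed command, wired up the same way as the new test above: with disconnected=false the session's live handles come back, with true each handle is a detached copy that is safe to hold after the session moves on.

import java.util.Collection;
import org.drools.KnowledgeBase;
import org.drools.KnowledgeBaseFactory;
import org.drools.command.impl.ContextImpl;
import org.drools.command.impl.DefaultCommandService;
import org.drools.command.impl.KnowledgeCommandContext;
import org.drools.command.runtime.rule.GetFactHandlesCommand;
import org.drools.runtime.StatefulKnowledgeSession;
import org.drools.runtime.rule.FactHandle;

public class DisconnectedHandlesDemo {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
        StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
        DefaultCommandService commands = new DefaultCommandService(
                new KnowledgeCommandContext(new ContextImpl("ksession", null), null, null, ksession, null));
        ksession.insert("some fact");

        // Live handles: the same objects the session holds internally.
        Collection<FactHandle> live =
                (Collection<FactHandle>) commands.execute(new GetFactHandlesCommand(false));
        // Disconnected handles: copies, no longer tied to session internals.
        Collection<FactHandle> detached =
                (Collection<FactHandle>) commands.execute(new GetFactHandlesCommand(true));
        System.out.println(live.size() + " live, " + detached.size() + " detached");
        ksession.dispose();
    }
}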
8616d08894c8e6cab965174ebb312ff718d83614
orientdb
Minor: fix warning
p
https://github.com/orientechnologies/orientdb
diff --git a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java index dec59c78280..f9acfb9cb72 100755 --- a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java +++ b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java @@ -541,9 +541,9 @@ protected void addCluster() throws IOException { final int num; if (clusterId < 0) - num = connection.database.addCluster(type, name, location, dataSegmentName, null); + num = connection.database.addCluster(type, name, location, dataSegmentName); else - num = connection.database.addCluster(type, name, clusterId, location, dataSegmentName, null); + num = connection.database.addCluster(type, name, clusterId, location, dataSegmentName); beginResponse(); try {
233a1b898725fc39f3f14346c716b5f52b3f0e0a
camel
improved the camel-bam module to handle concurrent processing better and to deal with JDBC/JPA exceptions when concurrent updates to the same process occur at the same time; also added better tests git-svn-id: https://svn.apache.org/repos/asf/activemq/camel/trunk@563909 13f79535-47bb-0310-9956-ffa450edef68
p
https://github.com/apache/camel
diff --git a/components/camel-bam/pom.xml b/components/camel-bam/pom.xml index ca95a39352679..d590b452ff48c 100644 --- a/components/camel-bam/pom.xml +++ b/components/camel-bam/pom.xml @@ -42,35 +42,38 @@ <groupId>org.apache.camel</groupId> <artifactId>camel-spring</artifactId> </dependency> - - <dependency> - <groupId>org.springframework</groupId> - <artifactId>spring</artifactId> - </dependency> - - <dependency> - <groupId>commons-logging</groupId> - <artifactId>commons-logging-api</artifactId> - </dependency> - <dependency> <groupId>javax.persistence</groupId> <artifactId>persistence-api</artifactId> <version>1.0</version> </dependency> + <!-- testing --> <dependency> <groupId>org.apache.camel</groupId> <artifactId>camel-core</artifactId> <type>test-jar</type> - <optional>true</optional> <scope>test</scope> </dependency> <dependency> <groupId>org.apache.camel</groupId> <artifactId>camel-spring</artifactId> <type>test-jar</type> - <optional>true</optional> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.camel</groupId> + <artifactId>camel-juel</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>commons-logging</groupId> + <artifactId>commons-logging</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>log4j</groupId> + <artifactId>log4j</artifactId> <scope>test</scope> </dependency> <!-- @@ -88,12 +91,6 @@ </dependency> --> - <dependency> - <groupId>mysql</groupId> - <artifactId>mysql-connector-java</artifactId> - <version>5.0.5</version> - <scope>test</scope> - </dependency> <dependency> <groupId>junit</groupId> @@ -110,6 +107,7 @@ <configuration> <childDelegation>false</childDelegation> <useFile>true</useFile> + <forkMode>pertest</forkMode> <includes> <include>**/*Test.*</include> </includes> @@ -163,6 +161,41 @@ </dependency> </dependencies> </profile> + <profile> + <id>derby</id> + <build> + <testResources> + <testResource> + <directory>${basedir}/src/test/profiles/derby</directory> + </testResource> + <testResource> + <directory>${basedir}/src/test/resources</directory> + </testResource> + </testResources> + </build> + <dependencies> + <dependency> + <groupId>org.hibernate</groupId> + <artifactId>hibernate-entitymanager</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.hibernate</groupId> + <artifactId>hibernate</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.derby</groupId> + <artifactId>derby</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.geronimo.specs</groupId> + <artifactId>geronimo-jta_1.0.1B_spec</artifactId> + <scope>test</scope> + </dependency> + </dependencies> + </profile> <profile> <id>mysql</id> @@ -188,8 +221,9 @@ <scope>test</scope> </dependency> <dependency> - <groupId>hsqldb</groupId> - <artifactId>hsqldb</artifactId> + <groupId>mysql</groupId> + <artifactId>mysql-connector-java</artifactId> + <version>5.0.5</version> <scope>test</scope> </dependency> <dependency> diff --git a/components/camel-bam/src/main/java/org/apache/camel/bam/model/ActivityState.java b/components/camel-bam/src/main/java/org/apache/camel/bam/model/ActivityState.java index 2237b7412453d..b00d817dece2f 100644 --- a/components/camel-bam/src/main/java/org/apache/camel/bam/model/ActivityState.java +++ b/components/camel-bam/src/main/java/org/apache/camel/bam/model/ActivityState.java @@ -16,7 +16,9 @@ */ package org.apache.camel.bam.model; -import java.util.Date; +import 
org.apache.camel.bam.processor.ProcessContext; +import org.apache.camel.bam.rules.ActivityRules; +import org.apache.camel.util.ObjectHelper; import javax.persistence.CascadeType; import javax.persistence.Entity; @@ -26,10 +28,8 @@ import javax.persistence.ManyToOne; import javax.persistence.Temporal; import javax.persistence.TemporalType; - -import org.apache.camel.bam.processor.ProcessContext; -import org.apache.camel.bam.rules.ActivityRules; -import org.apache.camel.util.ObjectHelper; +import javax.persistence.Transient; +import java.util.Date; /** * The default state for a specific activity within a process @@ -56,7 +56,7 @@ public Long getId() { @Override public String toString() { - return "ActivityState[" + getId() + " " + getActivityDefinition() + "]"; + return "ActivityState[" + getId() + " on " + getProcessInstance() + " " + getActivityDefinition() + "]"; } public synchronized void processExchange(ActivityRules activityRules, ProcessContext context) throws Exception { @@ -147,6 +147,15 @@ public void setTimeCompleted(Date timeCompleted) { } } + @Transient + public String getCorrelationKey() { + ProcessInstance pi = getProcessInstance(); + if (pi == null) { + return null; + } + return pi.getCorrelationKey(); + } + // Implementation methods // ----------------------------------------------------------------------- diff --git a/components/camel-bam/src/main/java/org/apache/camel/bam/model/ProcessInstance.java b/components/camel-bam/src/main/java/org/apache/camel/bam/model/ProcessInstance.java index 3acc9b04ade76..a74913be344e5 100644 --- a/components/camel-bam/src/main/java/org/apache/camel/bam/model/ProcessInstance.java +++ b/components/camel-bam/src/main/java/org/apache/camel/bam/model/ProcessInstance.java @@ -19,15 +19,7 @@ import java.util.Collection; import java.util.Date; import java.util.HashSet; - -import javax.persistence.CascadeType; -import javax.persistence.Entity; -import javax.persistence.FetchType; -import javax.persistence.GeneratedValue; -import javax.persistence.Id; -import javax.persistence.ManyToOne; -import javax.persistence.OneToMany; -import javax.persistence.UniqueConstraint; +import javax.persistence.*; import org.apache.camel.bam.rules.ActivityRules; import org.apache.commons.logging.Log; @@ -39,27 +31,29 @@ * @version $Revision: $ */ @Entity -@UniqueConstraint(columnNames = {"correlationKey" }) -public class ProcessInstance extends TemporalEntity { +public class ProcessInstance { private static final transient Log LOG = LogFactory.getLog(ProcessInstance.class); private ProcessDefinition processDefinition; private Collection<ActivityState> activityStates = new HashSet<ActivityState>(); private String correlationKey; + private Date timeStarted; + private Date timeCompleted; public ProcessInstance() { setTimeStarted(new Date()); } public String toString() { - return getClass().getName() + "[id: " + getId() + ", key: " + getCorrelationKey() + "]"; + return "ProcessInstance[" + getCorrelationKey() + "]"; } - // This crap is required to work around a bug in hibernate - @Override @Id - @GeneratedValue - public Long getId() { - return super.getId(); + public String getCorrelationKey() { + return correlationKey; + } + + public void setCorrelationKey(String correlationKey) { + this.correlationKey = correlationKey; } @ManyToOne(fetch = FetchType.LAZY, cascade = {CascadeType.PERSIST }) @@ -80,15 +74,34 @@ public void setActivityStates(Collection<ActivityState> activityStates) { this.activityStates = activityStates; } - public String getCorrelationKey() { - return 
correlationKey; + + @Transient + public boolean isStarted() { + return timeStarted != null; } - public void setCorrelationKey(String correlationKey) { - this.correlationKey = correlationKey; + @Transient + public boolean isCompleted() { + return timeCompleted != null; + } + + @Temporal(TemporalType.TIME) + public Date getTimeStarted() { + return timeStarted; + } + + public void setTimeStarted(Date timeStarted) { + this.timeStarted = timeStarted; + } + + @Temporal(TemporalType.TIME) + public Date getTimeCompleted() { + return timeCompleted; } - // Helper methods + public void setTimeCompleted(Date timeCompleted) { + this.timeCompleted = timeCompleted; + } // Helper methods //------------------------------------------------------------------------- /** diff --git a/components/camel-bam/src/main/java/org/apache/camel/bam/processor/BamProcessorSupport.java b/components/camel-bam/src/main/java/org/apache/camel/bam/processor/BamProcessorSupport.java index 4a5acf317efff..ce221ab4d58a8 100644 --- a/components/camel-bam/src/main/java/org/apache/camel/bam/processor/BamProcessorSupport.java +++ b/components/camel-bam/src/main/java/org/apache/camel/bam/processor/BamProcessorSupport.java @@ -16,25 +16,27 @@ */ package org.apache.camel.bam.processor; -import java.lang.reflect.ParameterizedType; -import java.lang.reflect.Type; - import org.apache.camel.Exchange; import org.apache.camel.Expression; import org.apache.camel.Processor; import org.apache.camel.RuntimeCamelException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; - +import org.springframework.dao.DataIntegrityViolationException; +import org.springframework.orm.jpa.JpaSystemException; import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.support.TransactionCallback; import org.springframework.transaction.support.TransactionTemplate; +import javax.persistence.EntityExistsException; +import java.lang.reflect.ParameterizedType; +import java.lang.reflect.Type; + /** * A base {@link Processor} for working on <a * href="http://activemq.apache.org/camel/bam.html">BAM</a> which a derived * class would do the actual persistence such as the {@link JpaBamProcessor} - * + * * @version $Revision: $ */ public abstract class BamProcessorSupport<T> implements Processor { @@ -42,6 +44,15 @@ public abstract class BamProcessorSupport<T> implements Processor { private Class<T> entityType; private Expression<Exchange> correlationKeyExpression; private TransactionTemplate transactionTemplate; + private int maximumRetries = 30; + + public int getMaximumRetries() { + return maximumRetries; + } + + public void setMaximumRetries(int maximumRetries) { + this.maximumRetries = maximumRetries; + } protected BamProcessorSupport(TransactionTemplate transactionTemplate, Expression<Exchange> correlationKeyExpression) { this.transactionTemplate = transactionTemplate; @@ -49,12 +60,12 @@ protected BamProcessorSupport(TransactionTemplate transactionTemplate, Expressio Type type = getClass().getGenericSuperclass(); if (type instanceof ParameterizedType) { - ParameterizedType parameterizedType = (ParameterizedType)type; + ParameterizedType parameterizedType = (ParameterizedType) type; Type[] arguments = parameterizedType.getActualTypeArguments(); if (arguments.length > 0) { Type argumentType = arguments[0]; if (argumentType instanceof Class) { - this.entityType = (Class<T>)argumentType; + this.entityType = (Class<T>) argumentType; } } } @@ -70,27 +81,46 @@ protected 
BamProcessorSupport(TransactionTemplate transactionTemplate, Expressio } public void process(final Exchange exchange) { - transactionTemplate.execute(new TransactionCallback() { - public Object doInTransaction(TransactionStatus status) { - try { - Object key = getCorrelationKey(exchange); + Object entity = null; + for (int i = 0; entity == null && i < maximumRetries; i++) { + if (i > 0) { + LOG.info("Retry attempt due to duplicate row: " + i); + } + entity = transactionTemplate.execute(new TransactionCallback() { + public Object doInTransaction(TransactionStatus status) { + try { + Object key = getCorrelationKey(exchange); - T entity = loadEntity(exchange, key); + T entity = loadEntity(exchange, key); - if (LOG.isDebugEnabled()) { - LOG.debug("Correlation key: " + key + " with entity: " + entity); - } - processEntity(exchange, entity); + if (LOG.isDebugEnabled()) { + LOG.debug("Correlation key: " + key + " with entity: " + entity); + } + processEntity(exchange, entity); - return entity; - } catch (Exception e) { - if (LOG.isDebugEnabled()) { - LOG.debug("Caught: " + e, e); + return entity; + } + catch (JpaSystemException e) { + if (LOG.isDebugEnabled()) { + LOG.debug("Likely exception is due to duplicate row in concurrent setting: " + e, e); + } + LOG.info("Attempt to insert duplicate row due to concurrency issue, so retrying: " + e); + return retryDueToDuplicate(status); + } + catch (DataIntegrityViolationException e) { + Throwable throwable = e.getCause(); + if (throwable instanceof EntityExistsException) { + LOG.info("Attempt to insert duplicate row due to concurrency issue, so retrying: " + throwable); + return retryDueToDuplicate(status); + } + return onError(status, throwable); + } + catch (Throwable e) { + return onError(status, e); } - throw new RuntimeCamelException(e); } - } - }); + }); + } } // Properties @@ -116,4 +146,15 @@ protected Object getCorrelationKey(Exchange exchange) throws NoCorrelationKeyExc } return value; } + + protected Object retryDueToDuplicate(TransactionStatus status) { + status.setRollbackOnly(); + return null; + } + + protected Object onError(TransactionStatus status, Throwable e) { + status.setRollbackOnly(); + LOG.error("Caught: " + e, e); + throw new RuntimeCamelException(e); + } } diff --git a/components/camel-bam/src/main/java/org/apache/camel/bam/processor/JpaBamProcessor.java b/components/camel-bam/src/main/java/org/apache/camel/bam/processor/JpaBamProcessor.java index 84b249e01ebf2..09026f19d585c 100644 --- a/components/camel-bam/src/main/java/org/apache/camel/bam/processor/JpaBamProcessor.java +++ b/components/camel-bam/src/main/java/org/apache/camel/bam/processor/JpaBamProcessor.java @@ -18,12 +18,12 @@ import org.apache.camel.Exchange; import org.apache.camel.Expression; +import org.apache.camel.Processor; import org.apache.camel.bam.model.ActivityState; import org.apache.camel.bam.model.ProcessInstance; import org.apache.camel.bam.rules.ActivityRules; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; - import org.springframework.orm.jpa.JpaTemplate; import org.springframework.transaction.support.TransactionTemplate; diff --git a/components/camel-bam/src/main/java/org/apache/camel/bam/processor/JpaBamProcessorSupport.java b/components/camel-bam/src/main/java/org/apache/camel/bam/processor/JpaBamProcessorSupport.java index 15d9c5f987348..281f5815351ae 100644 --- a/components/camel-bam/src/main/java/org/apache/camel/bam/processor/JpaBamProcessorSupport.java +++ 
b/components/camel-bam/src/main/java/org/apache/camel/bam/processor/JpaBamProcessorSupport.java @@ -16,30 +16,34 @@ */ package org.apache.camel.bam.processor; -import java.util.List; - import org.apache.camel.Exchange; import org.apache.camel.Expression; import org.apache.camel.Processor; import org.apache.camel.bam.model.ProcessDefinition; import org.apache.camel.bam.rules.ActivityRules; import org.apache.camel.util.IntrospectionSupport; - +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.springframework.orm.jpa.JpaTemplate; import org.springframework.transaction.support.TransactionTemplate; +import java.util.List; + /** * A base class for JPA based BAM which can use any entity to store the process * instance information which allows derived classes to specialise the process * instance entity. - * + * * @version $Revision: $ */ public class JpaBamProcessorSupport<T> extends BamProcessorSupport<T> { + private static final transient Log LOG = LogFactory.getLog(JpaBamProcessorSupport.class); + private ActivityRules activityRules; private JpaTemplate template; private String findByKeyQuery; private String keyPropertyName = "correlationKey"; + private boolean correlationKeyIsPrimary = true; public JpaBamProcessorSupport(TransactionTemplate transactionTemplate, JpaTemplate template, Expression<Exchange> correlationKeyExpression, ActivityRules activityRules, Class<T> entitytype) { super(transactionTemplate, correlationKeyExpression, entitytype); @@ -88,24 +92,48 @@ public void setTemplate(JpaTemplate template) { this.template = template; } + public boolean isCorrelationKeyIsPrimary() { + return correlationKeyIsPrimary; + } + + public void setCorrelationKeyIsPrimary(boolean correlationKeyIsPrimary) { + this.correlationKeyIsPrimary = correlationKeyIsPrimary; + } + // Implementatiom methods // ----------------------------------------------------------------------- protected T loadEntity(Exchange exchange, Object key) { - List<T> list = template.find(getFindByKeyQuery(), key); - T entity = null; - if (!list.isEmpty()) { - entity = list.get(0); - } + T entity = findEntityByCorrelationKey(key); if (entity == null) { entity = createEntity(exchange, key); setKeyProperty(entity, key); ProcessDefinition definition = ProcessDefinition.getRefreshedProcessDefinition(template, getActivityRules().getProcessRules().getProcessDefinition()); setProcessDefinitionProperty(entity, definition); template.persist(entity); + + // Now we must flush to avoid concurrent updates clashing trying to insert the + // same row + LOG.debug("About to flush on entity: " + entity + " with key: " + key); + template.flush(); } return entity; } + protected T findEntityByCorrelationKey(Object key) { + if (isCorrelationKeyIsPrimary()) { + return template.find(getEntityType(), key); + } + else { + List<T> list = template.find(getFindByKeyQuery(), key); + if (list.isEmpty()) { + return null; + } + else { + return list.get(0); + } + } + } + /** * Sets the key property on the new entity */ @@ -121,14 +149,15 @@ protected void setProcessDefinitionProperty(T entity, ProcessDefinition processD * Create a new instance of the entity for the given key */ protected T createEntity(Exchange exchange, Object key) { - return (T)exchange.getContext().getInjector().newInstance(getEntityType()); + return (T) exchange.getContext().getInjector().newInstance(getEntityType()); } protected void processEntity(Exchange exchange, T entity) throws Exception { if (entity instanceof Processor) { - Processor processor = 
(Processor)entity; + Processor processor = (Processor) entity; processor.process(exchange); - } else { + } + else { // TODO add other extension points - eg. passing in Activity throw new IllegalArgumentException("No processor defined for this route"); } diff --git a/components/camel-bam/src/main/java/org/apache/camel/bam/rules/ActivityRules.java b/components/camel-bam/src/main/java/org/apache/camel/bam/rules/ActivityRules.java index 2373fa0dbde6a..b0118e593268e 100644 --- a/components/camel-bam/src/main/java/org/apache/camel/bam/rules/ActivityRules.java +++ b/components/camel-bam/src/main/java/org/apache/camel/bam/rules/ActivityRules.java @@ -74,16 +74,6 @@ public void processExchange(Exchange exchange, ProcessInstance process) { public ActivityDefinition getActivityDefinition() { // lets always query for it, to avoid issues with refreshing before a commit etc return builder.findOrCreateActivityDefinition(activityName); -/* - if (activityDefinition == null) { - activityDefinition = builder.findOrCreateActivityDefinition(activityName); - } - else { - // lets refresh it - builder.getJpaTemplate().refresh(activityDefinition); - } - return activityDefinition; -*/ } public void setActivityDefinition(ActivityDefinition activityDefinition) { diff --git a/components/camel-bam/src/test/java/org/apache/camel/bam/BamRouteTest.java b/components/camel-bam/src/test/java/org/apache/camel/bam/BamRouteTest.java index 0f6c2514a7cf3..34fc2d36aa12e 100644 --- a/components/camel-bam/src/test/java/org/apache/camel/bam/BamRouteTest.java +++ b/components/camel-bam/src/test/java/org/apache/camel/bam/BamRouteTest.java @@ -17,28 +17,26 @@ package org.apache.camel.bam; import org.apache.camel.builder.RouteBuilder; +import static org.apache.camel.builder.xml.XPathBuilder.xpath; import org.apache.camel.component.mock.MockEndpoint; import org.apache.camel.spring.SpringTestSupport; - +import static org.apache.camel.util.Time.seconds; import org.springframework.context.support.ClassPathXmlApplicationContext; import org.springframework.orm.jpa.JpaTemplate; import org.springframework.transaction.support.TransactionTemplate; -import static org.apache.camel.builder.xml.XPathBuilder.xpath; -import static org.apache.camel.util.Time.seconds; - /** * @version $Revision: $ */ public class BamRouteTest extends SpringTestSupport { + protected MockEndpoint overdueEndpoint; - public void testSendingToFirstActivityOnlyResultsInOverdueMessage() throws Exception { - MockEndpoint overdueEndpoint = resolveMandatoryEndpoint("mock:overdue", MockEndpoint.class); + public void testBam() throws Exception { overdueEndpoint.expectedMessageCount(1); template.sendBody("direct:a", "<hello id='123'>world!</hello>"); - overdueEndpoint.assertIsSatisfied(5000); + overdueEndpoint.assertIsSatisfied(); } protected ClassPathXmlApplicationContext createApplicationContext() { @@ -48,7 +46,11 @@ protected ClassPathXmlApplicationContext createApplicationContext() { @Override protected void setUp() throws Exception { super.setUp(); + camelContext.addRoutes(createRouteBuilder()); + + overdueEndpoint = resolveMandatoryEndpoint("mock:overdue", MockEndpoint.class); + overdueEndpoint.setDefaulResultWaitMillis(8000); } protected RouteBuilder createRouteBuilder() throws Exception { diff --git a/components/camel-bam/src/test/java/org/apache/camel/bam/MultipleActivitiesConcurrentlyTest.java b/components/camel-bam/src/test/java/org/apache/camel/bam/MultipleActivitiesConcurrentlyTest.java new file mode 100644 index 0000000000000..8714dc1f29dd4 --- /dev/null +++ 
b/components/camel-bam/src/test/java/org/apache/camel/bam/MultipleActivitiesConcurrentlyTest.java @@ -0,0 +1,43 @@ +/** + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.camel.bam; + +import static org.apache.camel.language.juel.JuelExpression.el; + +/** + * @version $Revision: 1.1 $ + */ +public class MultipleActivitiesConcurrentlyTest extends MultipleProcessesTest { + + @Override + public void testBam() throws Exception { + overdueEndpoint.expectedMessageCount(1); + overdueEndpoint.message(0).predicate(el("${in.body.correlationKey == '124'}")); + + Thread thread = new Thread("B sender") { + public void run() { + sendBMessages(); + } + }; + thread.start(); + + sendAMessages(); + + overdueEndpoint.assertIsSatisfied(); + } +} diff --git a/components/camel-bam/src/test/java/org/apache/camel/bam/MultipleProcessesTest.java b/components/camel-bam/src/test/java/org/apache/camel/bam/MultipleProcessesTest.java new file mode 100644 index 0000000000000..c9cbf4456933f --- /dev/null +++ b/components/camel-bam/src/test/java/org/apache/camel/bam/MultipleProcessesTest.java @@ -0,0 +1,48 @@ +/** + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.camel.bam; + +import static org.apache.camel.language.juel.JuelExpression.el; + +/** + * @version $Revision: 1.1 $ + */ +public class MultipleProcessesTest extends BamRouteTest { + + @Override + public void testBam() throws Exception { + overdueEndpoint.expectedMessageCount(1); + overdueEndpoint.message(0).predicate(el("${in.body.correlationKey == '124'}")); + + sendAMessages(); + sendBMessages(); + + overdueEndpoint.assertIsSatisfied(); + } + + protected void sendAMessages() { + template.sendBody("direct:a", "<hello id='123'>A</hello>"); + template.sendBody("direct:a", "<hello id='124'>B</hello>"); + template.sendBody("direct:a", "<hello id='125'>C</hello>"); + } + + protected void sendBMessages() { + template.sendBody("direct:b", "<hello id='123'>A</hello>"); + template.sendBody("direct:b", "<hello id='125'>C</hello>"); + } +} diff --git a/components/camel-bam/src/test/profiles/derby/META-INF/persistence.xml b/components/camel-bam/src/test/profiles/derby/META-INF/persistence.xml new file mode 100644 index 0000000000000..1ee96e339587a --- /dev/null +++ b/components/camel-bam/src/test/profiles/derby/META-INF/persistence.xml @@ -0,0 +1,35 @@ +<?xml version="1.0" encoding="UTF-8"?> +<!-- + Copyright 2006 The Apache Software Foundation. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+--> +<persistence xmlns="http://java.sun.com/xml/ns/persistence" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + version="1.0"> + + <persistence-unit name="bam" transaction-type="RESOURCE_LOCAL"> + <class>org.apache.camel.bam.model.ActivityDefinition</class> + <class>org.apache.camel.bam.model.ActivityState</class> + <class>org.apache.camel.bam.model.ProcessDefinition</class> + <class>org.apache.camel.bam.model.ProcessInstance</class> + + <properties> + <property name="hibernate.dialect" value="org.hibernate.dialect.DerbyDialect"/> + <property name="hibernate.connection.driver_class" value="org.apache.derby.jdbc.EmbeddedDriver"/> + <property name="hibernate.connection.url" value="jdbc:derby:target/test/database;create=true"/> + <property name="hibernate.hbm2ddl.auto" value="create"/> + </properties> + + </persistence-unit> +</persistence> diff --git a/components/camel-bam/src/test/profiles/mysql/META-INF/persistence.xml b/components/camel-bam/src/test/profiles/mysql/META-INF/persistence.xml index ad93d7cbd1b1d..5b76d5d3cab1c 100644 --- a/components/camel-bam/src/test/profiles/mysql/META-INF/persistence.xml +++ b/components/camel-bam/src/test/profiles/mysql/META-INF/persistence.xml @@ -28,6 +28,11 @@ <property name="hibernate.dialect" value="org.hibernate.dialect.MySQLDialect"/> <property name="hibernate.connection.driver_class" value="com.mysql.jdbc.Driver"/> <property name="hibernate.connection.url" value="jdbc:mysql://localhost/camel_bam?relaxAutoCommit=true"/> + + <!-- TODO change to correct user --> + <property name="hibernate.connection.username" value="jstrachan"/> + <!--<property name="hibernate.connection.password" value=""/>--> + <property name="hibernate.hbm2ddl.auto" value="create"/> </properties> diff --git a/components/camel-bam/src/test/resources/log4j.properties b/components/camel-bam/src/test/resources/log4j.properties index 0fe69a1406688..7aab19225406e 100644 --- a/components/camel-bam/src/test/resources/log4j.properties +++ b/components/camel-bam/src/test/resources/log4j.properties @@ -18,12 +18,13 @@ # # The logging properties used for eclipse testing, We want to see debug output on the console. # -log4j.rootLogger=INFO, out +log4j.rootLogger=DEBUG, out -#log4j.logger.org.apache.activemq=DEBUG +# uncomment the next line to debug Camel log4j.logger.org.apache.camel=DEBUG -log4j.logger.org.springframework=WARN -log4j.logger.org.hibernate=WARN + +#log4j.logger.org.springframework=WARN +#log4j.logger.org.hibernate=WARN # CONSOLE appender not used by default log4j.appender.out=org.apache.log4j.ConsoleAppender diff --git a/examples/camel-example-bam/src/main/resources/META-INF/persistence.xml b/examples/camel-example-bam/src/main/resources/META-INF/persistence.xml index ace49a9c84cc6..e60477e5471e7 100644 --- a/examples/camel-example-bam/src/main/resources/META-INF/persistence.xml +++ b/examples/camel-example-bam/src/main/resources/META-INF/persistence.xml @@ -31,6 +31,12 @@ <property name="hibernate.connection.password" value=""/> <property name="hibernate.connection.url" value="jdbc:hsqldb:mem:camel_bam"/> <property name="hibernate.hbm2ddl.auto" value="create"/> + + <!-- debugging flags --> +<!-- + <property name="hibernate.show_sql" value="true"/> + <property name="hibernate.format_sql" value="true"/> +--> </properties> </persistence-unit>
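At the heart of the camel-bam diff above is an optimistic retry loop: the transactional work is re-executed whenever a concurrent insert of the same correlation row makes the attempt fail, each failed attempt is rolled back, and the loop is bounded by maximumRetries. A minimal standalone sketch of that pattern on top of Spring's TransactionTemplate follows; RetryingWriter and UnitOfWork are illustrative names, not Camel API.

import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionTemplate;

public class RetryingWriter {
    /** Hypothetical callback representing the transactional unit of work. */
    public interface UnitOfWork {
        Object run() throws Exception;
    }

    private final TransactionTemplate transactionTemplate;
    private int maximumRetries = 30; // same bound the patch introduces

    public RetryingWriter(TransactionTemplate transactionTemplate) {
        this.transactionTemplate = transactionTemplate;
    }

    public Object writeWithRetries(final UnitOfWork work) {
        Object result = null;
        for (int attempt = 0; result == null && attempt < maximumRetries; attempt++) {
            result = transactionTemplate.execute(new TransactionCallback<Object>() {
                public Object doInTransaction(TransactionStatus status) {
                    try {
                        return work.run();
                    } catch (Exception e) {
                        // e.g. a duplicate row inserted by a concurrent transaction:
                        // roll this attempt back and let the outer loop retry
                        status.setRollbackOnly();
                        return null;
                    }
                }
            });
        }
        return result;
    }
}

The companion change in JpaBamProcessorSupport.loadEntity, flushing immediately after persist, makes the duplicate-row failure surface inside this loop instead of at commit time, which is what makes the retry possible.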
24f734d135e137229294f4478b2aba251b4184d3
camel
Fix for MulticastStreamCachingTest after interceptor changes. git-svn-id: https://svn.apache.org/repos/asf/activemq/camel/trunk@658260 13f79535-47bb-0310-9956-ffa450edef68
c
https://github.com/apache/camel
diff --git a/camel-core/pom.xml b/camel-core/pom.xml index e33f3ca66bf92..bace8f7a95560 100755 --- a/camel-core/pom.xml +++ b/camel-core/pom.xml @@ -114,7 +114,6 @@ <excludes> <!-- TODO FIXME ASAP --> <exclude>**/InterceptorLogTest.*</exclude> - <exclude>**/MulticastStreamCachingTest.*</exclude> </excludes> </configuration> </plugin> diff --git a/camel-core/src/main/java/org/apache/camel/model/MulticastType.java b/camel-core/src/main/java/org/apache/camel/model/MulticastType.java index 98cc1360a6492..4acfd1b4bae4f 100644 --- a/camel-core/src/main/java/org/apache/camel/model/MulticastType.java +++ b/camel-core/src/main/java/org/apache/camel/model/MulticastType.java @@ -30,6 +30,7 @@ import org.apache.camel.processor.MulticastProcessor; import org.apache.camel.processor.aggregate.AggregationStrategy; import org.apache.camel.processor.aggregate.UseLatestAggregationStrategy; +import org.apache.camel.processor.interceptor.StreamCachingInterceptor; /** * @version $Revision$ @@ -88,6 +89,6 @@ public void setThreadPoolExecutor(ThreadPoolExecutor executor) { @Override protected Processor wrapProcessorInInterceptors(RouteContext routeContext, Processor target) throws Exception { // No need to wrap me in interceptors as they are all applied directly to my children - return target; + return new StreamCachingInterceptor(target); } } \ No newline at end of file diff --git a/camel-core/src/test/java/org/apache/camel/processor/MulticastStreamCachingTest.java b/camel-core/src/test/java/org/apache/camel/processor/MulticastStreamCachingTest.java index 82d9c9743cbec..7364e24cc7e98 100644 --- a/camel-core/src/test/java/org/apache/camel/processor/MulticastStreamCachingTest.java +++ b/camel-core/src/test/java/org/apache/camel/processor/MulticastStreamCachingTest.java @@ -76,7 +76,7 @@ public void process(Exchange exchange) { return new RouteBuilder() { public void configure() { //stream caching should fix re-readability issues when multicasting messags - from("direct:a").streamCaching().multicast().to("direct:x", "direct:y", "direct:z"); + from("direct:a").multicast().to("direct:x", "direct:y", "direct:z"); from("direct:x").process(processor).to("mock:x"); from("direct:y").process(processor).to("mock:y");
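The one-line route change works because StreamCachingInterceptor turns a one-shot stream body into something each multicast branch can re-read; without it, the first branch drains the InputStream and the remaining branches see an empty body. A self-contained sketch of that buffering idea, independent of Camel's actual interceptor:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.util.Arrays;
import java.util.List;

public class StreamCachingSketch {

    // read the one-shot stream fully into a reusable buffer
    static byte[] cache(InputStream body) throws Exception {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        byte[] chunk = new byte[4096];
        for (int n; (n = body.read(chunk)) != -1; ) {
            buffer.write(chunk, 0, n);
        }
        return buffer.toByteArray();
    }

    public static void main(String[] args) throws Exception {
        InputStream oneShot = new ByteArrayInputStream("payload".getBytes("UTF-8"));
        byte[] cached = cache(oneShot);

        // each "multicast branch" gets a fresh stream over the same cached bytes
        List<String> branches = Arrays.asList("x", "y", "z");
        for (String branch : branches) {
            InputStream replay = new ByteArrayInputStream(cached);
            System.out.println(branch + " can read " + replay.available() + " bytes");
        }
    }
}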
72e6b0f964e7cb14e3b907b3022cd259e1994290
intellij-community
diff: allow highlighting changes without a border
a
https://github.com/JetBrains/intellij-community
diff --git a/platform/diff-impl/src/com/intellij/diff/util/DiffDividerDrawUtil.java b/platform/diff-impl/src/com/intellij/diff/util/DiffDividerDrawUtil.java index b6db2850a8853..4b4e150d312a4 100644 --- a/platform/diff-impl/src/com/intellij/diff/util/DiffDividerDrawUtil.java +++ b/platform/diff-impl/src/com/intellij/diff/util/DiffDividerDrawUtil.java @@ -62,13 +62,7 @@ public static void paintPolygons(@NotNull Graphics2D gg, @NotNull Editor editor1, @NotNull Editor editor2, @NotNull DividerPaintable paintable) { - List<DividerPolygon> polygons = createVisiblePolygons(editor1, editor2, paintable); - - GraphicsConfig config = GraphicsUtil.setupAAPainting(gg); - for (DividerPolygon polygon : polygons) { - polygon.paint(gg, width); - } - config.restore(); + paintPolygons(gg, width, true, true, editor1, editor2, paintable); } public static void paintSimplePolygons(@NotNull Graphics2D gg, @@ -76,11 +70,21 @@ public static void paintSimplePolygons(@NotNull Graphics2D gg, @NotNull Editor editor1, @NotNull Editor editor2, @NotNull DividerPaintable paintable) { + paintPolygons(gg, width, true, false, editor1, editor2, paintable); + } + + public static void paintPolygons(@NotNull Graphics2D gg, + int width, + boolean paintBorder, + boolean curved, + @NotNull Editor editor1, + @NotNull Editor editor2, + @NotNull DividerPaintable paintable) { List<DividerPolygon> polygons = createVisiblePolygons(editor1, editor2, paintable); GraphicsConfig config = GraphicsUtil.setupAAPainting(gg); for (DividerPolygon polygon : polygons) { - polygon.paintSimple(gg, width); + polygon.paint(gg, width, paintBorder, curved); } config.restore(); } @@ -228,13 +232,15 @@ public DividerPolygon(int start1, int start2, int end1, int end2, @NotNull Color myColor = color; } - private void paint(Graphics2D g, int width) { + private void paint(Graphics2D g, int width, boolean paintBorder, boolean curve) { + Color borderColor = paintBorder ? DiffDrawUtil.getFramingColor(myColor) : myColor; // we need this shift, because editor background highlight is painted in range "Y(line) - 1 .. 
Y(line + 1) - 1" - DiffDrawUtil.drawCurveTrapezium(g, 0, width, myStart1 - 1, myEnd1 - 1, myStart2 - 1, myEnd2 - 1, myColor); - } - - private void paintSimple(Graphics2D g, int width) { - DiffDrawUtil.drawTrapezium(g, 0, width, myStart1 - 1, myEnd1 - 1, myStart2 - 1, myEnd2 - 1, myColor); + if (curve) { + DiffDrawUtil.drawCurveTrapezium(g, 0, width, myStart1 - 1, myEnd1 - 1, myStart2 - 1, myEnd2 - 1, myColor, borderColor); + } + else { + DiffDrawUtil.drawTrapezium(g, 0, width, myStart1 - 1, myEnd1 - 1, myStart2 - 1, myEnd2 - 1, myColor, borderColor); + } } private void paintOnScrollbar(Graphics2D g, int width) { diff --git a/platform/diff-impl/src/com/intellij/diff/util/DiffDrawUtil.java b/platform/diff-impl/src/com/intellij/diff/util/DiffDrawUtil.java index 57d26dbc4f242..f677ea9d4ae55 100644 --- a/platform/diff-impl/src/com/intellij/diff/util/DiffDrawUtil.java +++ b/platform/diff-impl/src/com/intellij/diff/util/DiffDrawUtil.java @@ -70,15 +70,28 @@ public static void drawTrapezium(@NotNull Graphics2D g, int start1, int end1, int start2, int end2, @NotNull Color color) { + drawTrapezium(g, x1, x2, start1, end1, start2, end2, color, getFramingColor(color)); + } + + public static void drawTrapezium(@NotNull Graphics2D g, + int x1, int x2, + int start1, int end1, + int start2, int end2, + @Nullable Color fillColor, + @Nullable Color borderColor) { final int[] xPoints = new int[]{x1, x2, x2, x1}; final int[] yPoints = new int[]{start1, start2, end2, end1}; - g.setColor(color); - g.fillPolygon(xPoints, yPoints, xPoints.length); + if (fillColor != null) { + g.setColor(fillColor); + g.fillPolygon(xPoints, yPoints, xPoints.length); + } - g.setColor(getFramingColor(color)); - g.drawLine(x1, start1, x2, start2); - g.drawLine(x1, end1, x2, end2); + if (borderColor != null) { + g.setColor(borderColor); + g.drawLine(x1, start1, x2, start2); + g.drawLine(x1, end1, x2, end2); + } } public static void drawCurveTrapezium(@NotNull Graphics2D g,
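After this refactoring both public entry points funnel into a single paintPolygons with paintBorder and curved flags, and "highlight without border" is achieved by reusing the fill color as the border color (or, at the drawing layer, passing a null border). A stripped-down Java2D sketch of the straight-edged variant, mirroring the drawTrapezium overload added above:

import java.awt.Color;
import java.awt.Graphics2D;

final class TrapeziumSketch {
    // fillColor and borderColor are independently optional
    static void drawTrapezium(Graphics2D g, int x1, int x2,
                              int start1, int end1, int start2, int end2,
                              Color fillColor, Color borderColor) {
        int[] xPoints = {x1, x2, x2, x1};
        int[] yPoints = {start1, start2, end2, end1};

        if (fillColor != null) {
            g.setColor(fillColor);
            g.fillPolygon(xPoints, yPoints, xPoints.length);
        }
        if (borderColor != null) {
            g.setColor(borderColor);
            g.drawLine(x1, start1, x2, start2); // top edge
            g.drawLine(x1, end1, x2, end2);     // bottom edge
        }
    }
}

Making both colors independently nullable keeps one drawing routine for all the combinations: filled with frame, filled without frame, and frame only.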
52bd53f5ea5eec84818d65b40e81d0a82ada6ba8
ReactiveX-RxJava
Restructure into smaller files
p
https://github.com/ReactiveX/RxJava
diff --git a/.classpath b/.classpath deleted file mode 100644 index b1ae8bae1c..0000000000 --- a/.classpath +++ /dev/null @@ -1,9 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<classpath> - <classpathentry exported="true" kind="con" path="GROOVY_DSL_SUPPORT"/> - <classpathentry kind="con" path="GROOVY_SUPPORT"/> - <classpathentry exported="true" kind="con" path="com.springsource.sts.gradle.classpathcontainer"/> - <classpathentry kind="con" path="com.springsource.sts.gradle.dsld.classpathcontainer"/> - <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/> - <classpathentry kind="output" path="bin"/> -</classpath> diff --git a/.gitignore b/.gitignore index 618e741f86..313af3cb82 100644 --- a/.gitignore +++ b/.gitignore @@ -38,3 +38,26 @@ Thumbs.db # Gradle Files # ################ .gradle + +# Build output directies +/target +*/target +/build +*/build +# +# # IntelliJ specific files/directories +out +.idea +*.ipr +*.iws +*.iml +atlassian-ide-plugin.xml + +# Eclipse specific files/directories +.classpath +.project +.settings +.metadata + +# NetBeans specific files/directories +.nbattrs diff --git a/.project b/.project deleted file mode 100644 index f2d845e45a..0000000000 --- a/.project +++ /dev/null @@ -1,39 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<projectDescription> - <name>gradle-template</name> - <comment></comment> - <projects> - </projects> - <buildSpec> - <buildCommand> - <name>org.eclipse.jdt.core.javabuilder</name> - <arguments> - </arguments> - </buildCommand> - </buildSpec> - <natures> - <nature>com.springsource.sts.gradle.core.nature</nature> - <nature>org.eclipse.jdt.core.javanature</nature> - <nature>org.eclipse.jdt.groovy.core.groovyNature</nature> - </natures> - <filteredResources> - <filter> - <id>1332049227118</id> - <name></name> - <type>10</type> - <matcher> - <id>org.eclipse.ui.ide.orFilterMatcher</id> - <arguments> - <matcher> - <id>org.eclipse.ui.ide.multiFilter</id> - <arguments>1.0-projectRelativePath-equals-true-false-template-server</arguments> - </matcher> - <matcher> - <id>org.eclipse.ui.ide.multiFilter</id> - <arguments>1.0-projectRelativePath-equals-true-false-template-client</arguments> - </matcher> - </arguments> - </matcher> - </filter> - </filteredResources> -</projectDescription> diff --git a/.settings/gradle/com.springsource.sts.gradle.core.import.prefs b/.settings/gradle/com.springsource.sts.gradle.core.import.prefs deleted file mode 100644 index e86c91081f..0000000000 --- a/.settings/gradle/com.springsource.sts.gradle.core.import.prefs +++ /dev/null @@ -1,9 +0,0 @@ -#com.springsource.sts.gradle.core.preferences.GradleImportPreferences -#Sat Mar 17 22:40:13 PDT 2012 -enableAfterTasks=true -afterTasks=afterEclipseImport; -enableDependendencyManagement=true -enableBeforeTasks=true -projects=;template-client;template-server; -enableDSLD=true -beforeTasks=cleanEclipse;eclipse; diff --git a/.settings/gradle/com.springsource.sts.gradle.core.prefs b/.settings/gradle/com.springsource.sts.gradle.core.prefs deleted file mode 100644 index 445ff6da6f..0000000000 --- a/.settings/gradle/com.springsource.sts.gradle.core.prefs +++ /dev/null @@ -1,4 +0,0 @@ -#com.springsource.sts.gradle.core.preferences.GradleProjectPreferences -#Sat Mar 17 22:40:29 PDT 2012 -com.springsource.sts.gradle.rootprojectloc= -com.springsource.sts.gradle.linkedresources= diff --git a/.settings/gradle/com.springsource.sts.gradle.refresh.prefs b/.settings/gradle/com.springsource.sts.gradle.refresh.prefs deleted file mode 100644 index 01e59693e7..0000000000 --- 
a/.settings/gradle/com.springsource.sts.gradle.refresh.prefs +++ /dev/null @@ -1,9 +0,0 @@ -#com.springsource.sts.gradle.core.actions.GradleRefreshPreferences -#Sat Mar 17 22:40:27 PDT 2012 -enableAfterTasks=true -afterTasks=afterEclipseImport; -useHierarchicalNames=false -enableBeforeTasks=true -addResourceFilters=true -enableDSLD=true -beforeTasks=cleanEclipse;eclipse; diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000000..7f8ced0d1f --- /dev/null +++ b/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2012 Netflix, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/build.gradle b/build.gradle index 5297034a51..9eef3329e1 100644 --- a/build.gradle +++ b/build.gradle @@ -5,18 +5,16 @@ ext.githubProjectName = rootProject.name // TEMPLATE: change to match github pro apply from: file('gradle/convention.gradle') apply from: file('gradle/maven.gradle') apply from: file('gradle/check.gradle') +apply from: file('gradle/license.gradle') -subprojects -{ - group = 'com.netflix' +subprojects { + group = 'com.netflix.osstemplate' // TEMPLATE: Set to organization of project - repositories - { + repositories { mavenCentral() } - dependencies - { + dependencies { compile 'javax.ws.rs:jsr311-api:1.1.1' compile 'com.sun.jersey:jersey-core:1.11' testCompile 'org.testng:testng:6.1.1' @@ -24,21 +22,17 @@ subprojects } } -project(':template-client') -{ - dependencies - { +project(':template-client') { + dependencies { compile 'org.slf4j:slf4j-api:1.6.3' compile 'com.sun.jersey:jersey-client:1.11' } } -project(':template-server') -{ +project(':template-server') { apply plugin: 'war' apply plugin: 'jetty' - dependencies - { + dependencies { compile 'com.sun.jersey:jersey-server:1.11' compile 'com.sun.jersey:jersey-servlet:1.11' compile project(':template-client') diff --git a/codequality/HEADER b/codequality/HEADER new file mode 100644 index 0000000000..b27b192925 --- /dev/null +++ b/codequality/HEADER @@ -0,0 +1,13 @@ + Copyright 2012 Netflix, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/codequality/checkstyle.xml b/codequality/checkstyle.xml index 3c8a8e6c75..481d2829fd 100644 --- a/codequality/checkstyle.xml +++ b/codequality/checkstyle.xml @@ -50,6 +50,7 @@ <property name="allowMissingReturnTag" value="true"/> <property name="allowThrowsTagsForSubclasses" value="true"/> <property name="allowUndeclaredRTE" value="true"/> + <property name="allowMissingPropertyJavadoc" value="true"/> </module> <module name="JavadocType"> <property name="scope" value="package"/> diff --git a/gradle/check.gradle b/gradle/check.gradle index cf6f0461ae..0f80516d45 100644 --- a/gradle/check.gradle +++ b/gradle/check.gradle @@ -9,8 +9,10 @@ subprojects { // FindBugs apply plugin: 'findbugs' + //tasks.withType(Findbugs) { reports.html.enabled true } // PMD apply plugin: 'pmd' + //tasks.withType(Pmd) { reports.html.enabled true } } diff --git a/gradle/license.gradle b/gradle/license.gradle new file mode 100644 index 0000000000..9d04830321 --- /dev/null +++ b/gradle/license.gradle @@ -0,0 +1,5 @@ +buildscript { + dependencies { classpath 'nl.javadude.gradle.plugins:license-gradle-plugin:0.4' } +} + +apply plugin: 'license' \ No newline at end of file diff --git a/gradle/local.gradle b/gradle/local.gradle new file mode 100644 index 0000000000..6f2d204b8a --- /dev/null +++ b/gradle/local.gradle @@ -0,0 +1 @@ +apply from: 'file://Users/jryan/Workspaces/jryan_build/Tools/nebula-boot/artifactory.gradle' diff --git a/gradle/maven.gradle b/gradle/maven.gradle index 8639564ce4..cb75dfb637 100644 --- a/gradle/maven.gradle +++ b/gradle/maven.gradle @@ -4,7 +4,7 @@ subprojects { apply plugin: 'signing' signing { - required rootProject.performingRelease + required { performingRelease && gradle.taskGraph.hasTask("uploadMavenCentral")} sign configurations.archives } diff --git a/template-client/bin/com/netflix/template/client/TalkClient.class b/template-client/bin/com/netflix/template/client/TalkClient.class deleted file mode 100644 index 90bbaeb353..0000000000 Binary files a/template-client/bin/com/netflix/template/client/TalkClient.class and /dev/null differ diff --git a/template-client/bin/com/netflix/template/common/Sentence.class b/template-client/bin/com/netflix/template/common/Sentence.class deleted file mode 100644 index 0083f33477..0000000000 Binary files a/template-client/bin/com/netflix/template/common/Sentence.class and /dev/null differ diff --git a/template-client/src/main/java/com/netflix/template/client/TalkClient.java b/template-client/src/main/java/com/netflix/template/client/TalkClient.java index c3ebf86090..fc9d20d33d 100644 --- a/template-client/src/main/java/com/netflix/template/client/TalkClient.java +++ b/template-client/src/main/java/com/netflix/template/client/TalkClient.java @@ -8,26 +8,42 @@ import javax.ws.rs.core.MediaType; +/** + * Delegates to remote TalkServer over REST. + * @author jryan + * + */ public class TalkClient implements Conversation { - WebResource webResource; + private WebResource webResource; - TalkClient(String location) { + /** + * Instantiate client. + * + * @param location URL to the base of resources, e.g. 
http://localhost:8080/template-server/rest + */ + public TalkClient(String location) { Client client = Client.create(); client.addFilter(new LoggingFilter(System.out)); webResource = client.resource(location + "/talk"); } + @Override public Sentence greeting() { Sentence s = webResource.accept(MediaType.APPLICATION_XML).get(Sentence.class); return s; } + @Override public Sentence farewell() { Sentence s = webResource.accept(MediaType.APPLICATION_XML).delete(Sentence.class); return s; } + /** + * Tests out client. + * @param args Not applicable + */ public static void main(String[] args) { TalkClient remote = new TalkClient("http://localhost:8080/template-server/rest"); System.out.println(remote.greeting().getWhole()); diff --git a/template-client/src/main/java/com/netflix/template/common/Conversation.java b/template-client/src/main/java/com/netflix/template/common/Conversation.java index b85e23e98b..c190f03bb7 100644 --- a/template-client/src/main/java/com/netflix/template/common/Conversation.java +++ b/template-client/src/main/java/com/netflix/template/common/Conversation.java @@ -1,6 +1,21 @@ package com.netflix.template.common; +/** + * Hold a conversation. + * @author jryan + * + */ public interface Conversation { + + /** + * Initiates a conversation. + * @return Sentence words from geeting + */ Sentence greeting(); + + /** + * End the conversation. + * @return + */ Sentence farewell(); -} \ No newline at end of file +} diff --git a/template-client/src/main/java/com/netflix/template/common/Sentence.java b/template-client/src/main/java/com/netflix/template/common/Sentence.java index bf561a6d5a..616f72efb0 100644 --- a/template-client/src/main/java/com/netflix/template/common/Sentence.java +++ b/template-client/src/main/java/com/netflix/template/common/Sentence.java @@ -3,17 +3,31 @@ import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; +/** + * Container for words going back and forth. + * @author jryan + * + */ @XmlRootElement public class Sentence { private String whole; + @SuppressWarnings("unused") private Sentence() { }; + /** + * Initialize sentence. + * @param whole + */ public Sentence(String whole) { this.whole = whole; } + /** + * whole getter. + * @return + */ @XmlElement public String getWhole() { return whole; @@ -22,4 +36,4 @@ public String getWhole() { public void setWhole(String whole) { this.whole = whole; } -} \ No newline at end of file +}
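Most of this restructuring is build and license boilerplate, but the now-documented TalkClient shows the Jersey 1.x client pattern the template is built on. The raw calls behind it look roughly like the sketch below; the URL is the same placeholder the template's own main method uses, and reading the responses as String rather than Sentence is a simplification to keep the example dependency-light.

import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.api.client.filter.LoggingFilter;
import javax.ws.rs.core.MediaType;

public class RawJerseyDemo {
    public static void main(String[] args) {
        Client client = Client.create();
        client.addFilter(new LoggingFilter(System.out)); // log requests/responses to stdout
        WebResource talk = client.resource("http://localhost:8080/template-server/rest/talk");

        // greeting() maps to GET /talk, farewell() to DELETE /talk
        String greeting = talk.accept(MediaType.APPLICATION_XML).get(String.class);
        String farewell = talk.accept(MediaType.APPLICATION_XML).delete(String.class);
        System.out.println(greeting + " / " + farewell);
    }
}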
97a8657ae17bf9105a41d4930ee26fb2dbd1e3cc
orientdb
Supported new syntax in SQL UPDATE to use an inner query as target. Example: update ( traverse V.in, E.out from V ) set date = sysdate() where out.size() > 1
a
https://github.com/orientechnologies/orientdb
diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLDelete.java b/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLDelete.java index 338f1613f4b..aac0e71c057 100644 --- a/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLDelete.java +++ b/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLDelete.java @@ -76,7 +76,7 @@ public OCommandExecutorSQLDelete parse(final OCommandRequest iRequest) { String subjectName = parserRequiredWord(false, "Syntax error", " =><,\r\n"); if (subjectName == null) - throwSyntaxErrorException("Invalid subject name. Expected cluster, class or index"); + throwSyntaxErrorException("Invalid subject name. Expected cluster, class, index or sub-query"); if (OStringParser.startsWithIgnoreCase(subjectName, OCommandExecutorSQLAbstract.INDEX_PREFIX)) { // INDEX diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLUpdate.java b/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLUpdate.java index a0cb59f4a30..f5befe3c4ea 100644 --- a/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLUpdate.java +++ b/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLUpdate.java @@ -37,6 +37,7 @@ import com.orientechnologies.orient.core.query.OQuery; import com.orientechnologies.orient.core.record.impl.ODocument; import com.orientechnologies.orient.core.serialization.serializer.OStringSerializerHelper; +import com.orientechnologies.orient.core.sql.filter.OSQLFilter; import com.orientechnologies.orient.core.sql.filter.OSQLFilterItem; import com.orientechnologies.orient.core.sql.functions.OSQLFunctionRuntime; import com.orientechnologies.orient.core.sql.query.OSQLAsynchQuery; @@ -61,6 +62,7 @@ public class OCommandExecutorSQLUpdate extends OCommandExecutorSQLSetAware imple private Map<String, Number> incrementEntries = new LinkedHashMap<String, Number>(); private OQuery<?> query; + private OSQLFilter compiledFilter; private int recordCount = 0; private String subjectName; private static final Object EMPTY_VALUE = new Object(); @@ -82,9 +84,11 @@ public OCommandExecutorSQLUpdate parse(final OCommandRequest iRequest) { query = null; recordCount = 0; - parserRequiredKeyword("UPDATE"); + parserRequiredKeyword(KEYWORD_UPDATE); - subjectName = parserRequiredWord(true, "Invalid target"); + subjectName = parserRequiredWord(true, "Invalid target", " =><,\r\n"); + if (subjectName == null) + throwSyntaxErrorException("Invalid subject name. 
Expected cluster, class, index or sub-query"); parserNextWord(true); String word = parserGetLastWord(); @@ -116,7 +120,16 @@ else if (word.equals(KEYWORD_INCREMENT)) final String additionalStatement = parserGetLastWord(); - if (additionalStatement.equals(OCommandExecutorSQLAbstract.KEYWORD_WHERE) + if (subjectName.startsWith("(")) { + subjectName = subjectName.trim(); + query = database.command(new OSQLAsynchQuery<ODocument>(subjectName.substring(1, subjectName.length() - 1), this)); + + if (additionalStatement.equals(OCommandExecutorSQLAbstract.KEYWORD_WHERE) + || additionalStatement.equals(OCommandExecutorSQLAbstract.KEYWORD_LIMIT)) + compiledFilter = OSQLEngine.getInstance().parseCondition(parserText.substring(parserGetCurrentPosition()), getContext(), + KEYWORD_WHERE); + + } else if (additionalStatement.equals(OCommandExecutorSQLAbstract.KEYWORD_WHERE) || additionalStatement.equals(OCommandExecutorSQLAbstract.KEYWORD_LIMIT)) query = new OSQLAsynchQuery<ODocument>("select from " + subjectName + " " + additionalStatement + " " + parserText.substring(parserGetCurrentPosition()), this); @@ -149,6 +162,12 @@ public Object execute(final Map<Object, Object> iArgs) { public boolean result(final Object iRecord) { final ODocument record = (ODocument) iRecord; + if (compiledFilter != null) { + // ADDITIONAL FILTERING + if (!(Boolean) compiledFilter.evaluate(record, null, context)) + return false; + } + boolean recordUpdated = false; parameters.reset();
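For context, a minimal sketch of driving the new sub-query target from the Java API. Assumptions not taken from the diff: an in-memory database, the stock graph classes V and E, and a writable date property. OCommandSQL is OrientDB's generic synchronous command wrapper; per the diff, the executor internally wraps the inner query in an OSQLAsynchQuery and evaluates the trailing WHERE as an additional OSQLFilter over each returned record.
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.sql.OCommandSQL;

public class UpdateSubQueryExample {
    public static void main(String[] args) {
        // Hypothetical in-memory database; real code would point at an existing graph.
        ODatabaseDocumentTx db = new ODatabaseDocumentTx("memory:demo").create();
        try {
            // The UPDATE target may now be an inner query; the WHERE clause is
            // applied as an extra filter on each record the sub-query returns.
            Number updated = db.command(new OCommandSQL(
                    "update ( traverse V.in, E.out from V ) set date = sysdate() where out.size() > 1"))
                    .execute();
            System.out.println("Records updated: " + updated);
        } finally {
            db.close();
        }
    }
}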
9a94fd278693e098837dddce222a732c1a57f532
ReactiveX-RxJava
CurrentThreadScheduler Memory Leak Fixed--- Current/Immediate/NewThread/Executor Schedulers are passing unit tests-- Current/NewThread/Executor Schedulers do not leak memory on the recursion test (Immediate can’t be used for recursion otherwise it stack overflows)-
c
https://github.com/ReactiveX/RxJava
diff --git a/rxjava-core/src/main/java/rx/schedulers/CurrentThreadScheduler.java b/rxjava-core/src/main/java/rx/schedulers/CurrentThreadScheduler.java index d1550a2422..67d66aa4aa 100644 --- a/rxjava-core/src/main/java/rx/schedulers/CurrentThreadScheduler.java +++ b/rxjava-core/src/main/java/rx/schedulers/CurrentThreadScheduler.java @@ -17,10 +17,12 @@ import java.util.PriorityQueue; import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; import rx.Scheduler; import rx.Subscription; +import rx.subscriptions.CompositeSubscription; +import rx.subscriptions.MultipleAssignmentSubscription; import rx.util.functions.Func2; /** @@ -28,6 +30,7 @@ */ public class CurrentThreadScheduler extends Scheduler { private static final CurrentThreadScheduler INSTANCE = new CurrentThreadScheduler(); + private static final AtomicLong counter = new AtomicLong(0); public static CurrentThreadScheduler getInstance() { return INSTANCE; @@ -38,25 +41,27 @@ public static CurrentThreadScheduler getInstance() { /* package accessible for unit tests */CurrentThreadScheduler() { } - private final AtomicInteger counter = new AtomicInteger(0); - @Override public <T> Subscription schedule(T state, Func2<? super Scheduler, ? super T, ? extends Subscription> action) { + // immediately move to the InnerCurrentThreadScheduler + InnerCurrentThreadScheduler innerScheduler = new InnerCurrentThreadScheduler(); DiscardableAction<T> discardableAction = new DiscardableAction<T>(state, action); - enqueue(discardableAction, now()); - return discardableAction; + enqueue(innerScheduler, discardableAction, now()); + return innerScheduler; } @Override public <T> Subscription schedule(T state, Func2<? super Scheduler, ? super T, ? extends Subscription> action, long dueTime, TimeUnit unit) { long execTime = now() + unit.toMillis(dueTime); + // immediately move to the InnerCurrentThreadScheduler + InnerCurrentThreadScheduler innerScheduler = new InnerCurrentThreadScheduler(); DiscardableAction<T> discardableAction = new DiscardableAction<T>(state, new SleepingAction<T>(action, this, execTime)); - enqueue(discardableAction, execTime); + enqueue(innerScheduler, discardableAction, execTime); return discardableAction; } - private void enqueue(DiscardableAction<?> action, long execTime) { + private static void enqueue(Scheduler scheduler, DiscardableAction<?> action, long execTime) { PriorityQueue<TimedAction> queue = QUEUE.get(); boolean exec = queue == null; @@ -69,19 +74,50 @@ private void enqueue(DiscardableAction<?> action, long execTime) { if (exec) { while (!queue.isEmpty()) { - queue.poll().action.call(this); + queue.poll().action.call(scheduler); } QUEUE.set(null); } } + private static class InnerCurrentThreadScheduler extends Scheduler implements Subscription { + private final MultipleAssignmentSubscription childSubscription = new MultipleAssignmentSubscription(); + + @Override + public <T> Subscription schedule(T state, Func2<? super Scheduler, ? super T, ? extends Subscription> action) { + DiscardableAction<T> discardableAction = new DiscardableAction<T>(state, action); + childSubscription.set(discardableAction); + enqueue(this, discardableAction, now()); + return childSubscription; + } + + @Override + public <T> Subscription schedule(T state, Func2<? super Scheduler, ? super T, ? 
extends Subscription> action, long delayTime, TimeUnit unit) { + long execTime = now() + unit.toMillis(delayTime); + + DiscardableAction<T> discardableAction = new DiscardableAction<T>(state, action); + childSubscription.set(discardableAction); + enqueue(this, discardableAction, execTime); + return childSubscription; + } + + @Override + public void unsubscribe() { + childSubscription.unsubscribe(); + } + + } + + /** + * Use time to sort items so delayed actions are sorted to their appropriate position in the queue. + */ private static class TimedAction implements Comparable<TimedAction> { final DiscardableAction<?> action; final Long execTime; - final Integer count; // In case if time between enqueueing took less than 1ms + final Long count; // In case if time between enqueueing took less than 1ms - private TimedAction(DiscardableAction<?> action, Long execTime, Integer count) { + private TimedAction(DiscardableAction<?> action, Long execTime, Long count) { this.action = action; this.execTime = execTime; this.count = count; diff --git a/rxjava-core/src/test/java/rx/schedulers/AbstractSchedulerConcurrencyTests.java b/rxjava-core/src/test/java/rx/schedulers/AbstractSchedulerConcurrencyTests.java index 8fb00c6553..6602d5071d 100644 --- a/rxjava-core/src/test/java/rx/schedulers/AbstractSchedulerConcurrencyTests.java +++ b/rxjava-core/src/test/java/rx/schedulers/AbstractSchedulerConcurrencyTests.java @@ -8,13 +8,15 @@ import org.junit.Test; +import rx.Observable; +import rx.Observer; import rx.Scheduler; import rx.Subscription; import rx.operators.SafeObservableSubscription; -import rx.subscriptions.CompositeSubscription; import rx.subscriptions.Subscriptions; import rx.util.functions.Action0; import rx.util.functions.Action1; +import rx.util.functions.Func1; import rx.util.functions.Func2; /** @@ -26,6 +28,55 @@ */ public abstract class AbstractSchedulerConcurrencyTests extends AbstractSchedulerTests { + /** + * Bug report: https://github.com/Netflix/RxJava/issues/431 + */ + @Test + public final void testUnSubscribeForScheduler() throws InterruptedException { + final AtomicInteger countReceived = new AtomicInteger(); + final AtomicInteger countGenerated = new AtomicInteger(); + final SafeObservableSubscription s = new SafeObservableSubscription(); + final CountDownLatch latch = new CountDownLatch(1); + + s.wrap(Observable.interval(50, TimeUnit.MILLISECONDS) + .map(new Func1<Long, Long>() { + @Override + public Long call(Long aLong) { + countGenerated.incrementAndGet(); + return aLong; + } + }) + .subscribeOn(getScheduler()) + .observeOn(getScheduler()) + .subscribe(new Observer<Long>() { + @Override + public void onCompleted() { + System.out.println("--- completed"); + } + + @Override + public void onError(Throwable e) { + System.out.println("--- onError"); + } + + @Override + public void onNext(Long args) { + if (countReceived.incrementAndGet() == 2) { + s.unsubscribe(); + latch.countDown(); + } + System.out.println("==> Received " + args); + } + })); + + latch.await(1000, TimeUnit.MILLISECONDS); + + System.out.println("----------- it thinks it is finished ------------------ "); + Thread.sleep(100); + + assertEquals(2, countGenerated.get()); + } + @Test public void testUnsubscribeRecursiveScheduleWithStateAndFunc2() throws InterruptedException { final CountDownLatch latch = new CountDownLatch(1); diff --git a/rxjava-core/src/test/java/rx/schedulers/AbstractSchedulerTests.java b/rxjava-core/src/test/java/rx/schedulers/AbstractSchedulerTests.java index 921780f2b2..508727bc12 100644 --- 
a/rxjava-core/src/test/java/rx/schedulers/AbstractSchedulerTests.java +++ b/rxjava-core/src/test/java/rx/schedulers/AbstractSchedulerTests.java @@ -15,11 +15,11 @@ import rx.Observer; import rx.Scheduler; import rx.Subscription; -import rx.operators.SafeObservableSubscription; import rx.subscriptions.BooleanSubscription; +import rx.subscriptions.Subscriptions; import rx.util.functions.Action0; import rx.util.functions.Action1; -import rx.util.functions.Func1; +import rx.util.functions.Func2; /** * Base tests for all schedulers including Immediate/Current. @@ -98,60 +98,11 @@ public void call(Integer i) { assertEquals(10, countTaken.get()); } - /** - * Bug report: https://github.com/Netflix/RxJava/issues/431 - */ @Test - public final void testUnSubscribeForScheduler() throws InterruptedException { - - final AtomicInteger countReceived = new AtomicInteger(); - final AtomicInteger countGenerated = new AtomicInteger(); - final SafeObservableSubscription s = new SafeObservableSubscription(); + public void testNestedActions() throws InterruptedException { + final Scheduler scheduler = getScheduler(); final CountDownLatch latch = new CountDownLatch(1); - s.wrap(Observable.interval(50, TimeUnit.MILLISECONDS) - .map(new Func1<Long, Long>() { - @Override - public Long call(Long aLong) { - countGenerated.incrementAndGet(); - return aLong; - } - }) - .subscribeOn(getScheduler()) - .observeOn(getScheduler()) - .subscribe(new Observer<Long>() { - @Override - public void onCompleted() { - System.out.println("--- completed"); - } - - @Override - public void onError(Throwable e) { - System.out.println("--- onError"); - } - - @Override - public void onNext(Long args) { - if (countReceived.incrementAndGet() == 2) { - s.unsubscribe(); - latch.countDown(); - } - System.out.println("==> Received " + args); - } - })); - - latch.await(1000, TimeUnit.MILLISECONDS); - - System.out.println("----------- it thinks it is finished ------------------ "); - Thread.sleep(100); - - assertEquals(2, countGenerated.get()); - } - - @Test - public final void testNestedActions() { - final CurrentThreadScheduler scheduler = new CurrentThreadScheduler(); - final Action0 firstStepStart = mock(Action0.class); final Action0 firstStepEnd = mock(Action0.class); @@ -166,6 +117,7 @@ public final void testNestedActions() { public void call() { firstStepStart.call(); firstStepEnd.call(); + latch.countDown(); } }; final Action0 secondAction = new Action0() { @@ -189,6 +141,8 @@ public void call() { InOrder inOrder = inOrder(firstStepStart, firstStepEnd, secondStepStart, secondStepEnd, thirdStepStart, thirdStepEnd); scheduler.schedule(thirdAction); + + latch.await(); inOrder.verify(thirdStepStart, times(1)).call(); inOrder.verify(thirdStepEnd, times(1)).call(); @@ -199,14 +153,24 @@ public void call() { } @Test - public final void testSequenceOfActions() { - final CurrentThreadScheduler scheduler = new CurrentThreadScheduler(); + public final void testSequenceOfActions() throws InterruptedException { + final Scheduler scheduler = getScheduler(); + final CountDownLatch latch = new CountDownLatch(1); final Action0 first = mock(Action0.class); final Action0 second = mock(Action0.class); scheduler.schedule(first); scheduler.schedule(second); + scheduler.schedule(new Action0() { + + @Override + public void call() { + latch.countDown(); + } + }); + + latch.await(); verify(first, times(1)).call(); verify(second, times(1)).call(); @@ -214,9 +178,10 @@ public final void testSequenceOfActions() { } @Test - public final void 
testSequenceOfDelayedActions() { - final CurrentThreadScheduler scheduler = new CurrentThreadScheduler(); + public void testSequenceOfDelayedActions() throws InterruptedException { + final Scheduler scheduler = getScheduler(); + final CountDownLatch latch = new CountDownLatch(1); final Action0 first = mock(Action0.class); final Action0 second = mock(Action0.class); @@ -225,9 +190,17 @@ public final void testSequenceOfDelayedActions() { public void call() { scheduler.schedule(first, 30, TimeUnit.MILLISECONDS); scheduler.schedule(second, 10, TimeUnit.MILLISECONDS); + scheduler.schedule(new Action0() { + + @Override + public void call() { + latch.countDown(); + } + }, 40, TimeUnit.MILLISECONDS); } }); + latch.await(); InOrder inOrder = inOrder(first, second); inOrder.verify(second, times(1)).call(); @@ -236,9 +209,10 @@ public void call() { } @Test - public final void testMixOfDelayedAndNonDelayedActions() { - final CurrentThreadScheduler scheduler = new CurrentThreadScheduler(); + public void testMixOfDelayedAndNonDelayedActions() throws InterruptedException { + final Scheduler scheduler = getScheduler(); + final CountDownLatch latch = new CountDownLatch(1); final Action0 first = mock(Action0.class); final Action0 second = mock(Action0.class); final Action0 third = mock(Action0.class); @@ -251,16 +225,94 @@ public void call() { scheduler.schedule(second, 300, TimeUnit.MILLISECONDS); scheduler.schedule(third, 100, TimeUnit.MILLISECONDS); scheduler.schedule(fourth); + scheduler.schedule(new Action0() { + + @Override + public void call() { + latch.countDown(); + } + }, 400, TimeUnit.MILLISECONDS); } }); + latch.await(); InOrder inOrder = inOrder(first, second, third, fourth); inOrder.verify(first, times(1)).call(); inOrder.verify(fourth, times(1)).call(); inOrder.verify(third, times(1)).call(); inOrder.verify(second, times(1)).call(); + } + + @Test + public final void testRecursiveExecutionWithAction0() throws InterruptedException { + final Scheduler scheduler = getScheduler(); + final AtomicInteger i = new AtomicInteger(); + final CountDownLatch latch = new CountDownLatch(1); + scheduler.schedule(new Action1<Action0>() { + + @Override + public void call(Action0 self) { + if (i.incrementAndGet() < 100) { + self.call(); + } else { + latch.countDown(); + } + } + }); + + latch.await(); + assertEquals(100, i.get()); + } + + @Test + public final void testRecursiveExecutionWithFunc2() throws InterruptedException { + final Scheduler scheduler = getScheduler(); + final AtomicInteger i = new AtomicInteger(); + final CountDownLatch latch = new CountDownLatch(1); + + scheduler.schedule(0, new Func2<Scheduler, Integer, Subscription>() { + + @Override + public Subscription call(Scheduler innerScheduler, Integer state) { + i.set(state); + if (state < 100) { + return innerScheduler.schedule(state + 1, this); + } else { + latch.countDown(); + return Subscriptions.empty(); + } + } + + }); + + latch.await(); + assertEquals(100, i.get()); + } + + @Test + public final void testRecursiveExecutionWithFunc2AndDelayTime() throws InterruptedException { + final Scheduler scheduler = getScheduler(); + final AtomicInteger i = new AtomicInteger(); + final CountDownLatch latch = new CountDownLatch(1); + + scheduler.schedule(0, new Func2<Scheduler, Integer, Subscription>() { + + @Override + public Subscription call(Scheduler innerScheduler, Integer state) { + i.set(state); + if (state < 100) { + return innerScheduler.schedule(state + 1, this, 5, TimeUnit.MILLISECONDS); + } else { + latch.countDown(); + return 
Subscriptions.empty(); + } + } + + }, 50, TimeUnit.MILLISECONDS); + latch.await(); + assertEquals(100, i.get()); } } diff --git a/rxjava-core/src/test/java/rx/schedulers/ImmediateSchedulerTest.java b/rxjava-core/src/test/java/rx/schedulers/ImmediateSchedulerTest.java index ff3e6c138b..a00878de10 100644 --- a/rxjava-core/src/test/java/rx/schedulers/ImmediateSchedulerTest.java +++ b/rxjava-core/src/test/java/rx/schedulers/ImmediateSchedulerTest.java @@ -15,6 +15,8 @@ */ package rx.schedulers; +import org.junit.Test; + import rx.Scheduler; public class ImmediateSchedulerTest extends AbstractSchedulerTests { @@ -24,4 +26,25 @@ protected Scheduler getScheduler() { return ImmediateScheduler.getInstance(); } + @Override + @Test + public final void testNestedActions() { + // ordering of nested actions will not match other schedulers + // because there is no reordering or concurrency with ImmediateScheduler + } + + @Override + @Test + public final void testSequenceOfDelayedActions() { + // ordering of nested actions will not match other schedulers + // because there is no reordering or concurrency with ImmediateScheduler + } + + @Override + @Test + public final void testMixOfDelayedAndNonDelayedActions() { + // ordering of nested actions will not match other schedulers + // because there is no reordering or concurrency with ImmediateScheduler + } + } diff --git a/rxjava-core/src/test/java/rx/schedulers/TestRecursionMemoryUsage.java b/rxjava-core/src/test/java/rx/schedulers/TestRecursionMemoryUsage.java index 294193b167..80c36e9bd3 100644 --- a/rxjava-core/src/test/java/rx/schedulers/TestRecursionMemoryUsage.java +++ b/rxjava-core/src/test/java/rx/schedulers/TestRecursionMemoryUsage.java @@ -20,8 +20,8 @@ public static void main(String args[]) { usingFunc2(Schedulers.newThread()); usingAction0(Schedulers.newThread()); -// usingFunc2(Schedulers.currentThread()); -// usingAction0(Schedulers.currentThread()); + usingFunc2(Schedulers.currentThread()); + usingAction0(Schedulers.currentThread()); usingFunc2(Schedulers.threadPoolForComputation()); usingAction0(Schedulers.threadPoolForComputation());
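The usage pattern the leak fix targets, mirroring the new testRecursiveExecutionWithFunc2 test above; class and method names come straight from the diff, and the import paths assume the rx.schedulers layout the diff's test files use. Each step reschedules itself on the innerScheduler handed back by the scheduler, so the current-thread queue drains as it goes instead of retaining every parent action.
import rx.Scheduler;
import rx.Subscription;
import rx.schedulers.Schedulers;
import rx.subscriptions.Subscriptions;
import rx.util.functions.Func2;

public class RecursiveScheduleExample {
    public static void main(String[] args) {
        Scheduler scheduler = Schedulers.currentThread();
        scheduler.schedule(0, new Func2<Scheduler, Integer, Subscription>() {
            @Override
            public Subscription call(Scheduler innerScheduler, Integer state) {
                if (state < 100) {
                    // Recurse on the inner scheduler, not the outer one.
                    return innerScheduler.schedule(state + 1, this);
                }
                return Subscriptions.empty();
            }
        });
    }
}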
e6d707a5fdfb1d928c335ad499f72e5128928e27
restlet-framework-java
Fixed internal client connector.--
c
https://github.com/restlet/restlet-framework-java
diff --git a/modules/org.restlet/src/org/restlet/engine/http/connector/ClientConnection.java b/modules/org.restlet/src/org/restlet/engine/http/connector/ClientConnection.java index 2dff59b097..c27d81cd3d 100644 --- a/modules/org.restlet/src/org/restlet/engine/http/connector/ClientConnection.java +++ b/modules/org.restlet/src/org/restlet/engine/http/connector/ClientConnection.java @@ -201,7 +201,6 @@ protected void readMessage() throws IOException { getOutboundMessages().poll(); // Allows the connection to write another request setOutboundBusy(false); - setInboundBusy(false); } // Add it to the helper queue diff --git a/modules/org.restlet/src/org/restlet/engine/http/connector/ControllerTask.java b/modules/org.restlet/src/org/restlet/engine/http/connector/ControllerTask.java index e0f18ab829..a9dff18e63 100644 --- a/modules/org.restlet/src/org/restlet/engine/http/connector/ControllerTask.java +++ b/modules/org.restlet/src/org/restlet/engine/http/connector/ControllerTask.java @@ -69,7 +69,7 @@ protected void controlConnections() throws IOException { for (final Connection<?> conn : getHelper().getConnections()) { if (conn.getState() == ConnectionState.CLOSED) { getHelper().getConnections().remove(conn); - } else if ((conn.getState() == ConnectionState.CLOSING)) { + } else if ((conn.getState() == ConnectionState.CLOSING) && !conn.isBusy()) { conn.close(true); } else { if ((isOverloaded() && !getHelper().isClientSide())
7384d25cb3975a7fdc522ff8f84ab3dadcaba03a
kotlin
Prohibit Array<Nothing>--
c
https://github.com/JetBrains/kotlin
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/TargetPlatform.kt b/compiler/frontend/src/org/jetbrains/kotlin/resolve/TargetPlatform.kt index 57fb5757a3c37..685c35a4d7cfe 100644 --- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/TargetPlatform.kt +++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/TargetPlatform.kt @@ -25,7 +25,10 @@ import org.jetbrains.kotlin.descriptors.ModuleParameters import org.jetbrains.kotlin.descriptors.impl.ModuleDescriptorImpl import org.jetbrains.kotlin.name.Name import org.jetbrains.kotlin.resolve.calls.checkers.* -import org.jetbrains.kotlin.resolve.validation.* +import org.jetbrains.kotlin.resolve.validation.DeprecatedSymbolValidator +import org.jetbrains.kotlin.resolve.validation.InfixValidator +import org.jetbrains.kotlin.resolve.validation.OperatorValidator +import org.jetbrains.kotlin.resolve.validation.SymbolUsageValidator import org.jetbrains.kotlin.storage.StorageManager import org.jetbrains.kotlin.types.DynamicTypesSettings @@ -58,7 +61,7 @@ private val DEFAULT_DECLARATION_CHECKERS = listOf( InfixModifierChecker()) private val DEFAULT_CALL_CHECKERS = listOf(CapturingInClosureChecker(), InlineCheckerWrapper(), ReifiedTypeParameterSubstitutionChecker(), - SafeCallChecker(), InvokeConventionChecker()) + SafeCallChecker(), InvokeConventionChecker(), CallReturnsArrayOfNothingChecker()) private val DEFAULT_TYPE_CHECKERS = emptyList<AdditionalTypeChecker>() private val DEFAULT_VALIDATORS = listOf(DeprecatedSymbolValidator(), OperatorValidator(), InfixValidator()) diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/TypeResolver.kt b/compiler/frontend/src/org/jetbrains/kotlin/resolve/TypeResolver.kt index 47332c1695db4..2ac29920c942e 100644 --- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/TypeResolver.kt +++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/TypeResolver.kt @@ -33,14 +33,15 @@ import org.jetbrains.kotlin.resolve.bindingContextUtil.recordScope import org.jetbrains.kotlin.resolve.calls.tasks.DynamicCallableDescriptors import org.jetbrains.kotlin.resolve.lazy.ForceResolveUtil import org.jetbrains.kotlin.resolve.lazy.LazyEntity -import org.jetbrains.kotlin.resolve.scopes.MemberScope import org.jetbrains.kotlin.resolve.scopes.LazyScopeAdapter import org.jetbrains.kotlin.resolve.scopes.LexicalScope +import org.jetbrains.kotlin.resolve.scopes.MemberScope import org.jetbrains.kotlin.resolve.scopes.utils.findClassifier import org.jetbrains.kotlin.storage.LockBasedStorageManager import org.jetbrains.kotlin.storage.StorageManager import org.jetbrains.kotlin.types.* import org.jetbrains.kotlin.types.Variance.* +import org.jetbrains.kotlin.types.typeUtil.isArrayOfNothing public class TypeResolver( private val annotationResolver: AnnotationResolver, @@ -349,6 +350,10 @@ public class TypeResolver( } } + if (resultingType.isArrayOfNothing()) { + c.trace.report(UNSUPPORTED.on(type, "Array<Nothing> is illegal")) + } + return type(resultingType) } diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/checkers/CallReturnsArrayOfNothingChecker.kt b/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/checkers/CallReturnsArrayOfNothingChecker.kt new file mode 100644 index 0000000000000..b49ddb37e26fe --- /dev/null +++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/checkers/CallReturnsArrayOfNothingChecker.kt @@ -0,0 +1,48 @@ +/* + * Copyright 2010-2015 JetBrains s.r.o. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.jetbrains.kotlin.resolve.calls.checkers + +import org.jetbrains.kotlin.descriptors.CallableDescriptor +import org.jetbrains.kotlin.diagnostics.Errors +import org.jetbrains.kotlin.resolve.calls.context.BasicCallResolutionContext +import org.jetbrains.kotlin.resolve.calls.model.ResolvedCall +import org.jetbrains.kotlin.types.DeferredType +import org.jetbrains.kotlin.types.KotlinType +import org.jetbrains.kotlin.types.typeUtil.isArrayOfNothing + +class CallReturnsArrayOfNothingChecker : CallChecker { + override fun <F : CallableDescriptor> check(resolvedCall: ResolvedCall<F>, context: BasicCallResolutionContext) { + val returnType = resolvedCall.resultingDescriptor.returnType + + if (returnType.containsArrayOfNothing()) { + val callElement = resolvedCall.call.callElement + val diagnostic = Errors.UNSUPPORTED.on(callElement, "Array<Nothing> in return type is illegal") + context.trace.report(diagnostic) + } + } + + private fun KotlinType?.containsArrayOfNothing(): Boolean { + // if this.isComputing is true, it means that resolve + // has run into recursion, so checking for Array<Nothing> is meaningless anyway, + // and error about recursion will be reported later + if (this == null || this is DeferredType && this.isComputing) return false + + if (isArrayOfNothing()) return true + + return arguments.any { !it.isStarProjection && it.type.containsArrayOfNothing() } + } +} diff --git a/compiler/testData/diagnostics/testsWithStdLib/ArrayOfNothing.kt b/compiler/testData/diagnostics/testsWithStdLib/ArrayOfNothing.kt new file mode 100644 index 0000000000000..7046e50f6db44 --- /dev/null +++ b/compiler/testData/diagnostics/testsWithStdLib/ArrayOfNothing.kt @@ -0,0 +1,54 @@ +// !DIAGNOSTICS: -UNUSED_PARAMETER -UNUSED_VARIABLE -UNCHECKED_CAST -USELESS_CAST +class A<T> + +fun test1( + a: <!UNSUPPORTED!>Array<Nothing><!>, + b: <!UNSUPPORTED!>Array<Nothing?><!>, + c: <!UNSUPPORTED!>Array<in Nothing><!>, + d: <!UNSUPPORTED!>Array<in Nothing?><!>, + e: <!UNSUPPORTED!>Array<out Nothing><!>, + f: <!UNSUPPORTED!>Array<out Nothing?><!> +) {} + +fun test2( + a: <!UNSUPPORTED!>Array<Nothing><!>?, + b: <!UNSUPPORTED!>Array<Nothing?><!>?, + c: <!UNSUPPORTED!>Array<in Nothing><!>?, + d: <!UNSUPPORTED!>Array<in Nothing?><!>?, + e: <!UNSUPPORTED!>Array<out Nothing><!>?, + f: <!UNSUPPORTED!>Array<out Nothing?><!>? 
+) {} + +fun test3( + a: A<<!UNSUPPORTED!>Array<Nothing><!>>, + b: A<<!UNSUPPORTED!>Array<Nothing?><!>>, + c: A<<!UNSUPPORTED!>Array<in Nothing><!>>, + d: A<<!UNSUPPORTED!>Array<in Nothing?><!>>, + e: A<<!UNSUPPORTED!>Array<out Nothing><!>>, + f: A<<!UNSUPPORTED!>Array<out Nothing?><!>> +) {} + +fun test4( + a: Array<A<Nothing>>, + b: Array<A<Nothing?>>, + c: Array<A<in Nothing>>, + d: Array<A<in Nothing?>>, + e: Array<A<out Nothing>>, + f: Array<A<out Nothing?>> +) {} + +fun test5() { + <!UNSUPPORTED!><!REIFIED_TYPE_FORBIDDEN_SUBSTITUTION!>arrayOf<!><Nothing>()<!> + <!UNSUPPORTED!><!REIFIED_TYPE_FORBIDDEN_SUBSTITUTION!>Array<!><Nothing>(10) { throw Exception() }<!> +} + +fun <T> foo(): Array<T> = (object {} as Any) as Array<T> + +fun test6() = <!UNSUPPORTED!>foo<Nothing>()<!> + + +class B<T>(val array: Array<T>) + +fun <T> bar() = B<Array<T>>(arrayOf()) + +fun test7() = <!UNSUPPORTED!>bar<Nothing>()<!> diff --git a/compiler/testData/diagnostics/testsWithStdLib/ArrayOfNothing.txt b/compiler/testData/diagnostics/testsWithStdLib/ArrayOfNothing.txt new file mode 100644 index 0000000000000..b905373803d71 --- /dev/null +++ b/compiler/testData/diagnostics/testsWithStdLib/ArrayOfNothing.txt @@ -0,0 +1,26 @@ +package + +public fun </*0*/ T> bar(): B<kotlin.Array<T>> +public fun </*0*/ T> foo(): kotlin.Array<T> +public fun test1(/*0*/ a: kotlin.Array<kotlin.Nothing>, /*1*/ b: kotlin.Array<kotlin.Nothing?>, /*2*/ c: kotlin.Array<in kotlin.Nothing>, /*3*/ d: kotlin.Array<in kotlin.Nothing?>, /*4*/ e: kotlin.Array<out kotlin.Nothing>, /*5*/ f: kotlin.Array<out kotlin.Nothing?>): kotlin.Unit +public fun test2(/*0*/ a: kotlin.Array<kotlin.Nothing>?, /*1*/ b: kotlin.Array<kotlin.Nothing?>?, /*2*/ c: kotlin.Array<in kotlin.Nothing>?, /*3*/ d: kotlin.Array<in kotlin.Nothing?>?, /*4*/ e: kotlin.Array<out kotlin.Nothing>?, /*5*/ f: kotlin.Array<out kotlin.Nothing?>?): kotlin.Unit +public fun test3(/*0*/ a: A<kotlin.Array<kotlin.Nothing>>, /*1*/ b: A<kotlin.Array<kotlin.Nothing?>>, /*2*/ c: A<kotlin.Array<in kotlin.Nothing>>, /*3*/ d: A<kotlin.Array<in kotlin.Nothing?>>, /*4*/ e: A<kotlin.Array<out kotlin.Nothing>>, /*5*/ f: A<kotlin.Array<out kotlin.Nothing?>>): kotlin.Unit +public fun test4(/*0*/ a: kotlin.Array<A<kotlin.Nothing>>, /*1*/ b: kotlin.Array<A<kotlin.Nothing?>>, /*2*/ c: kotlin.Array<A<in kotlin.Nothing>>, /*3*/ d: kotlin.Array<A<in kotlin.Nothing?>>, /*4*/ e: kotlin.Array<A<out kotlin.Nothing>>, /*5*/ f: kotlin.Array<A<out kotlin.Nothing?>>): kotlin.Unit +public fun test5(): kotlin.Unit +public fun test6(): kotlin.Array<kotlin.Nothing> +public fun test7(): B<kotlin.Array<kotlin.Nothing>> + +public final class A</*0*/ T> { + public constructor A</*0*/ T>() + public open override /*1*/ /*fake_override*/ fun equals(/*0*/ other: kotlin.Any?): kotlin.Boolean + public open override /*1*/ /*fake_override*/ fun hashCode(): kotlin.Int + public open override /*1*/ /*fake_override*/ fun toString(): kotlin.String +} + +public final class B</*0*/ T> { + public constructor B</*0*/ T>(/*0*/ array: kotlin.Array<T>) + public final val array: kotlin.Array<T> + public open override /*1*/ /*fake_override*/ fun equals(/*0*/ other: kotlin.Any?): kotlin.Boolean + public open override /*1*/ /*fake_override*/ fun hashCode(): kotlin.Int + public open override /*1*/ /*fake_override*/ fun toString(): kotlin.String +} diff --git a/compiler/testData/diagnostics/testsWithStdLib/reified/reifiedNothingSubstitution.kt b/compiler/testData/diagnostics/testsWithStdLib/reified/reifiedNothingSubstitution.kt index 
f409857d53da4..70f3b60482b40 100644 --- a/compiler/testData/diagnostics/testsWithStdLib/reified/reifiedNothingSubstitution.kt +++ b/compiler/testData/diagnostics/testsWithStdLib/reified/reifiedNothingSubstitution.kt @@ -3,8 +3,8 @@ inline fun<reified T> foo(block: () -> T): String = block().toString() fun box() { - val a = <!REIFIED_TYPE_FORBIDDEN_SUBSTITUTION!>arrayOf<!>(null!!) - val b = <!REIFIED_TYPE_FORBIDDEN_SUBSTITUTION!>Array<!><Nothing?>(5) { null!! } + val a = <!UNSUPPORTED!><!REIFIED_TYPE_FORBIDDEN_SUBSTITUTION!>arrayOf<!>(null!!)<!> + val b = <!UNSUPPORTED!><!REIFIED_TYPE_FORBIDDEN_SUBSTITUTION!>Array<!><Nothing?>(5) { null!! }<!> val c = <!REIFIED_TYPE_FORBIDDEN_SUBSTITUTION!>foo<!>() { null!! } val d = foo<Any> { null!! } val e = <!REIFIED_TYPE_FORBIDDEN_SUBSTITUTION!>foo<!> { "1" as Nothing } diff --git a/compiler/tests/org/jetbrains/kotlin/checkers/DiagnosticsTestWithStdLibGenerated.java b/compiler/tests/org/jetbrains/kotlin/checkers/DiagnosticsTestWithStdLibGenerated.java index 6ca79d2990ae9..0e3176d846b3c 100644 --- a/compiler/tests/org/jetbrains/kotlin/checkers/DiagnosticsTestWithStdLibGenerated.java +++ b/compiler/tests/org/jetbrains/kotlin/checkers/DiagnosticsTestWithStdLibGenerated.java @@ -35,6 +35,12 @@ public void testAllFilesPresentInTestsWithStdLib() throws Exception { KotlinTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/diagnostics/testsWithStdLib"), Pattern.compile("^(.+)\\.kt$"), true); } + @TestMetadata("ArrayOfNothing.kt") + public void testArrayOfNothing() throws Exception { + String fileName = KotlinTestUtils.navigationMetadata("compiler/testData/diagnostics/testsWithStdLib/ArrayOfNothing.kt"); + doTest(fileName); + } + @TestMetadata("CallCompanionProtectedNonStatic.kt") public void testCallCompanionProtectedNonStatic() throws Exception { String fileName = KotlinTestUtils.navigationMetadata("compiler/testData/diagnostics/testsWithStdLib/CallCompanionProtectedNonStatic.kt"); diff --git a/core/descriptors/src/org/jetbrains/kotlin/types/TypeUtils.kt b/core/descriptors/src/org/jetbrains/kotlin/types/TypeUtils.kt index ba37ec5a1e73e..eaf153ad5d3e8 100644 --- a/core/descriptors/src/org/jetbrains/kotlin/types/TypeUtils.kt +++ b/core/descriptors/src/org/jetbrains/kotlin/types/TypeUtils.kt @@ -55,6 +55,13 @@ fun KotlinType.isAnyOrNullableAny(): Boolean = KotlinBuiltIns.isAnyOrNullableAny fun KotlinType.isBoolean(): Boolean = KotlinBuiltIns.isBoolean(this) fun KotlinType.isBooleanOrNullableBoolean(): Boolean = KotlinBuiltIns.isBooleanOrNullableBoolean(this) +fun KotlinType?.isArrayOfNothing(): Boolean { + if (this == null || !KotlinBuiltIns.isArray(this)) return false + + val typeArg = arguments.firstOrNull()?.type + return typeArg != null && KotlinBuiltIns.isNothingOrNullableNothing(typeArg) +} + private fun KotlinType.getContainedTypeParameters(): Collection<TypeParameterDescriptor> { val declarationDescriptor = getConstructor().getDeclarationDescriptor() if (declarationDescriptor is TypeParameterDescriptor) return listOf(declarationDescriptor)
9ccbb766bd98a4958a38ddc840e00b7dfa6230d6
hbase
HBASE-8033 Break TestRestoreSnapshotFromClient- into TestRestoreSnapshotFromClient and TestCloneSnapshotFromClient (Ted Yu)--git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1454186 13f79535-47bb-0310-9956-ffa450edef68-
p
https://github.com/apache/hbase
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCloneSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCloneSnapshotFromClient.java new file mode 100644 index 000000000000..1449865262a8 --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCloneSnapshotFromClient.java @@ -0,0 +1,269 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.client; + +import static org.junit.Assert.assertEquals; + +import java.io.IOException; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.LargeTests; +import org.apache.hadoop.hbase.exceptions.SnapshotDoesNotExistException; +import org.apache.hadoop.hbase.master.MasterFileSystem; +import org.apache.hadoop.hbase.master.snapshot.SnapshotManager; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.MD5Hash; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +/** + * Test clone snapshots from the client + */ +@Category(LargeTests.class) +public class TestCloneSnapshotFromClient { + final Log LOG = LogFactory.getLog(getClass()); + + private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); + + private final byte[] FAMILY = Bytes.toBytes("cf"); + + private byte[] emptySnapshot; + private byte[] snapshotName0; + private byte[] snapshotName1; + private byte[] snapshotName2; + private int snapshot0Rows; + private int snapshot1Rows; + private byte[] tableName; + private HBaseAdmin admin; + + @BeforeClass + public static void setUpBeforeClass() throws Exception { + TEST_UTIL.getConfiguration().setBoolean(SnapshotManager.HBASE_SNAPSHOT_ENABLED, true); + TEST_UTIL.getConfiguration().setBoolean("hbase.online.schema.update.enable", true); + TEST_UTIL.getConfiguration().setInt("hbase.hstore.compactionThreshold", 10); + TEST_UTIL.getConfiguration().setInt("hbase.regionserver.msginterval", 100); + TEST_UTIL.getConfiguration().setInt("hbase.client.pause", 250); + TEST_UTIL.getConfiguration().setInt("hbase.client.retries.number", 6); + TEST_UTIL.getConfiguration().setBoolean( + "hbase.master.enabletable.roundrobin", true); + TEST_UTIL.startMiniCluster(3); + } + + @AfterClass + public static void tearDownAfterClass() throws Exception { + TEST_UTIL.shutdownMiniCluster(); + } + + /** + * Initialize the tests with a table 
filled with some data + * and two snapshots (snapshotName0, snapshotName1) of different states. + * The tableName, snapshotNames and the number of rows in the snapshot are initialized. + */ + @Before + public void setup() throws Exception { + this.admin = TEST_UTIL.getHBaseAdmin(); + + long tid = System.currentTimeMillis(); + tableName = Bytes.toBytes("testtb-" + tid); + emptySnapshot = Bytes.toBytes("emptySnaptb-" + tid); + snapshotName0 = Bytes.toBytes("snaptb0-" + tid); + snapshotName1 = Bytes.toBytes("snaptb1-" + tid); + snapshotName2 = Bytes.toBytes("snaptb2-" + tid); + + // create Table and disable it + createTable(tableName, FAMILY); + admin.disableTable(tableName); + + // take an empty snapshot + admin.snapshot(emptySnapshot, tableName); + + HTable table = new HTable(TEST_UTIL.getConfiguration(), tableName); + try { + // enable table and insert data + admin.enableTable(tableName); + loadData(table, 500, FAMILY); + snapshot0Rows = TEST_UTIL.countRows(table); + admin.disableTable(tableName); + + // take a snapshot + admin.snapshot(snapshotName0, tableName); + + // enable table and insert more data + admin.enableTable(tableName); + loadData(table, 500, FAMILY); + snapshot1Rows = TEST_UTIL.countRows(table); + admin.disableTable(tableName); + + // take a snapshot of the updated table + admin.snapshot(snapshotName1, tableName); + + // re-enable table + admin.enableTable(tableName); + } finally { + table.close(); + } + } + + @After + public void tearDown() throws Exception { + if (admin.tableExists(tableName)) { + TEST_UTIL.deleteTable(tableName); + } + admin.deleteSnapshot(snapshotName0); + admin.deleteSnapshot(snapshotName1); + + // Ensure the archiver to be empty + MasterFileSystem mfs = TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterFileSystem(); + mfs.getFileSystem().delete( + new Path(mfs.getRootDir(), HConstants.HFILE_ARCHIVE_DIRECTORY), true); + } + + @Test(expected=SnapshotDoesNotExistException.class) + public void testCloneNonExistentSnapshot() throws IOException, InterruptedException { + String snapshotName = "random-snapshot-" + System.currentTimeMillis(); + String tableName = "random-table-" + System.currentTimeMillis(); + admin.cloneSnapshot(snapshotName, tableName); + } + + @Test + public void testCloneSnapshot() throws IOException, InterruptedException { + byte[] clonedTableName = Bytes.toBytes("clonedtb-" + System.currentTimeMillis()); + testCloneSnapshot(clonedTableName, snapshotName0, snapshot0Rows); + testCloneSnapshot(clonedTableName, snapshotName1, snapshot1Rows); + testCloneSnapshot(clonedTableName, emptySnapshot, 0); + } + + private void testCloneSnapshot(final byte[] tableName, final byte[] snapshotName, + int snapshotRows) throws IOException, InterruptedException { + // create a new table from snapshot + admin.cloneSnapshot(snapshotName, tableName); + verifyRowCount(tableName, snapshotRows); + + admin.disableTable(tableName); + admin.deleteTable(tableName); + } + + /** + * Verify that tables created from the snapshot are still alive after source table deletion. + */ + @Test + public void testCloneLinksAfterDelete() throws IOException, InterruptedException { + // Clone a table from the first snapshot + byte[] clonedTableName = Bytes.toBytes("clonedtb1-" + System.currentTimeMillis()); + admin.cloneSnapshot(snapshotName0, clonedTableName); + verifyRowCount(clonedTableName, snapshot0Rows); + + // Take a snapshot of this cloned table. 
+ admin.disableTable(clonedTableName); + admin.snapshot(snapshotName2, clonedTableName); + + // Clone the snapshot of the cloned table + byte[] clonedTableName2 = Bytes.toBytes("clonedtb2-" + System.currentTimeMillis()); + admin.cloneSnapshot(snapshotName2, clonedTableName2); + verifyRowCount(clonedTableName2, snapshot0Rows); + admin.disableTable(clonedTableName2); + + // Remove the original table + admin.disableTable(tableName); + admin.deleteTable(tableName); + waitCleanerRun(); + + // Verify the first cloned table + admin.enableTable(clonedTableName); + verifyRowCount(clonedTableName, snapshot0Rows); + + // Verify the second cloned table + admin.enableTable(clonedTableName2); + verifyRowCount(clonedTableName2, snapshot0Rows); + admin.disableTable(clonedTableName2); + + // Delete the first cloned table + admin.disableTable(clonedTableName); + admin.deleteTable(clonedTableName); + waitCleanerRun(); + + // Verify the second cloned table + admin.enableTable(clonedTableName2); + verifyRowCount(clonedTableName2, snapshot0Rows); + + // Clone a new table from cloned + byte[] clonedTableName3 = Bytes.toBytes("clonedtb3-" + System.currentTimeMillis()); + admin.cloneSnapshot(snapshotName2, clonedTableName3); + verifyRowCount(clonedTableName3, snapshot0Rows); + + // Delete the cloned tables + admin.disableTable(clonedTableName2); + admin.deleteTable(clonedTableName2); + admin.disableTable(clonedTableName3); + admin.deleteTable(clonedTableName3); + admin.deleteSnapshot(snapshotName2); + } + + // ========================================================================== + // Helpers + // ========================================================================== + private void createTable(final byte[] tableName, final byte[]... families) throws IOException { + HTableDescriptor htd = new HTableDescriptor(tableName); + for (byte[] family: families) { + HColumnDescriptor hcd = new HColumnDescriptor(family); + htd.addFamily(hcd); + } + byte[][] splitKeys = new byte[16][]; + byte[] hex = Bytes.toBytes("0123456789abcdef"); + for (int i = 0; i < 16; ++i) { + splitKeys[i] = new byte[] { hex[i] }; + } + admin.createTable(htd, splitKeys); + } + + public void loadData(final HTable table, int rows, byte[]... 
families) throws IOException { + byte[] qualifier = Bytes.toBytes("q"); + table.setAutoFlush(false); + while (rows-- > 0) { + byte[] value = Bytes.add(Bytes.toBytes(System.currentTimeMillis()), Bytes.toBytes(rows)); + byte[] key = Bytes.toBytes(MD5Hash.getMD5AsHex(value)); + Put put = new Put(key); + put.setWriteToWAL(false); + for (byte[] family: families) { + put.add(family, qualifier, value); + } + table.put(put); + } + table.flushCommits(); + } + + private void waitCleanerRun() throws InterruptedException { + TEST_UTIL.getMiniHBaseCluster().getMaster().getHFileCleaner().choreForTesting(); + } + + private void verifyRowCount(final byte[] tableName, long expectedRows) throws IOException { + HTable table = new HTable(TEST_UTIL.getConfiguration(), tableName); + assertEquals(expectedRows, TEST_UTIL.countRows(table)); + table.close(); + } +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRestoreSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRestoreSnapshotFromClient.java index ab1ecd8ec07f..0cb764aeee26 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRestoreSnapshotFromClient.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRestoreSnapshotFromClient.java @@ -17,9 +17,7 @@ */ package org.apache.hadoop.hbase.client; -import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.IOException; @@ -30,22 +28,25 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; -import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.LargeTests; +import org.apache.hadoop.hbase.exceptions.NoSuchColumnFamilyException; import org.apache.hadoop.hbase.master.MasterFileSystem; import org.apache.hadoop.hbase.master.snapshot.SnapshotManager; -import org.apache.hadoop.hbase.exceptions.NoSuchColumnFamilyException; -import org.apache.hadoop.hbase.exceptions.SnapshotDoesNotExistException; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.MD5Hash; -import org.junit.*; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; import org.junit.experimental.categories.Category; /** - * Test clone/restore snapshots from the client + * Test restore snapshots from the client */ @Category(LargeTests.class) public class TestRestoreSnapshotFromClient { @@ -225,31 +226,6 @@ public void testRestoreSchemaChange() throws IOException { table.close(); } - @Test(expected=SnapshotDoesNotExistException.class) - public void testCloneNonExistentSnapshot() throws IOException, InterruptedException { - String snapshotName = "random-snapshot-" + System.currentTimeMillis(); - String tableName = "random-table-" + System.currentTimeMillis(); - admin.cloneSnapshot(snapshotName, tableName); - } - - @Test - public void testCloneSnapshot() throws IOException, InterruptedException { - byte[] clonedTableName = Bytes.toBytes("clonedtb-" + System.currentTimeMillis()); - testCloneSnapshot(clonedTableName, snapshotName0, snapshot0Rows); - testCloneSnapshot(clonedTableName, snapshotName1, snapshot1Rows); - 
testCloneSnapshot(clonedTableName, emptySnapshot, 0); - } - - private void testCloneSnapshot(final byte[] tableName, final byte[] snapshotName, - int snapshotRows) throws IOException, InterruptedException { - // create a new table from snapshot - admin.cloneSnapshot(snapshotName, tableName); - verifyRowCount(tableName, snapshotRows); - - admin.disableTable(tableName); - admin.deleteTable(tableName); - } - @Test public void testRestoreSnapshotOfCloned() throws IOException, InterruptedException { byte[] clonedTableName = Bytes.toBytes("clonedtb-" + System.currentTimeMillis()); @@ -266,62 +242,6 @@ public void testRestoreSnapshotOfCloned() throws IOException, InterruptedExcepti admin.deleteTable(clonedTableName); } - /** - * Verify that tables created from the snapshot are still alive after source table deletion. - */ - @Test - public void testCloneLinksAfterDelete() throws IOException, InterruptedException { - // Clone a table from the first snapshot - byte[] clonedTableName = Bytes.toBytes("clonedtb1-" + System.currentTimeMillis()); - admin.cloneSnapshot(snapshotName0, clonedTableName); - verifyRowCount(clonedTableName, snapshot0Rows); - - // Take a snapshot of this cloned table. - admin.disableTable(clonedTableName); - admin.snapshot(snapshotName2, clonedTableName); - - // Clone the snapshot of the cloned table - byte[] clonedTableName2 = Bytes.toBytes("clonedtb2-" + System.currentTimeMillis()); - admin.cloneSnapshot(snapshotName2, clonedTableName2); - verifyRowCount(clonedTableName2, snapshot0Rows); - admin.disableTable(clonedTableName2); - - // Remove the original table - admin.disableTable(tableName); - admin.deleteTable(tableName); - waitCleanerRun(); - - // Verify the first cloned table - admin.enableTable(clonedTableName); - verifyRowCount(clonedTableName, snapshot0Rows); - - // Verify the second cloned table - admin.enableTable(clonedTableName2); - verifyRowCount(clonedTableName2, snapshot0Rows); - admin.disableTable(clonedTableName2); - - // Delete the first cloned table - admin.disableTable(clonedTableName); - admin.deleteTable(clonedTableName); - waitCleanerRun(); - - // Verify the second cloned table - admin.enableTable(clonedTableName2); - verifyRowCount(clonedTableName2, snapshot0Rows); - - // Clone a new table from cloned - byte[] clonedTableName3 = Bytes.toBytes("clonedtb3-" + System.currentTimeMillis()); - admin.cloneSnapshot(snapshotName2, clonedTableName3); - verifyRowCount(clonedTableName3, snapshot0Rows); - - // Delete the cloned tables - admin.disableTable(clonedTableName2); - admin.deleteTable(clonedTableName2); - admin.disableTable(clonedTableName3); - admin.deleteTable(clonedTableName3); - admin.deleteSnapshot(snapshotName2); - } - // ========================================================================== // Helpers // ==========================================================================
59d6a57f9a5e93146bcc83bcdb6f7c26ce644c51
ReactiveX-RxJava
Update Observable.isInternalImplementation, get rid- of NullPointerException--NullPointerException has been encountered during my tests. It is because java.lang.Class.getPackage() may return null "... if no package information is available from the archive or codebase" (documented feature).-
c
https://github.com/ReactiveX/RxJava
diff --git a/rxjava-core/src/main/java/rx/Observable.java b/rxjava-core/src/main/java/rx/Observable.java index b93f1621cf..65f348e9b1 100644 --- a/rxjava-core/src/main/java/rx/Observable.java +++ b/rxjava-core/src/main/java/rx/Observable.java @@ -3745,7 +3745,8 @@ private boolean isInternalImplementation(Object o) { if (o instanceof AtomicObserver) return true; // we treat the following package as "internal" and don't wrap it - return o.getClass().getPackage().getName().startsWith("rx.operators"); + Package p = o.getClass().getPackage(); // it can be null + return p != null && p.getName().startsWith("rx.operators"); } public static class UnitTest {
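A tiny standalone rendering of the guarded lookup — the same two lines as the patched method, with the rx.operators prefix taken from the diff:
public class PackageNullCheck {
    static boolean isInternal(Object o) {
        // Class.getPackage() is documented to return null when no package
        // information is available from the archive or codebase.
        Package p = o.getClass().getPackage();
        return p != null && p.getName().startsWith("rx.operators");
    }

    public static void main(String[] args) {
        System.out.println(isInternal("some string")); // false: java.lang package
    }
}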
615f760590ecff8d51832aa4988de410b9de4fb5
drools
JBRULES-2339: JBRULES-2440: fixing ruleflow group- management--git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@32725 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-
c
https://github.com/kiegroup/drools
diff --git a/drools-core/src/main/java/org/drools/common/DefaultAgenda.java b/drools-core/src/main/java/org/drools/common/DefaultAgenda.java index b6b14756a40..5d9c2502dee 100644 --- a/drools-core/src/main/java/org/drools/common/DefaultAgenda.java +++ b/drools-core/src/main/java/org/drools/common/DefaultAgenda.java @@ -904,8 +904,9 @@ public synchronized void fireActivation(final Activation activation) throws Cons } activation.setActivated( false ); + InternalRuleFlowGroup ruleFlowGroup = null; if ( activation.getActivationNode() != null ) { - final InternalRuleFlowGroup ruleFlowGroup = (InternalRuleFlowGroup) activation.getActivationNode().getParentContainer(); + ruleFlowGroup = (InternalRuleFlowGroup) activation.getActivationNode().getParentContainer(); // it is possible that the ruleflow group is no longer active if it was // cleared during execution of this activation ruleFlowGroup.removeActivation( activation ); @@ -930,6 +931,10 @@ public synchronized void fireActivation(final Activation activation) throws Cons throw new RuntimeException( e ); } } + + if( ruleFlowGroup != null ) { + ruleFlowGroup.deactivateIfEmpty(); + } // if the tuple contains expired events for ( LeftTuple tuple = (LeftTuple) activation.getTuple(); tuple != null; tuple = tuple.getParent() ) { diff --git a/drools-core/src/main/java/org/drools/common/InternalRuleFlowGroup.java b/drools-core/src/main/java/org/drools/common/InternalRuleFlowGroup.java index eb56d023dd5..8ed644cd22d 100644 --- a/drools-core/src/main/java/org/drools/common/InternalRuleFlowGroup.java +++ b/drools-core/src/main/java/org/drools/common/InternalRuleFlowGroup.java @@ -17,6 +17,12 @@ public interface InternalRuleFlowGroup extends RuleFlowGroup { void clear(); + /** + * Checks if this ruleflow group is active and should automatically deactivate. + * If the queue is empty, it deactivates the group. + */ + public void deactivateIfEmpty(); + /** * Activates or deactivates this <code>RuleFlowGroup</code>. 
* When activating, all activations of this <code>RuleFlowGroup</code> are added diff --git a/drools-core/src/main/java/org/drools/common/RuleFlowGroupImpl.java b/drools-core/src/main/java/org/drools/common/RuleFlowGroupImpl.java index a744ae0cdfb..40337c80397 100644 --- a/drools-core/src/main/java/org/drools/common/RuleFlowGroupImpl.java +++ b/drools-core/src/main/java/org/drools/common/RuleFlowGroupImpl.java @@ -53,7 +53,7 @@ public class RuleFlowGroupImpl private InternalWorkingMemory workingMemory; private String name; private boolean active = false; - private boolean autoDeactivate = true; + private boolean autoDeactivate = true; private LinkedList list; private List<RuleFlowGroupListener> listeners; private Map<Long, String> nodeInstances = new HashMap<Long, String>(); @@ -72,13 +72,15 @@ public RuleFlowGroupImpl(final String name) { this.name = name; this.list = new LinkedList(); } - - public RuleFlowGroupImpl(final String name, final boolean active, final boolean autoDeactivate) { + + public RuleFlowGroupImpl(final String name, + final boolean active, + final boolean autoDeactivate) { this.name = name; this.active = active; this.autoDeactivate = autoDeactivate; this.list = new LinkedList(); - } + } public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { @@ -187,7 +189,7 @@ public int size() { public void addActivation(final Activation activation) { assert activation.getActivationNode() == null; final ActivationNode node = new ActivationNode( activation, - this ); + this ); activation.setActivationNode( node ); this.list.add( node ); @@ -200,12 +202,17 @@ public void removeActivation(final Activation activation) { final ActivationNode node = activation.getActivationNode(); this.list.remove( node ); activation.setActivationNode( null ); - if ( this.active && this.autoDeactivate ) { - if ( this.list.isEmpty() ) { - // deactivate callback - WorkingMemoryAction action = new DeactivateCallback( this ); - this.workingMemory.queueWorkingMemoryAction( action ); - } + } + + /** + * Checks if this ruleflow group is active and should automatically deactivate. + * If the queue is empty, it deactivates the group. 
+ */ + public void deactivateIfEmpty() { + if ( this.active && this.autoDeactivate && this.list.isEmpty() ) { + // deactivate callback + WorkingMemoryAction action = new DeactivateCallback( this ); + this.workingMemory.queueWorkingMemoryAction( action ); } } @@ -217,8 +224,8 @@ public void addRuleFlowGroupListener(RuleFlowGroupListener listener) { } public void removeRuleFlowGroupListener(RuleFlowGroupListener listener) { - if (listeners != null) { - listeners.remove(listener); + if ( listeners != null ) { + listeners.remove( listener ); } } @@ -258,10 +265,12 @@ public int hashCode() { return this.name.hashCode(); } - public static class DeactivateCallback implements WorkingMemoryAction { - - private static final long serialVersionUID = 400L; - + public static class DeactivateCallback + implements + WorkingMemoryAction { + + private static final long serialVersionUID = 400L; + private InternalRuleFlowGroup ruleFlowGroup; public DeactivateCallback(InternalRuleFlowGroup ruleFlowGroup) { @@ -269,12 +278,12 @@ public DeactivateCallback(InternalRuleFlowGroup ruleFlowGroup) { } public DeactivateCallback(MarshallerReaderContext context) throws IOException { - this.ruleFlowGroup = (InternalRuleFlowGroup) context.wm.getAgenda().getRuleFlowGroup(context.readUTF()); + this.ruleFlowGroup = (InternalRuleFlowGroup) context.wm.getAgenda().getRuleFlowGroup( context.readUTF() ); } public void write(MarshallerWriteContext context) throws IOException { - context.writeInt( WorkingMemoryAction.DeactivateCallback ); - context.writeUTF(ruleFlowGroup.getName()); + context.writeInt( WorkingMemoryAction.DeactivateCallback ); + context.writeUTF( ruleFlowGroup.getName() ); } public void readExternal(ObjectInput in) throws IOException, @@ -283,28 +292,32 @@ public void readExternal(ObjectInput in) throws IOException, } public void writeExternal(ObjectOutput out) throws IOException { - out.writeObject(ruleFlowGroup); + out.writeObject( ruleFlowGroup ); } public void execute(InternalWorkingMemory workingMemory) { // check whether ruleflow group is still empty first - if (this.ruleFlowGroup.isEmpty()) { + if ( this.ruleFlowGroup.isEmpty() ) { // deactivate ruleflow group - this.ruleFlowGroup.setActive(false); + this.ruleFlowGroup.setActive( false ); } } } - - public void addNodeInstance(Long processInstanceId, String nodeInstanceId) { - nodeInstances.put(processInstanceId, nodeInstanceId); + + public void addNodeInstance(Long processInstanceId, + String nodeInstanceId) { + nodeInstances.put( processInstanceId, + nodeInstanceId ); } - public void removeNodeInstance(Long processInstanceId, String nodeInstanceId) { - nodeInstances.put(processInstanceId, nodeInstanceId); + public void removeNodeInstance(Long processInstanceId, + String nodeInstanceId) { + nodeInstances.put( processInstanceId, + nodeInstanceId ); } - + public Map<Long, String> getNodeInstances() { - return nodeInstances; + return nodeInstances; } - + } diff --git a/drools-core/src/main/java/org/drools/reteoo/RuleTerminalNode.java b/drools-core/src/main/java/org/drools/reteoo/RuleTerminalNode.java index 07b454c3f87..78ed21b2651 100644 --- a/drools-core/src/main/java/org/drools/reteoo/RuleTerminalNode.java +++ b/drools-core/src/main/java/org/drools/reteoo/RuleTerminalNode.java @@ -298,7 +298,7 @@ public void modifyLeftTuple(LeftTuple leftTuple, } AgendaItem item = (AgendaItem) leftTuple.getActivation(); - if ( item.isActivated() ) { + if ( item != null && item.isActivated() ) { // already activated, do nothing return; } @@ -314,12 +314,18 @@ public void 
modifyLeftTuple(LeftTuple leftTuple, final Timer timer = this.rule.getTimer(); if ( timer != null ) { + if ( item == null ) { + item = agenda.createScheduledAgendaItem( leftTuple, + context, + this.rule, + this.subrule ); + } agenda.scheduleItem( (ScheduledAgendaItem) item, workingMemory ); item.setActivated( true ); -// workingMemory.removeLogicalDependencies( item, -// context, -// this.rule ); + // workingMemory.removeLogicalDependencies( item, + // context, + // this.rule ); ((EventSupport) workingMemory).getAgendaEventSupport().fireActivationCreated( item, workingMemory ); @@ -334,18 +340,32 @@ public void modifyLeftTuple(LeftTuple leftTuple, } } - item.setSalience( rule.getSalience().getValue( leftTuple, - workingMemory ) ); // need to re-evaluate salience, as used fields may have changed - item.setPropagationContext( context ); // update the Propagation Context + if ( item == null ) { + // ----------------- + // Lazy instantiation and addition to the Agenda of AgendGroup + // implementations + // ---------------- + item = agenda.createAgendaItem( leftTuple, + rule.getSalience().getValue( leftTuple, + workingMemory ), + context, + this.rule, + this.subrule ); + item.setSequenence( this.sequence ); + } else { + item.setSalience( rule.getSalience().getValue( leftTuple, + workingMemory ) ); // need to re-evaluate salience, as used fields may have changed + item.setPropagationContext( context ); // update the Propagation Context + } boolean added = agenda.addActivation( item ); item.setActivated( added ); if ( added ) { -// workingMemory.removeLogicalDependencies( item, -// context, -// this.rule ); + // workingMemory.removeLogicalDependencies( item, + // context, + // this.rule ); ((EventSupport) workingMemory).getAgendaEventSupport().fireActivationCreated( item, workingMemory ); } @@ -395,7 +415,7 @@ protected void doRemove(final RuleRemovalContext context, builder, this, workingMemories ); - for( InternalWorkingMemory workingMemory : workingMemories ) { + for ( InternalWorkingMemory workingMemory : workingMemories ) { workingMemory.executeQueuedActions(); } context.setCleanupAdapter( adapter ); @@ -470,19 +490,21 @@ public short getType() { return NodeTypeEnums.RuleTerminalNode; } - public static class RTNCleanupAdapter implements CleanupAdapter { + public static class RTNCleanupAdapter + implements + CleanupAdapter { private RuleTerminalNode node; - + public RTNCleanupAdapter(RuleTerminalNode node) { this.node = node; } public void cleanUp(final LeftTuple leftTuple, final InternalWorkingMemory workingMemory) { - if( leftTuple.getLeftTupleSink() != node ) { + if ( leftTuple.getLeftTupleSink() != node ) { return; } - + final Activation activation = leftTuple.getActivation(); if ( activation.isActivated() ) {
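The RuleFlowGroupImpl hunks above extract the auto-deactivation check into a dedicated deactivateIfEmpty() method that queues a DeactivateCallback instead of deactivating inline, and the callback re-checks emptiness when it finally runs. A minimal sketch of that defer-and-recheck pattern follows; the group state and the action queue are simplified stand-ins, not the actual Drools API.

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.LinkedList;
import java.util.List;

// Defer-and-recheck deactivation, in the spirit of the extracted deactivateIfEmpty().
// RuleGroupSketch and the Runnable queue are illustrative placeholders for
// RuleFlowGroupImpl and the working memory's queued actions.
class RuleGroupSketch {
    private final List<Object> activations = new LinkedList<>();
    private boolean active = true;
    private final boolean autoDeactivate = true;

    void deactivateIfEmpty(Deque<Runnable> actionQueue) {
        if (active && autoDeactivate && activations.isEmpty()) {
            // Queue the deactivation instead of applying it inline...
            actionQueue.add(() -> {
                // ...and re-check on execution: an activation may have arrived
                // between queueing and draining (mirrors DeactivateCallback.execute()).
                if (activations.isEmpty()) {
                    active = false;
                }
            });
        }
    }

    public static void main(String[] args) {
        Deque<Runnable> queue = new ArrayDeque<>();
        RuleGroupSketch group = new RuleGroupSketch();
        group.deactivateIfEmpty(queue);   // nothing happens yet, callback is queued
        queue.forEach(Runnable::run);     // the engine drains the queue later
        System.out.println(group.active); // false: group auto-deactivated
    }
}

Deferring through the queue matters because the group may receive new activations before the queued actions run; the second isEmpty() check is what keeps the deferred deactivation safe.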
b40fe9d5bab0db8ca1ade525aaabc6c94d189f78
kotlin
Refactoring: TypeInfoFactory.createTypeInfo()- without type -> noTypeInfo(), getNotNullType -> getTypeNotNull, nullability- refined, style fixes--
p
https://github.com/JetBrains/kotlin
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/BindingContextUtils.java b/compiler/frontend/src/org/jetbrains/kotlin/resolve/BindingContextUtils.java index d59d0f1dd2899..3b4850f6bb9bc 100644 --- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/BindingContextUtils.java +++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/BindingContextUtils.java @@ -82,7 +82,7 @@ public static <K, V> V getNotNull( } @NotNull - public static JetType getNotNullType( + public static JetType getTypeNotNull( @NotNull BindingContext bindingContext, @NotNull JetExpression expression ) { @@ -160,7 +160,7 @@ public static JetTypeInfo getRecordedTypeInfo(@NotNull JetExpression expression, // NB: should never return null if expression is already processed if (!Boolean.TRUE.equals(context.get(BindingContext.PROCESSED, expression))) return null; JetTypeInfo result = context.get(BindingContext.EXPRESSION_TYPE_INFO, expression); - return result != null ? result : TypeInfoFactoryPackage.createTypeInfo(DataFlowInfo.EMPTY); + return result != null ? result : TypeInfoFactoryPackage.noTypeInfo(DataFlowInfo.EMPTY); } public static boolean isExpressionWithValidReference( diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/BindingContextUtils.kt b/compiler/frontend/src/org/jetbrains/kotlin/resolve/BindingContextUtils.kt index 73727214c7341..3accfe407612f 100644 --- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/BindingContextUtils.kt +++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/BindingContextUtils.kt @@ -31,7 +31,7 @@ import org.jetbrains.kotlin.psi.JetExpression import org.jetbrains.kotlin.resolve.DescriptorToSourceUtils import org.jetbrains.kotlin.resolve.calls.smartcasts.DataFlowInfo import org.jetbrains.kotlin.resolve.calls.context.ResolutionContext -import org.jetbrains.kotlin.types.expressions.typeInfoFactory.createTypeInfo +import org.jetbrains.kotlin.types.expressions.typeInfoFactory.noTypeInfo public fun JetReturnExpression.getTargetFunctionDescriptor(context: BindingContext): FunctionDescriptor? 
{ val targetLabel = getTargetLabel() @@ -63,7 +63,8 @@ public fun <C : ResolutionContext<C>> ResolutionContext<C>.recordScopeAndDataFlo trace.record(BindingContext.EXPRESSION_TYPE_INFO, expression, typeInfo.replaceDataFlowInfo(dataFlowInfo)) } else if (dataFlowInfo != DataFlowInfo.EMPTY) { - trace.record(BindingContext.EXPRESSION_TYPE_INFO, expression, createTypeInfo(dataFlowInfo)) + // Don't store anything in BindingTrace if it's simply an empty DataFlowInfo + trace.record(BindingContext.EXPRESSION_TYPE_INFO, expression, noTypeInfo(dataFlowInfo)) } } diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/ArgumentTypeResolver.java b/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/ArgumentTypeResolver.java index 1d846ff5b0f9e..972f399856abc 100644 --- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/ArgumentTypeResolver.java +++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/ArgumentTypeResolver.java @@ -192,7 +192,7 @@ public JetTypeInfo getArgumentTypeInfo( @NotNull ResolveArgumentsMode resolveArgumentsMode ) { if (expression == null) { - return TypeInfoFactoryPackage.createTypeInfo(context); + return TypeInfoFactoryPackage.noTypeInfo(context); } if (isFunctionLiteralArgument(expression, context)) { return getFunctionLiteralTypeInfo(expression, getFunctionLiteralArgument(expression, context), context, resolveArgumentsMode); diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/CallExpressionResolver.java b/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/CallExpressionResolver.java index 90bfb6635624f..1fc5ed8b9b2b1 100644 --- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/CallExpressionResolver.java +++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/CallExpressionResolver.java @@ -158,7 +158,7 @@ public JetTypeInfo getSimpleNameExpressionTypeInfo( } temporaryForVariable.commit(); - return TypeInfoFactoryPackage.createTypeInfo(context); + return TypeInfoFactoryPackage.noTypeInfo(context); } @NotNull @@ -201,7 +201,7 @@ public JetTypeInfo getCallExpressionTypeInfoWithoutFinalTypeCheck( context.trace.report(FUNCTION_CALL_EXPECTED.on(callExpression, callExpression, hasValueParameters)); } if (functionDescriptor == null) { - return TypeInfoFactoryPackage.createTypeInfo(context); + return TypeInfoFactoryPackage.noTypeInfo(context); } if (functionDescriptor instanceof ConstructorDescriptor && DescriptorUtils.isAnnotationClass(functionDescriptor.getContainingDeclaration())) { @@ -226,11 +226,11 @@ public JetTypeInfo getCallExpressionTypeInfoWithoutFinalTypeCheck( temporaryForVariable.commit(); context.trace.report(FUNCTION_EXPECTED.on(calleeExpression, calleeExpression, type != null ? 
type : ErrorUtils.createErrorType(""))); - return TypeInfoFactoryPackage.createTypeInfo(context); + return TypeInfoFactoryPackage.noTypeInfo(context); } } temporaryForFunction.commit(); - return TypeInfoFactoryPackage.createTypeInfo(context); + return TypeInfoFactoryPackage.noTypeInfo(context); } private static boolean canInstantiateAnnotationClass(@NotNull JetCallExpression expression) { @@ -265,7 +265,7 @@ else if (selectorExpression instanceof JetSimpleNameExpression) { else if (selectorExpression != null) { context.trace.report(ILLEGAL_SELECTOR.on(selectorExpression, selectorExpression.getText())); } - return TypeInfoFactoryPackage.createTypeInfo(context); + return TypeInfoFactoryPackage.noTypeInfo(context); } /** diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/util/callUtil.kt b/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/util/callUtil.kt index 9dbc99d7d603c..b3bc155dcc45d 100644 --- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/util/callUtil.kt +++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/util/callUtil.kt @@ -66,7 +66,7 @@ public fun <D : CallableDescriptor> ResolvedCall<D>.getParameterForArgument(valu // call public fun <C: ResolutionContext<C>> Call.hasUnresolvedArguments(context: ResolutionContext<C>): Boolean { - val arguments = getValueArguments().map { it?.getArgumentExpression() } + val arguments = getValueArguments().map { it.getArgumentExpression() } return arguments.any { argument -> val expressionType = argument?.let { context.trace.getBindingContext().getType(it) } @@ -135,7 +135,7 @@ public fun JetElement.getCall(context: BindingContext): Call? { } public fun JetElement.getParentCall(context: BindingContext, strict: Boolean = true): Call? { - val callExpressionTypes = array<Class<out JetElement>?>( + val callExpressionTypes = arrayOf<Class<out JetElement>?>( javaClass<JetSimpleNameExpression>(), javaClass<JetCallElement>(), javaClass<JetBinaryExpression>(), javaClass<JetUnaryExpression>(), javaClass<JetArrayAccessExpression>()) diff --git a/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/BasicExpressionTypingVisitor.java b/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/BasicExpressionTypingVisitor.java index 0c0742291c076..ccbf06ea810d3 100644 --- a/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/BasicExpressionTypingVisitor.java +++ b/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/BasicExpressionTypingVisitor.java @@ -111,7 +111,7 @@ public JetTypeInfo visitSimpleNameExpression(@NotNull JetSimpleNameExpression ex public JetTypeInfo visitParenthesizedExpression(@NotNull JetParenthesizedExpression expression, ExpressionTypingContext context) { JetExpression innerExpression = expression.getExpression(); if (innerExpression == null) { - return TypeInfoFactoryPackage.createTypeInfo(context); + return TypeInfoFactoryPackage.noTypeInfo(context); } return facade.getTypeInfo(innerExpression, context.replaceScope(context.scope)); } @@ -287,7 +287,7 @@ private JetTypeInfo errorInSuper(JetSuperExpression expression, ExpressionTyping if (superTypeQualifier != null) { components.expressionTypingServices.getTypeResolver().resolveType(context.scope, superTypeQualifier, context.trace, true); } - return TypeInfoFactoryPackage.createTypeInfo(context); + return TypeInfoFactoryPackage.noTypeInfo(context); } private JetType checkPossiblyQualifiedSuper( @@ -812,7 +812,7 @@ public JetTypeInfo visitUnaryExpression(@NotNull JetUnaryExpression expression, : 
contextWithExpectedType.replaceContextDependency(INDEPENDENT).replaceExpectedType(NO_EXPECTED_TYPE); JetExpression baseExpression = expression.getBaseExpression(); - if (baseExpression == null) return TypeInfoFactoryPackage.createTypeInfo(context); + if (baseExpression == null) return TypeInfoFactoryPackage.noTypeInfo(context); JetSimpleNameExpression operationSign = expression.getOperationReference(); @@ -953,7 +953,7 @@ public JetTypeInfo visitLabeledExpression( boolean isStatement ) { JetExpression baseExpression = expression.getBaseExpression(); - if (baseExpression == null) return TypeInfoFactoryPackage.createTypeInfo(context); + if (baseExpression == null) return TypeInfoFactoryPackage.noTypeInfo(context); return facade.getTypeInfo(baseExpression, context, isStatement); } @@ -1079,7 +1079,7 @@ else if (OperatorConventions.BOOLEAN_OPERATIONS.containsKey(operationType)) { } else { context.trace.report(UNSUPPORTED.on(operationSign, "Unknown operation")); - result = TypeInfoFactoryPackage.createTypeInfo(context); + result = TypeInfoFactoryPackage.noTypeInfo(context); } CompileTimeConstant<?> value = ConstantExpressionEvaluator.evaluate( expression, contextWithExpectedType.trace, contextWithExpectedType.expectedType @@ -1222,7 +1222,7 @@ private JetTypeInfo visitElvisExpression( if (left == null || right == null) { getTypeInfoOrNullType(left, context, facade); - return TypeInfoFactoryPackage.createTypeInfo(context); + return TypeInfoFactoryPackage.noTypeInfo(context); } Call call = createCallForSpecialConstruction(expression, expression.getOperationReference(), Lists.newArrayList(left, right)); @@ -1245,7 +1245,7 @@ private JetTypeInfo visitElvisExpression( dataFlowInfo = dataFlowInfo.disequate(value, DataFlowValue.NULL); } JetType type = resolvedCall.getResultingDescriptor().getReturnType(); - if (type == null || rightType == null) return TypeInfoFactoryPackage.createTypeInfo(dataFlowInfo); + if (type == null || rightType == null) return TypeInfoFactoryPackage.noTypeInfo(dataFlowInfo); // Sometimes return type for special call for elvis operator might be nullable, // but result is not nullable if the right type is not nullable @@ -1255,6 +1255,8 @@ private JetTypeInfo visitElvisExpression( if (context.contextDependency == DEPENDENT) { return TypeInfoFactoryPackage.createTypeInfo(type, dataFlowInfo); } + + // If break or continue was possible, take condition check info as the jump info return TypeInfoFactoryPackage.createTypeInfo(DataFlowUtils.checkType(type, expression, context), dataFlowInfo, loopBreakContinuePossible, @@ -1273,7 +1275,7 @@ public JetTypeInfo checkInExpression( ExpressionTypingContext contextWithNoExpectedType = context.replaceExpectedType(NO_EXPECTED_TYPE); if (right == null) { if (left != null) facade.getTypeInfo(left, contextWithNoExpectedType); - return TypeInfoFactoryPackage.createTypeInfo(context); + return TypeInfoFactoryPackage.noTypeInfo(context); } JetTypeInfo rightTypeInfo = facade.getTypeInfo(right, contextWithNoExpectedType); @@ -1350,7 +1352,7 @@ private JetTypeInfo visitAssignment(JetBinaryExpression expression, ExpressionTy private JetTypeInfo assignmentIsNotAnExpressionError(JetBinaryExpression expression, ExpressionTypingContext context) { facade.checkStatementType(expression, context); context.trace.report(ASSIGNMENT_IN_EXPRESSION_CONTEXT.on(expression)); - return TypeInfoFactoryPackage.createTypeInfo(context); + return TypeInfoFactoryPackage.noTypeInfo(context); } @Override @@ -1370,7 +1372,7 @@ public JetTypeInfo getTypeInfoForBinaryCall( //left 
here is a receiver, so it doesn't depend on expected type typeInfo = facade.getTypeInfo(left, context.replaceContextDependency(INDEPENDENT).replaceExpectedType(NO_EXPECTED_TYPE)); } else { - typeInfo = TypeInfoFactoryPackage.createTypeInfo(context); + typeInfo = TypeInfoFactoryPackage.noTypeInfo(context); } ExpressionTypingContext contextWithDataFlow = context.replaceDataFlowInfo(typeInfo.getDataFlowInfo()); @@ -1396,7 +1398,7 @@ public JetTypeInfo getTypeInfoForBinaryCall( @Override public JetTypeInfo visitDeclaration(@NotNull JetDeclaration dcl, ExpressionTypingContext context) { context.trace.report(DECLARATION_IN_ILLEGAL_CONTEXT.on(dcl)); - return TypeInfoFactoryPackage.createTypeInfo(context); + return TypeInfoFactoryPackage.noTypeInfo(context); } @Override @@ -1404,10 +1406,9 @@ public JetTypeInfo visitRootPackageExpression(@NotNull JetRootPackageExpression if (!JetPsiUtil.isLHSOfDot(expression)) { context.trace.report(PACKAGE_IS_NOT_AN_EXPRESSION.on(expression)); } - return TypeInfoFactoryPackage.createTypeInfo(context); + return TypeInfoFactoryPackage.noTypeInfo(context); } - private class StringTemplateVisitor extends JetVisitorVoid { final ExpressionTypingContext context; @@ -1416,7 +1417,7 @@ private class StringTemplateVisitor extends JetVisitorVoid { StringTemplateVisitor(ExpressionTypingContext context) { this.context = context; - this.typeInfo = TypeInfoFactoryPackage.createTypeInfo(context); + this.typeInfo = TypeInfoFactoryPackage.noTypeInfo(context); } @Override @@ -1460,7 +1461,7 @@ public JetTypeInfo visitAnnotatedExpression(JetAnnotatedExpression expression, E JetExpression baseExpression = expression.getBaseExpression(); if (baseExpression == null) { - return TypeInfoFactoryPackage.createTypeInfo(context); + return TypeInfoFactoryPackage.noTypeInfo(context); } return facade.getTypeInfo(baseExpression, context, isStatement); } @@ -1468,7 +1469,7 @@ public JetTypeInfo visitAnnotatedExpression(JetAnnotatedExpression expression, E @Override public JetTypeInfo visitJetElement(@NotNull JetElement element, ExpressionTypingContext context) { context.trace.report(UNSUPPORTED.on(element, getClass().getCanonicalName())); - return TypeInfoFactoryPackage.createTypeInfo(context); + return TypeInfoFactoryPackage.noTypeInfo(context); } @NotNull @@ -1488,7 +1489,7 @@ private JetTypeInfo resolveArrayAccessSpecialMethod(@NotNull JetArrayAccessExpre @NotNull BindingTrace traceForResolveResult, boolean isGet) { JetExpression arrayExpression = arrayAccessExpression.getArrayExpression(); - if (arrayExpression == null) return TypeInfoFactoryPackage.createTypeInfo(oldContext); + if (arrayExpression == null) return TypeInfoFactoryPackage.noTypeInfo(oldContext); JetTypeInfo arrayTypeInfo = facade.safeGetTypeInfo(arrayExpression, oldContext.replaceExpectedType(NO_EXPECTED_TYPE) diff --git a/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/ControlStructureTypingVisitor.java b/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/ControlStructureTypingVisitor.java index 4aaf6e79165f2..ce05ec4c672c6 100644 --- a/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/ControlStructureTypingVisitor.java +++ b/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/ControlStructureTypingVisitor.java @@ -111,8 +111,8 @@ public JetTypeInfo visitIfExpression(JetIfExpression ifExpression, ExpressionTyp : result; } return TypeInfoFactoryPackage.createTypeInfo(DataFlowUtils.checkImplicitCast( - components.builtIns.getUnitType(), ifExpression, - contextWithExpectedType, 
isStatement + components.builtIns.getUnitType(), ifExpression, + contextWithExpectedType, isStatement ), thenInfo.or(elseInfo) ); @@ -221,8 +221,9 @@ public JetTypeInfo visitWhileExpression(JetWhileExpression expression, Expressio bodyTypeInfo = components.expressionTypingServices.getBlockReturnedTypeWithWritableScope( scopeToExtend, Collections.singletonList(body), CoercionStrategy.NO_COERCION, context.replaceDataFlowInfo(conditionInfo)); - } else { - bodyTypeInfo = TypeInfoFactoryPackage.createTypeInfo(conditionInfo); + } + else { + bodyTypeInfo = TypeInfoFactoryPackage.noTypeInfo(conditionInfo); } // Condition is false at this point only if there is no jumps outside @@ -331,7 +332,7 @@ else if (body != null) { writableScope, block, CoercionStrategy.NO_COERCION, context); } else { - bodyTypeInfo = TypeInfoFactoryPackage.createTypeInfo(context); + bodyTypeInfo = TypeInfoFactoryPackage.noTypeInfo(context); } JetExpression condition = expression.getCondition(); DataFlowInfo conditionDataFlowInfo = checkCondition(conditionScope, condition, context); @@ -380,7 +381,7 @@ public JetTypeInfo visitForExpression(JetForExpression expression, ExpressionTyp } } else { - loopRangeInfo = TypeInfoFactoryPackage.createTypeInfo(context); + loopRangeInfo = TypeInfoFactoryPackage.noTypeInfo(context); } WritableScope loopScope = newWritableScopeImpl(context, "Scope with for-loop index"); @@ -480,7 +481,7 @@ public JetTypeInfo visitTryExpression(@NotNull JetTryExpression expression, Expr } } - JetTypeInfo result = TypeInfoFactoryPackage.createTypeInfo(context); + JetTypeInfo result = TypeInfoFactoryPackage.noTypeInfo(context); if (finallyBlock != null) { result = facade.getTypeInfo(finallyBlock.getFinalExpression(), context.replaceExpectedType(NO_EXPECTED_TYPE)); diff --git a/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/DataFlowUtils.java b/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/DataFlowUtils.java index 951b2517797b8..1fdb467306807 100644 --- a/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/DataFlowUtils.java +++ b/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/DataFlowUtils.java @@ -225,7 +225,7 @@ public static JetTypeInfo illegalStatementType(@NotNull JetExpression expression facade.checkStatementType( expression, context.replaceExpectedType(TypeUtils.NO_EXPECTED_TYPE).replaceContextDependency(INDEPENDENT)); context.trace.report(EXPRESSION_EXPECTED.on(expression, expression)); - return TypeInfoFactoryPackage.createTypeInfo(context); + return TypeInfoFactoryPackage.noTypeInfo(context); } @NotNull diff --git a/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/ExpressionTypingServices.java b/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/ExpressionTypingServices.java index 581d1a6d7de8a..94460733925e8 100644 --- a/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/ExpressionTypingServices.java +++ b/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/ExpressionTypingServices.java @@ -315,13 +315,13 @@ public JetType getBodyExpressionType( @NotNull ExpressionTypingContext context ) { if (block.isEmpty()) { - return new JetTypeInfo(builtIns.getUnitType(), context.dataFlowInfo, false, context.dataFlowInfo); + return TypeInfoFactoryPackage.createTypeInfo(builtIns.getUnitType(), context); } ExpressionTypingInternals blockLevelVisitor = ExpressionTypingVisitorDispatcher.createForBlock(expressionTypingComponents, scope); ExpressionTypingContext newContext = 
context.replaceScope(scope).replaceExpectedType(NO_EXPECTED_TYPE); - JetTypeInfo result = TypeInfoFactoryPackage.createTypeInfo(context); + JetTypeInfo result = TypeInfoFactoryPackage.noTypeInfo(context); // Jump point data flow info DataFlowInfo beforeJumpInfo = newContext.dataFlowInfo; boolean jumpOutPossible = false; diff --git a/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/ExpressionTypingUtils.java b/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/ExpressionTypingUtils.java index 07e2ccaf6716f..0abb2bfbe094d 100644 --- a/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/ExpressionTypingUtils.java +++ b/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/ExpressionTypingUtils.java @@ -336,7 +336,7 @@ public static JetTypeInfo getTypeInfoOrNullType( ) { return expression != null ? facade.getTypeInfo(expression, context) - : TypeInfoFactoryPackage.createTypeInfo(context); + : TypeInfoFactoryPackage.noTypeInfo(context); } @SuppressWarnings("SuspiciousMethodCalls") diff --git a/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/ExpressionTypingVisitorDispatcher.java b/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/ExpressionTypingVisitorDispatcher.java index 19ea2c2a5d15d..49a13e3a785e1 100644 --- a/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/ExpressionTypingVisitorDispatcher.java +++ b/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/ExpressionTypingVisitorDispatcher.java @@ -99,8 +99,9 @@ public final JetTypeInfo safeGetTypeInfo(@NotNull JetExpression expression, Expr if (typeInfo.getType() != null) { return typeInfo; } - return typeInfo.replaceType(ErrorUtils.createErrorType("Type for " + expression.getText())).replaceDataFlowInfo( - context.dataFlowInfo); + return typeInfo + .replaceType(ErrorUtils.createErrorType("Type for " + expression.getText())) + .replaceDataFlowInfo(context.dataFlowInfo); } @Override @@ -131,7 +132,7 @@ public void checkStatementType(@NotNull JetExpression expression, ExpressionTypi } @NotNull - static private JetTypeInfo getTypeInfo(@NotNull JetExpression expression, ExpressionTypingContext context, JetVisitor<JetTypeInfo, ExpressionTypingContext> visitor) { + private static JetTypeInfo getTypeInfo(@NotNull JetExpression expression, ExpressionTypingContext context, JetVisitor<JetTypeInfo, ExpressionTypingContext> visitor) { try { JetTypeInfo recordedTypeInfo = BindingContextUtils.getRecordedTypeInfo(expression, context.trace.getBindingContext()); if (recordedTypeInfo != null) { @@ -141,7 +142,8 @@ static private JetTypeInfo getTypeInfo(@NotNull JetExpression expression, Expres try { result = expression.accept(visitor, context); // Some recursive definitions (object expressions) must put their types in the cache manually: - if (Boolean.TRUE.equals(context.trace.get(BindingContext.PROCESSED, expression))) { + //noinspection ConstantConditions + if (context.trace.get(BindingContext.PROCESSED, expression)) { JetType type = context.trace.getBindingContext().getType(expression); return result.replaceType(type); } @@ -153,7 +155,7 @@ static private JetTypeInfo getTypeInfo(@NotNull JetExpression expression, Expres } catch (ReenteringLazyValueComputationException e) { context.trace.report(TYPECHECKER_HAS_RUN_INTO_RECURSIVE_PROBLEM.on(expression)); - result = TypeInfoFactoryPackage.createTypeInfo(context); + result = TypeInfoFactoryPackage.noTypeInfo(context); } context.trace.record(BindingContext.PROCESSED, expression); diff --git 
a/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/ExpressionTypingVisitorForStatements.java b/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/ExpressionTypingVisitorForStatements.java index 0f322c3d17ac3..e7238fa8e8385 100644 --- a/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/ExpressionTypingVisitorForStatements.java +++ b/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/ExpressionTypingVisitorForStatements.java @@ -155,7 +155,7 @@ public JetTypeInfo visitProperty(@NotNull JetProperty property, ExpressionTyping } } else { - typeInfo = TypeInfoFactoryPackage.createTypeInfo(context); + typeInfo = TypeInfoFactoryPackage.noTypeInfo(context); } { @@ -176,13 +176,13 @@ public JetTypeInfo visitMultiDeclaration(@NotNull JetMultiDeclaration multiDecla JetExpression initializer = multiDeclaration.getInitializer(); if (initializer == null) { context.trace.report(INITIALIZER_REQUIRED_FOR_MULTIDECLARATION.on(multiDeclaration)); - return TypeInfoFactoryPackage.createTypeInfo(context); + return TypeInfoFactoryPackage.noTypeInfo(context); } ExpressionReceiver expressionReceiver = ExpressionTypingUtils.getExpressionReceiver( facade, initializer, context.replaceExpectedType(NO_EXPECTED_TYPE).replaceContextDependency(INDEPENDENT)); JetTypeInfo typeInfo = facade.getTypeInfo(initializer, context); if (expressionReceiver == null) { - return TypeInfoFactoryPackage.createTypeInfo(context); + return TypeInfoFactoryPackage.noTypeInfo(context); } components.expressionTypingUtils.defineLocalVariablesFromMultiDeclaration(scope, multiDeclaration, expressionReceiver, initializer, context); return typeInfo.replaceType(DataFlowUtils.checkStatementType(multiDeclaration, context)); @@ -339,7 +339,7 @@ protected JetTypeInfo visitAssignment(JetBinaryExpression expression, Expression JetExpression right = expression.getRight(); if (left instanceof JetArrayAccessExpression) { JetArrayAccessExpression arrayAccessExpression = (JetArrayAccessExpression) left; - if (right == null) return TypeInfoFactoryPackage.createTypeInfo(context); + if (right == null) return TypeInfoFactoryPackage.noTypeInfo(context); JetTypeInfo typeInfo = basic.resolveArrayAccessSetMethod(arrayAccessExpression, right, context, context.trace); basic.checkLValue(context.trace, context, arrayAccessExpression, right); return typeInfo.replaceType(checkAssignmentType(typeInfo.getType(), expression, contextWithExpectedType)); @@ -347,25 +347,25 @@ protected JetTypeInfo visitAssignment(JetBinaryExpression expression, Expression JetTypeInfo leftInfo = ExpressionTypingUtils.getTypeInfoOrNullType(left, context, facade); JetType leftType = leftInfo.getType(); DataFlowInfo dataFlowInfo = leftInfo.getDataFlowInfo(); - JetTypeInfo rightInfo; + JetTypeInfo resultInfo; if (right != null) { - rightInfo = facade.getTypeInfo(right, context.replaceDataFlowInfo(dataFlowInfo).replaceExpectedType(leftType)); - dataFlowInfo = rightInfo.getDataFlowInfo(); - JetType rightType = rightInfo.getType(); + resultInfo = facade.getTypeInfo(right, context.replaceDataFlowInfo(dataFlowInfo).replaceExpectedType(leftType)); + dataFlowInfo = resultInfo.getDataFlowInfo(); + JetType rightType = resultInfo.getType(); if (left != null && leftType != null && rightType != null) { DataFlowValue leftValue = DataFlowValueFactory.createDataFlowValue(left, leftType, context); DataFlowValue rightValue = DataFlowValueFactory.createDataFlowValue(right, rightType, context); // We cannot say here anything new about rightValue except it has the same value as 
leftValue - rightInfo = rightInfo.replaceDataFlowInfo(dataFlowInfo.assign(leftValue, rightValue)); + resultInfo = resultInfo.replaceDataFlowInfo(dataFlowInfo.assign(leftValue, rightValue)); } } else { - rightInfo = leftInfo; + resultInfo = leftInfo; } if (leftType != null && leftOperand != null) { //if leftType == null, some other error has been generated basic.checkLValue(context.trace, context, leftOperand, right); } - return rightInfo.replaceType(DataFlowUtils.checkStatementType(expression, contextWithExpectedType)); + return resultInfo.replaceType(DataFlowUtils.checkStatementType(expression, contextWithExpectedType)); } @@ -377,7 +377,7 @@ public JetTypeInfo visitExpression(@NotNull JetExpression expression, Expression @Override public JetTypeInfo visitJetElement(@NotNull JetElement element, ExpressionTypingContext context) { context.trace.report(UNSUPPORTED.on(element, "in a block")); - return TypeInfoFactoryPackage.createTypeInfo(context); + return TypeInfoFactoryPackage.noTypeInfo(context); } @Override diff --git a/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/FunctionsTypingVisitor.kt b/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/FunctionsTypingVisitor.kt index 25462096976c6..e4e4f36839f9d 100644 --- a/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/FunctionsTypingVisitor.kt +++ b/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/FunctionsTypingVisitor.kt @@ -42,6 +42,7 @@ import org.jetbrains.kotlin.types.checker.JetTypeChecker import org.jetbrains.kotlin.types.expressions.CoercionStrategy.COERCION_TO_UNIT import org.jetbrains.kotlin.types.expressions.typeInfoFactory.createCheckedTypeInfo import org.jetbrains.kotlin.types.expressions.typeInfoFactory.createTypeInfo +import org.jetbrains.kotlin.types.expressions.typeInfoFactory.noTypeInfo import org.jetbrains.kotlin.utils.addIfNotNull public class FunctionsTypingVisitor(facade: ExpressionTypingInternals) : ExpressionTypingVisitor(facade) { diff --git a/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/PatternMatchingTypingVisitor.java b/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/PatternMatchingTypingVisitor.java index 52c57491fa858..d14483e86afc1 100644 --- a/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/PatternMatchingTypingVisitor.java +++ b/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/PatternMatchingTypingVisitor.java @@ -51,8 +51,9 @@ protected PatternMatchingTypingVisitor(@NotNull ExpressionTypingInternals facade @Override public JetTypeInfo visitIsExpression(@NotNull JetIsExpression expression, ExpressionTypingContext contextWithExpectedType) { - ExpressionTypingContext context = contextWithExpectedType.replaceExpectedType(NO_EXPECTED_TYPE).replaceContextDependency( - INDEPENDENT); + ExpressionTypingContext context = contextWithExpectedType + .replaceExpectedType(NO_EXPECTED_TYPE) + .replaceContextDependency(INDEPENDENT); JetExpression leftHandSide = expression.getLeftHandSide(); JetTypeInfo typeInfo = facade.safeGetTypeInfo(leftHandSide, context.replaceScope(context.scope)); JetType knownType = typeInfo.getType(); diff --git a/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/TypeInfoFactory.kt b/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/TypeInfoFactory.kt index 91b16fecd9d33..5722f77830d57 100644 --- a/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/TypeInfoFactory.kt +++ b/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/TypeInfoFactory.kt @@ -22,22 +22,22 
@@ import org.jetbrains.kotlin.resolve.calls.smartcasts.DataFlowInfo import org.jetbrains.kotlin.types.JetType import org.jetbrains.kotlin.types.expressions.JetTypeInfo -/** +/* * Functions in this file are intended to create type info instances in different circumstances */ -public fun createTypeInfo(type: JetType?): JetTypeInfo = createTypeInfo(type, DataFlowInfo.EMPTY) - -public fun createTypeInfo(dataFlowInfo: DataFlowInfo): JetTypeInfo = JetTypeInfo(null, dataFlowInfo) +public fun createTypeInfo(type: JetType?, dataFlowInfo: DataFlowInfo): JetTypeInfo = JetTypeInfo(type, dataFlowInfo) -public fun createTypeInfo(context: ResolutionContext<*>): JetTypeInfo = createTypeInfo(context.dataFlowInfo) +public fun createTypeInfo(type: JetType?, dataFlowInfo: DataFlowInfo, jumpPossible: Boolean, jumpFlowInfo: DataFlowInfo): JetTypeInfo = + JetTypeInfo(type, dataFlowInfo, jumpPossible, jumpFlowInfo) -public fun createTypeInfo(type: JetType?, dataFlowInfo: DataFlowInfo): JetTypeInfo = JetTypeInfo(type, dataFlowInfo) +public fun createTypeInfo(type: JetType?): JetTypeInfo = createTypeInfo(type, DataFlowInfo.EMPTY) public fun createTypeInfo(type: JetType?, context: ResolutionContext<*>): JetTypeInfo = createTypeInfo(type, context.dataFlowInfo) -public fun createTypeInfo(type: JetType?, dataFlowInfo: DataFlowInfo, jumpPossible: Boolean, jumpFlowInfo: DataFlowInfo): JetTypeInfo = - JetTypeInfo(type, dataFlowInfo, jumpPossible, jumpFlowInfo) +public fun noTypeInfo(dataFlowInfo: DataFlowInfo): JetTypeInfo = createTypeInfo(null, dataFlowInfo) + +public fun noTypeInfo(context: ResolutionContext<*>): JetTypeInfo = noTypeInfo(context.dataFlowInfo) public fun createCheckedTypeInfo(type: JetType?, context: ResolutionContext<*>, expression: JetExpression): JetTypeInfo = createTypeInfo(type, context).checkType(expression, context) diff --git a/js/js.translator/src/org/jetbrains/kotlin/js/translate/expression/ExpressionVisitor.java b/js/js.translator/src/org/jetbrains/kotlin/js/translate/expression/ExpressionVisitor.java index 16b24f8f32ef2..8ab76ed82fea3 100644 --- a/js/js.translator/src/org/jetbrains/kotlin/js/translate/expression/ExpressionVisitor.java +++ b/js/js.translator/src/org/jetbrains/kotlin/js/translate/expression/ExpressionVisitor.java @@ -356,7 +356,7 @@ public JsNode visitBinaryWithTypeRHSExpression(@NotNull JetBinaryExpressionWithT assert right != null; JetType rightType = BindingContextUtils.getNotNull(context.bindingContext(), BindingContext.TYPE, right); - JetType leftType = BindingContextUtils.getNotNullType(context.bindingContext(), expression.getLeft()); + JetType leftType = BindingContextUtils.getTypeNotNull(context.bindingContext(), expression.getLeft()); if (TypeUtils.isNullableType(rightType) || !TypeUtils.isNullableType(leftType)) { return jsExpression.source(expression); } diff --git a/js/js.translator/src/org/jetbrains/kotlin/js/translate/operation/UnaryOperationTranslator.java b/js/js.translator/src/org/jetbrains/kotlin/js/translate/operation/UnaryOperationTranslator.java index 84df664fe621e..bc4fb74893126 100644 --- a/js/js.translator/src/org/jetbrains/kotlin/js/translate/operation/UnaryOperationTranslator.java +++ b/js/js.translator/src/org/jetbrains/kotlin/js/translate/operation/UnaryOperationTranslator.java @@ -55,7 +55,7 @@ public static JsExpression translate( IElementType operationToken = expression.getOperationReference().getReferencedNameElementType(); if (operationToken == JetTokens.EXCLEXCL) { JetExpression baseExpression = getBaseExpression(expression); - JetType type = 
BindingContextUtils.getNotNullType(context.bindingContext(), baseExpression); + JetType type = BindingContextUtils.getTypeNotNull(context.bindingContext(), baseExpression); JsExpression translatedExpression = translateAsExpression(baseExpression, context); return type.isMarkedNullable() ? sure(translatedExpression, context) : translatedExpression; } diff --git a/js/js.translator/src/org/jetbrains/kotlin/js/translate/utils/BindingUtils.java b/js/js.translator/src/org/jetbrains/kotlin/js/translate/utils/BindingUtils.java index 79fac37510370..09b5b127e81e5 100644 --- a/js/js.translator/src/org/jetbrains/kotlin/js/translate/utils/BindingUtils.java +++ b/js/js.translator/src/org/jetbrains/kotlin/js/translate/utils/BindingUtils.java @@ -210,7 +210,7 @@ public static ResolvedCall<FunctionDescriptor> getHasNextCallable(@NotNull Bindi @NotNull public static JetType getTypeForExpression(@NotNull BindingContext context, @NotNull JetExpression expression) { - return BindingContextUtils.getNotNullType(context, expression); + return BindingContextUtils.getTypeNotNull(context, expression); } @NotNull
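The renames above split the TypeInfoFactory API: the createTypeInfo(...) overloads that silently carried a null type become an explicit noTypeInfo(...), and getNotNullType becomes getTypeNotNull. A rough Java analogue of the factory split is sketched below; the String fields are placeholders, not the compiler's JetType/DataFlowInfo classes.

// Illustrative Java analogue of the factory split: an explicit noTypeInfo(...)
// replaces the overload that hid the "no type" case behind a null argument.
final class TypeInfoSketch {
    final String type;      // null means "no type could be computed"
    final String dataFlow;  // stands in for the accompanying DataFlowInfo

    private TypeInfoSketch(String type, String dataFlow) {
        this.type = type;
        this.dataFlow = dataFlow;
    }

    static TypeInfoSketch createTypeInfo(String type, String dataFlow) {
        return new TypeInfoSketch(type, dataFlow);
    }

    // Reads as intent at the call site: "no type is known here".
    static TypeInfoSketch noTypeInfo(String dataFlow) {
        return createTypeInfo(null, dataFlow);
    }
}

At the many call sites touched above the change is purely about readability: noTypeInfo(context) states that no type could be computed for the expression, where the old createTypeInfo(context) overload left that implicit.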
188a11bdb925e3a4c1f38f8ff52f0039eb343eaf
spring-framework
Fixed setFavorPathExtension delegation code--
c
https://github.com/spring-projects/spring-framework
diff --git a/spring-webmvc/src/main/java/org/springframework/web/servlet/view/ContentNegotiatingViewResolver.java b/spring-webmvc/src/main/java/org/springframework/web/servlet/view/ContentNegotiatingViewResolver.java index 2d543941af6f..f7b59c3f6c24 100644 --- a/spring-webmvc/src/main/java/org/springframework/web/servlet/view/ContentNegotiatingViewResolver.java +++ b/spring-webmvc/src/main/java/org/springframework/web/servlet/view/ContentNegotiatingViewResolver.java @@ -1,5 +1,5 @@ /* - * Copyright 2002-2012 the original author or authors. + * Copyright 2002-2013 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -25,7 +25,6 @@ import java.util.Map; import java.util.Properties; import java.util.Set; - import javax.activation.FileTypeMap; import javax.servlet.ServletContext; import javax.servlet.http.HttpServletRequest; @@ -33,6 +32,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; + import org.springframework.beans.factory.BeanFactoryUtils; import org.springframework.beans.factory.InitializingBean; import org.springframework.core.OrderComparator; @@ -95,7 +95,7 @@ public class ContentNegotiatingViewResolver extends WebApplicationObjectSupport private ContentNegotiationManager contentNegotiationManager; - private ContentNegotiationManagerFactoryBean cnManagerFactoryBean = new ContentNegotiationManagerFactoryBean(); + private final ContentNegotiationManagerFactoryBean cnManagerFactoryBean = new ContentNegotiationManagerFactoryBean(); private boolean useNotAcceptableStatusCode = false; @@ -104,10 +104,6 @@ public class ContentNegotiatingViewResolver extends WebApplicationObjectSupport private List<ViewResolver> viewResolvers; - public ContentNegotiatingViewResolver() { - super(); - } - public void setOrder(int order) { this.order = order; } @@ -118,7 +114,9 @@ public int getOrder() { /** * Set the {@link ContentNegotiationManager} to use to determine requested media types. - * If not set, the default constructor is used. + * <p>If not set, ContentNegotiationManager's default constructor will be used, + * applying a {@link org.springframework.web.accept.HeaderContentNegotiationStrategy}. + * @see ContentNegotiationManager#ContentNegotiationManager() */ public void setContentNegotiationManager(ContentNegotiationManager contentNegotiationManager) { this.contentNegotiationManager = contentNegotiationManager; @@ -130,18 +128,16 @@ public void setContentNegotiationManager(ContentNegotiationManager contentNegoti * <p>For instance, when this flag is {@code true} (the default), a request for {@code /hotels.pdf} * will result in an {@code AbstractPdfView} being resolved, while the {@code Accept} header can be the * browser-defined {@code text/html,application/xhtml+xml}. - * * @deprecated use {@link #setContentNegotiationManager(ContentNegotiationManager)} */ @Deprecated public void setFavorPathExtension(boolean favorPathExtension) { - this.cnManagerFactoryBean.setFavorParameter(favorPathExtension); + this.cnManagerFactoryBean.setFavorPathExtension(favorPathExtension); } /** * Indicate whether to use the Java Activation Framework to map from file extensions to media types. * <p>Default is {@code true}, i.e. the Java Activation Framework is used (if available). 
- * * @deprecated use {@link #setContentNegotiationManager(ContentNegotiationManager)} */ @Deprecated @@ -155,7 +151,6 @@ public void setUseJaf(boolean useJaf) { * <p>For instance, when this flag is {@code true}, a request for {@code /hotels?format=pdf} will result * in an {@code AbstractPdfView} being resolved, while the {@code Accept} header can be the browser-defined * {@code text/html,application/xhtml+xml}. - * * @deprecated use {@link #setContentNegotiationManager(ContentNegotiationManager)} */ @Deprecated @@ -166,7 +161,6 @@ public void setFavorParameter(boolean favorParameter) { /** * Set the parameter name that can be used to determine the requested media type if the {@link * #setFavorParameter} property is {@code true}. The default parameter name is {@code format}. - * * @deprecated use {@link #setContentNegotiationManager(ContentNegotiationManager)} */ @Deprecated @@ -179,7 +173,6 @@ public void setParameterName(String parameterName) { * <p>If set to {@code true}, this view resolver will only refer to the file extension and/or * parameter, as indicated by the {@link #setFavorPathExtension favorPathExtension} and * {@link #setFavorParameter favorParameter} properties. - * * @deprecated use {@link #setContentNegotiationManager(ContentNegotiationManager)} */ @Deprecated @@ -191,7 +184,6 @@ public void setIgnoreAcceptHeader(boolean ignoreAcceptHeader) { * Set the mapping from file extensions to media types. * <p>When this mapping is not set or when an extension is not present, this view resolver * will fall back to using a {@link FileTypeMap} when the Java Action Framework is available. - * * @deprecated use {@link #setContentNegotiationManager(ContentNegotiationManager)} */ @Deprecated @@ -207,7 +199,6 @@ public void setMediaTypes(Map<String, String> mediaTypes) { * Set the default content type. * <p>This content type will be used when file extension, parameter, nor {@code Accept} * header define a content-type, either through being disabled or empty. - * * @deprecated use {@link #setContentNegotiationManager(ContentNegotiationManager)} */ @Deprecated @@ -275,7 +266,7 @@ protected void initServletContext(ServletContext servletContext) { this.cnManagerFactoryBean.setServletContext(servletContext); } - public void afterPropertiesSet() throws Exception { + public void afterPropertiesSet() { if (this.contentNegotiationManager == null) { this.cnManagerFactoryBean.afterPropertiesSet(); this.contentNegotiationManager = this.cnManagerFactoryBean.getObject();
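The one-line fix in this commit corrects a deprecated setter that delegated to the wrong property: setFavorPathExtension(boolean) was calling cnManagerFactoryBean.setFavorParameter(...). Below is a sketch of a check that would have caught the slip, using illustrative stubs rather than the Spring classes themselves.

// Stubs modelling the delegation slip: the deprecated resolver setter must
// forward to the factory bean's matching property. Illustrative only.
class FactoryBeanStub {
    boolean favorPathExtension;
    boolean favorParameter;
    void setFavorPathExtension(boolean b) { this.favorPathExtension = b; }
    void setFavorParameter(boolean b) { this.favorParameter = b; }
}

public class DelegationCheck {
    static final FactoryBeanStub factory = new FactoryBeanStub();

    @Deprecated
    static void setFavorPathExtension(boolean favorPathExtension) {
        factory.setFavorPathExtension(favorPathExtension); // was: factory.setFavorParameter(...)
    }

    public static void main(String[] args) {
        setFavorPathExtension(true);
        assert factory.favorPathExtension : "path-extension flag must be forwarded";
        assert !factory.favorParameter : "parameter flag must stay untouched"; // failed before the fix
        System.out.println("delegation ok");
    }
}

Run with java -ea DelegationCheck; before the fix the second assertion would trip, because favoring path extensions silently enabled the request-parameter strategy instead.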
0c6b38b0b5749057d6e9dcb5f7917f27e6542fc3
spring-framework
DataSourceUtils lets timeout exceptions through- even for setReadOnly calls (revised; SPR-7226)--
c
https://github.com/spring-projects/spring-framework
diff --git a/org.springframework.jdbc/src/test/java/org/springframework/jdbc/datasource/DataSourceTransactionManagerTests.java b/org.springframework.jdbc/src/test/java/org/springframework/jdbc/datasource/DataSourceTransactionManagerTests.java
index 07c6efaf3494..89ecf0bc66e3 100644
--- a/org.springframework.jdbc/src/test/java/org/springframework/jdbc/datasource/DataSourceTransactionManagerTests.java
+++ b/org.springframework.jdbc/src/test/java/org/springframework/jdbc/datasource/DataSourceTransactionManagerTests.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2002-2009 the original author or authors.
+ * Copyright 2002-2010 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -21,7 +21,6 @@ import java.sql.PreparedStatement;
 import java.sql.SQLException;
 import java.sql.Savepoint;
-
 import javax.sql.DataSource;

 import junit.framework.TestCase;
@@ -463,6 +462,8 @@ public void testParticipatingTransactionWithIncompatibleReadOnly() throws Except
        conControl.setReturnValue(false, 1);
        con.rollback();
        conControl.setVoidCallable(1);
+       con.setReadOnly(true);
+       conControl.setThrowable(new SQLException("read-only not supported"), 1);
        con.isReadOnly();
        conControl.setReturnValue(false, 1);
        con.close();
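The expectations added above stub Connection.setReadOnly(true) to throw and still drive the transaction through its normal rollback path. Per the commit message, DataSourceUtils tolerates such read-only failures but lets timeout exceptions propagate; the sketch below shows that catch-and-classify pattern, where the timeout check is an illustrative stand-in for whatever classification the real code performs.

import java.sql.Connection;
import java.sql.SQLException;

// Sketch of "tolerate setReadOnly failures, but let timeouts through".
final class ReadOnlyHints {
    static void applyReadOnlyHint(Connection con) throws SQLException {
        try {
            con.setReadOnly(true);
        } catch (SQLException ex) {
            if (isTimeout(ex)) {
                throw ex; // a timeout must abort the transaction, not be swallowed
            }
            // Otherwise the driver simply doesn't support read-only mode:
            // proceed on a read-write connection, as the test above assumes
            // when it stubs setReadOnly to throw and still expects a rollback.
        }
    }

    private static boolean isTimeout(SQLException ex) {
        // Illustrative heuristic only: a timeout subclass or SQLState class '08'.
        return ex instanceof java.sql.SQLTimeoutException
                || (ex.getSQLState() != null && ex.getSQLState().startsWith("08"));
    }
}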
6354d1bae0e3f6dbb519291b328b74b3200afd8f
kotlin
Custom equals operator (includes little patch to- frontend)---KT-2354 fixed-
c
https://github.com/JetBrains/kotlin
diff --git a/compiler/frontend/src/org/jetbrains/jet/lang/types/expressions/BasicExpressionTypingVisitor.java b/compiler/frontend/src/org/jetbrains/jet/lang/types/expressions/BasicExpressionTypingVisitor.java index 81065acdc19e2..c91711ef87058 100644 --- a/compiler/frontend/src/org/jetbrains/jet/lang/types/expressions/BasicExpressionTypingVisitor.java +++ b/compiler/frontend/src/org/jetbrains/jet/lang/types/expressions/BasicExpressionTypingVisitor.java @@ -21,7 +21,6 @@ import com.intellij.lang.ASTNode; import com.intellij.psi.PsiElement; import com.intellij.psi.tree.IElementType; -import com.intellij.util.ObjectUtils; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.jet.JetNodeTypes; @@ -958,6 +957,7 @@ else if (OperatorConventions.COMPARISON_OPERATIONS.contains(operationType)) { if (resolutionResults.isSuccess()) { FunctionDescriptor equals = resolutionResults.getResultingCall().getResultingDescriptor(); context.trace.record(REFERENCE_TARGET, operationSign, equals); + context.trace.record(RESOLVED_CALL, operationSign, resolutionResults.getResultingCall()); if (ensureBooleanResult(operationSign, name, equals.getReturnType(), context)) { ensureNonemptyIntersectionOfOperandTypes(expression, context); } diff --git a/js/js.tests/test/org/jetbrains/k2js/test/semantics/EqualsTest.java b/js/js.tests/test/org/jetbrains/k2js/test/semantics/EqualsTest.java index 67c073bfb8989..7c78890e80d85 100644 --- a/js/js.tests/test/org/jetbrains/k2js/test/semantics/EqualsTest.java +++ b/js/js.tests/test/org/jetbrains/k2js/test/semantics/EqualsTest.java @@ -22,7 +22,7 @@ public EqualsTest() { super("equals/"); } - public void TODO_testCustomEqualsMethod() throws Exception { + public void testCustomEqualsMethod() throws Exception { fooBoxTest(); } diff --git a/js/js.translator/src/org/jetbrains/k2js/translate/operation/BinaryOperationTranslator.java b/js/js.translator/src/org/jetbrains/k2js/translate/operation/BinaryOperationTranslator.java index 5389fae963916..c55be86d03afb 100644 --- a/js/js.translator/src/org/jetbrains/k2js/translate/operation/BinaryOperationTranslator.java +++ b/js/js.translator/src/org/jetbrains/k2js/translate/operation/BinaryOperationTranslator.java @@ -39,8 +39,8 @@ import static org.jetbrains.k2js.translate.operation.CompareToTranslator.isCompareToCall; import static org.jetbrains.k2js.translate.utils.BindingUtils.getFunctionDescriptorForOperationExpression; import static org.jetbrains.k2js.translate.utils.BindingUtils.getResolvedCall; -import static org.jetbrains.k2js.translate.utils.JsDescriptorUtils.isEquals; import static org.jetbrains.k2js.translate.utils.JsAstUtils.not; +import static org.jetbrains.k2js.translate.utils.JsDescriptorUtils.isEquals; import static org.jetbrains.k2js.translate.utils.PsiUtils.*; import static org.jetbrains.k2js.translate.utils.TranslationUtils.*; @@ -52,13 +52,13 @@ public final class BinaryOperationTranslator extends AbstractTranslator { @NotNull public static JsExpression translate(@NotNull JetBinaryExpression expression, - @NotNull TranslationContext context) { + @NotNull TranslationContext context) { return (new BinaryOperationTranslator(expression, context).translate()); } @NotNull /*package*/ static JsExpression translateAsOverloadedCall(@NotNull JetBinaryExpression expression, - @NotNull TranslationContext context) { + @NotNull TranslationContext context) { return (new BinaryOperationTranslator(expression, context)).translateAsOverloadedBinaryOperation(); } @@ -69,11 +69,11 @@ public static 
JsExpression translate(@NotNull JetBinaryExpression expression, private final FunctionDescriptor operationDescriptor; private BinaryOperationTranslator(@NotNull JetBinaryExpression expression, - @NotNull TranslationContext context) { + @NotNull TranslationContext context) { super(context); this.expression = expression; this.operationDescriptor = - getFunctionDescriptorForOperationExpression(bindingContext(), expression); + getFunctionDescriptorForOperationExpression(bindingContext(), expression); } @NotNull @@ -91,9 +91,9 @@ private JsExpression translate() { return CompareToTranslator.translate(expression, context()); } assert operationDescriptor != null : - "Overloadable operations must have not null descriptor"; - if (isEquals(operationDescriptor)) { - return translateAsEqualsCall(); + "Overloadable operations must have not null descriptor"; + if (isEquals(operationDescriptor) && context().intrinsics().isIntrinsic(operationDescriptor)) { + return translateAsEqualsIntrinsic(); } return translateAsOverloadedBinaryOperation(); } @@ -116,7 +116,7 @@ private boolean isNotOverloadable() { } @NotNull - private JsExpression translateAsEqualsCall() { + private JsExpression translateAsEqualsIntrinsic() { assert operationDescriptor != null : "Equals operation must resolve to descriptor."; EqualsIntrinsic intrinsic = context().intrinsics().getEqualsIntrinsic(operationDescriptor); intrinsic.setNegated(expression.getOperationToken().equals(JetTokens.EXCLEQ)); @@ -137,10 +137,8 @@ private JsExpression translateAsUnOverloadableBinaryOperation() { @NotNull private JsExpression translateAsOverloadedBinaryOperation() { CallBuilder callBuilder = setReceiverAndArguments(); - ResolvedCall<?> resolvedCall1 = - getResolvedCall(bindingContext(), expression.getOperationReference()); - JsExpression result = callBuilder.resolvedCall(resolvedCall1) - .type(CallType.NORMAL).translate(); + ResolvedCall<?> resolvedCall = getResolvedCall(bindingContext(), expression.getOperationReference()); + JsExpression result = callBuilder.resolvedCall(resolvedCall).type(CallType.NORMAL).translate(); return mayBeWrapWithNegation(result); } @@ -161,7 +159,7 @@ private CallBuilder setReceiverAndArguments() { @NotNull private JsExpression mayBeWrapWithNegation(@NotNull JsExpression result) { - if (isNotInOperation(expression)) { + if (isNegatedOperation(expression)) { return not(result); } else { diff --git a/js/js.translator/src/org/jetbrains/k2js/translate/utils/PsiUtils.java b/js/js.translator/src/org/jetbrains/k2js/translate/utils/PsiUtils.java index e4f08b3a55a7e..7aeb7b2882d31 100644 --- a/js/js.translator/src/org/jetbrains/k2js/translate/utils/PsiUtils.java +++ b/js/js.translator/src/org/jetbrains/k2js/translate/utils/PsiUtils.java @@ -93,6 +93,10 @@ public static boolean isNotInOperation(@NotNull JetBinaryExpression binaryExpres return (binaryExpression.getOperationToken() == JetTokens.NOT_IN); } + public static boolean isNegatedOperation(@NotNull JetBinaryExpression binaryExpression) { + return (binaryExpression.getOperationToken() == JetTokens.EXCLEQ) || isNotInOperation(binaryExpression); + } + private static boolean isInOperation(@NotNull JetBinaryExpression binaryExpression) { return (binaryExpression.getOperationToken() == JetTokens.IN_KEYWORD); } diff --git a/js/js.translator/testFiles/expression/equals/cases/customEqualsMethod.kt b/js/js.translator/testFiles/expression/equals/cases/customEqualsMethod.kt index 70c58bf634fe9..4dae1afc721ba 100644 --- 
a/js/js.translator/testFiles/expression/equals/cases/customEqualsMethod.kt +++ b/js/js.translator/testFiles/expression/equals/cases/customEqualsMethod.kt @@ -1,9 +1,12 @@ package foo class Foo(val name: String) { - public fun equals(that: Foo): Boolean { - return this.name == that.name - } + public fun equals(that: Any?): Boolean { + if (that !is Foo) { + return false + } + return this.name == that.name + } } fun box() : Boolean {
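The translator change above routes equality through the equals intrinsic only when the resolved descriptor really is intrinsic, falls back to a regular overloaded call otherwise, and wraps the result in a negation for != and !in (the new isNegatedOperation()). A compact Java sketch of that dispatch follows; all types and the emitted JS snippets are placeholders, not the k2js translator's actual classes or output.

// Sketch of the dispatch introduced above: intrinsic equals vs. a plain
// overloaded call, with negation applied for '!=' and '!in'.
enum Op { EQEQ, EXCLEQ, IN, NOT_IN }

final class EqualsDispatch {
    static String translate(Op op, boolean descriptorIsEquals, boolean isIntrinsic,
                            String left, String right) {
        String result;
        if (descriptorIsEquals && isIntrinsic) {
            result = left + " == " + right;           // placeholder for the intrinsic translation
        } else {
            result = left + ".equals(" + right + ")"; // placeholder for the overloaded call
        }
        return isNegated(op) ? "!(" + result + ")" : result; // mirrors mayBeWrapWithNegation()
    }

    static boolean isNegated(Op op) {
        return op == Op.EXCLEQ || op == Op.NOT_IN; // mirrors isNegatedOperation()
    }
}

Routing non-intrinsic equals through the overloaded-call path is what makes the user-defined equals(Any?) in the customEqualsMethod.kt test above actually get invoked, which is why the TODO_ prefix could be dropped from the test.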
161c9260542aead8826db802b524a75cb6fb8932
spring-framework
SPR-5624 - A default HandlerExceptionResolver- that resolves standard Spring exceptions--
p
https://github.com/spring-projects/spring-framework
diff --git a/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/DispatcherServlet.java b/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/DispatcherServlet.java index f245b41b4bf1..231967867fbd 100644 --- a/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/DispatcherServlet.java +++ b/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/DispatcherServlet.java @@ -22,7 +22,6 @@ import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; -import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Locale; @@ -48,7 +47,6 @@ import org.springframework.ui.context.ThemeSource; import org.springframework.util.ClassUtils; import org.springframework.util.StringUtils; -import org.springframework.web.HttpRequestMethodNotSupportedException; import org.springframework.web.multipart.MultipartException; import org.springframework.web.multipart.MultipartHttpServletRequest; import org.springframework.web.multipart.MultipartResolver; @@ -57,88 +55,65 @@ import org.springframework.web.util.WebUtils; /** - * Central dispatcher for HTTP request handlers/controllers, - * e.g. for web UI controllers or HTTP-based remote service exporters. - * Dispatches to registered handlers for processing a web request, - * providing convenient mapping and exception handling facilities. + * Central dispatcher for HTTP request handlers/controllers, e.g. for web UI controllers or HTTP-based remote service + * exporters. Dispatches to registered handlers for processing a web request, providing convenient mapping and exception + * handling facilities. * - * <p>This servlet is very flexible: It can be used with just about any workflow, - * with the installation of the appropriate adapter classes. It offers the - * following functionality that distinguishes it from other request-driven + * <p>This servlet is very flexible: It can be used with just about any workflow, with the installation of the + * appropriate adapter classes. It offers the following functionality that distinguishes it from other request-driven * web MVC frameworks: * - * <ul> - * <li>It is based around a JavaBeans configuration mechanism. + * <ul> <li>It is based around a JavaBeans configuration mechanism. * - * <li>It can use any {@link HandlerMapping} implementation - pre-built or provided - * as part of an application - to control the routing of requests to handler objects. - * Default is {@link org.springframework.web.servlet.handler.BeanNameUrlHandlerMapping} and - * {@link org.springframework.web.servlet.mvc.annotation.DefaultAnnotationHandlerMapping}. - * HandlerMapping objects can be defined as beans in the servlet's application context, - * implementing the HandlerMapping interface, overriding the default HandlerMapping if present. - * HandlerMappings can be given any bean name (they are tested by type). + * <li>It can use any {@link HandlerMapping} implementation - pre-built or provided as part of an application - to + * control the routing of requests to handler objects. Default is {@link org.springframework.web.servlet.handler.BeanNameUrlHandlerMapping} + * and {@link org.springframework.web.servlet.mvc.annotation.DefaultAnnotationHandlerMapping}. HandlerMapping objects + * can be defined as beans in the servlet's application context, implementing the HandlerMapping interface, overriding + * the default HandlerMapping if present. 
HandlerMappings can be given any bean name (they are tested by type). * - * <li>It can use any {@link HandlerAdapter}; this allows for using any handler interface. - * Default adapters are {@link org.springframework.web.servlet.mvc.HttpRequestHandlerAdapter}, - * {@link org.springframework.web.servlet.mvc.SimpleControllerHandlerAdapter}, - * for Spring's {@link org.springframework.web.HttpRequestHandler} and - * {@link org.springframework.web.servlet.mvc.Controller} interfaces, respectively. A default - * {@link org.springframework.web.servlet.mvc.annotation.AnnotationMethodHandlerAdapter} - * will be registered as well. HandlerAdapter objects can be added as beans in the - * application context, overriding the default HandlerAdapters. Like HandlerMappings, - * HandlerAdapters can be given any bean name (they are tested by type). + * <li>It can use any {@link HandlerAdapter}; this allows for using any handler interface. Default adapters are {@link + * org.springframework.web.servlet.mvc.HttpRequestHandlerAdapter}, {@link org.springframework.web.servlet.mvc.SimpleControllerHandlerAdapter}, + * for Spring's {@link org.springframework.web.HttpRequestHandler} and {@link org.springframework.web.servlet.mvc.Controller} + * interfaces, respectively. A default {@link org.springframework.web.servlet.mvc.annotation.AnnotationMethodHandlerAdapter} + * will be registered as well. HandlerAdapter objects can be added as beans in the application context, overriding the + * default HandlerAdapters. Like HandlerMappings, HandlerAdapters can be given any bean name (they are tested by type). * - * <li>The dispatcher's exception resolution strategy can be specified via a - * {@link HandlerExceptionResolver}, for example mapping certain exceptions to - * error pages. Default is none. Additional HandlerExceptionResolvers can be added - * through the application context. HandlerExceptionResolver can be given any - * bean name (they are tested by type). + * <li>The dispatcher's exception resolution strategy can be specified via a {@link HandlerExceptionResolver}, for + * example mapping certain exceptions to error pages. Default is none. Additional HandlerExceptionResolvers can be added + * through the application context. HandlerExceptionResolver can be given any bean name (they are tested by type). * - * <li>Its view resolution strategy can be specified via a {@link ViewResolver} - * implementation, resolving symbolic view names into View objects. Default is - * {@link org.springframework.web.servlet.view.InternalResourceViewResolver}. - * ViewResolver objects can be added as beans in the application context, - * overriding the default ViewResolver. ViewResolvers can be given any bean name - * (they are tested by type). + * <li>Its view resolution strategy can be specified via a {@link ViewResolver} implementation, resolving symbolic view + * names into View objects. Default is {@link org.springframework.web.servlet.view.InternalResourceViewResolver}. + * ViewResolver objects can be added as beans in the application context, overriding the default ViewResolver. + * ViewResolvers can be given any bean name (they are tested by type). * - * <li>If a {@link View} or view name is not supplied by the user, then the configured - * {@link RequestToViewNameTranslator} will translate the current request into a - * view name. The corresponding bean name is "viewNameTranslator"; the default is - * {@link org.springframework.web.servlet.view.DefaultRequestToViewNameTranslator}. 
+ * <li>If a {@link View} or view name is not supplied by the user, then the configured {@link
+ * RequestToViewNameTranslator} will translate the current request into a view name. The corresponding bean name is
+ * "viewNameTranslator"; the default is {@link org.springframework.web.servlet.view.DefaultRequestToViewNameTranslator}.
 *
- * <li>The dispatcher's strategy for resolving multipart requests is determined by
- * a {@link org.springframework.web.multipart.MultipartResolver} implementation.
- * Implementations for Jakarta Commons FileUpload and Jason Hunter's COS are
- * included; the typical choise is
- * {@link org.springframework.web.multipart.commons.CommonsMultipartResolver}.
+ * <li>The dispatcher's strategy for resolving multipart requests is determined by a {@link
+ * org.springframework.web.multipart.MultipartResolver} implementation. Implementations for Jakarta Commons FileUpload
+ * and Jason Hunter's COS are included; the typical choice is {@link org.springframework.web.multipart.commons.CommonsMultipartResolver}.
 * The MultipartResolver bean name is "multipartResolver"; default is none.
 *
- * <li>Its locale resolution strategy is determined by a {@link LocaleResolver}.
- * Out-of-the-box implementations work via HTTP accept header, cookie, or session.
- * The LocaleResolver bean name is "localeResolver"; default is
- * {@link org.springframework.web.servlet.i18n.AcceptHeaderLocaleResolver}.
+ * <li>Its locale resolution strategy is determined by a {@link LocaleResolver}. Out-of-the-box implementations work via
+ * HTTP accept header, cookie, or session. The LocaleResolver bean name is "localeResolver"; default is {@link
+ * org.springframework.web.servlet.i18n.AcceptHeaderLocaleResolver}.
 *
- * <li>Its theme resolution strategy is determined by a {@link ThemeResolver}.
- * Implementations for a fixed theme and for cookie and session storage are included.
- * The ThemeResolver bean name is "themeResolver"; default is
- * {@link org.springframework.web.servlet.theme.FixedThemeResolver}.
- * </ul>
+ * <li>Its theme resolution strategy is determined by a {@link ThemeResolver}. Implementations for a fixed theme and for
+ * cookie and session storage are included. The ThemeResolver bean name is "themeResolver"; default is {@link
+ * org.springframework.web.servlet.theme.FixedThemeResolver}. </ul>
 *
- * <p><b>NOTE: The <code>@RequestMapping</code> annotation will only be processed
- * if a corresponding <code>HandlerMapping</code> (for type level annotations)
- * and/or <code>HandlerAdapter</code> (for method level annotations)
- * is present in the dispatcher.</b> This is the case by default.
- * However, if you are defining custom <code>HandlerMappings</code> or
- * <code>HandlerAdapters</code>, then you need to make sure that a
- * corresponding custom <code>DefaultAnnotationHandlerMapping</code>
- * and/or <code>AnnotationMethodHandlerAdapter</code> is defined as well
- * - provided that you intend to use <code>@RequestMapping</code>.
+ * <p><b>NOTE: The <code>@RequestMapping</code> annotation will only be processed if a corresponding
+ * <code>HandlerMapping</code> (for type level annotations) and/or <code>HandlerAdapter</code> (for method level
+ * annotations) is present in the dispatcher.</b> This is the case by default.
However, if you are defining custom + * <code>HandlerMappings</code> or <code>HandlerAdapters</code>, then you need to make sure that a corresponding custom + * <code>DefaultAnnotationHandlerMapping</code> and/or <code>AnnotationMethodHandlerAdapter</code> is defined as well - + * provided that you intend to use <code>@RequestMapping</code>. * - * <p><b>A web application can define any number of DispatcherServlets.</b> - * Each servlet will operate in its own namespace, loading its own application - * context with mappings, handlers, etc. Only the root application context - * as loaded by {@link org.springframework.web.context.ContextLoaderListener}, - * if any, will be shared. + * <p><b>A web application can define any number of DispatcherServlets.</b> Each servlet will operate in its own + * namespace, loading its own application context with mappings, handlers, etc. Only the root application context as + * loaded by {@link org.springframework.web.context.ContextLoaderListener}, if any, will be shared. * * @author Rod Johnson * @author Juergen Hoeller @@ -149,102 +124,92 @@ */ public class DispatcherServlet extends FrameworkServlet { - /** - * Well-known name for the MultipartResolver object in the bean factory for this namespace. - */ + /** Well-known name for the MultipartResolver object in the bean factory for this namespace. */ public static final String MULTIPART_RESOLVER_BEAN_NAME = "multipartResolver"; - /** - * Well-known name for the LocaleResolver object in the bean factory for this namespace. - */ + /** Well-known name for the LocaleResolver object in the bean factory for this namespace. */ public static final String LOCALE_RESOLVER_BEAN_NAME = "localeResolver"; - /** - * Well-known name for the ThemeResolver object in the bean factory for this namespace. - */ + /** Well-known name for the ThemeResolver object in the bean factory for this namespace. */ public static final String THEME_RESOLVER_BEAN_NAME = "themeResolver"; /** - * Well-known name for the HandlerMapping object in the bean factory for this namespace. - * Only used when "detectAllHandlerMappings" is turned off. + * Well-known name for the HandlerMapping object in the bean factory for this namespace. Only used when + * "detectAllHandlerMappings" is turned off. + * * @see #setDetectAllHandlerMappings */ public static final String HANDLER_MAPPING_BEAN_NAME = "handlerMapping"; /** - * Well-known name for the HandlerAdapter object in the bean factory for this namespace. - * Only used when "detectAllHandlerAdapters" is turned off. + * Well-known name for the HandlerAdapter object in the bean factory for this namespace. Only used when + * "detectAllHandlerAdapters" is turned off. + * * @see #setDetectAllHandlerAdapters */ public static final String HANDLER_ADAPTER_BEAN_NAME = "handlerAdapter"; /** - * Well-known name for the HandlerExceptionResolver object in the bean factory for this - * namespace. Only used when "detectAllHandlerExceptionResolvers" is turned off. + * Well-known name for the HandlerExceptionResolver object in the bean factory for this namespace. Only used when + * "detectAllHandlerExceptionResolvers" is turned off. + * * @see #setDetectAllHandlerExceptionResolvers */ public static final String HANDLER_EXCEPTION_RESOLVER_BEAN_NAME = "handlerExceptionResolver"; - /** - * Well-known name for the RequestToViewNameTranslator object in the bean factory for - * this namespace. - */ + /** Well-known name for the RequestToViewNameTranslator object in the bean factory for this namespace. 
*/ public static final String REQUEST_TO_VIEW_NAME_TRANSLATOR_BEAN_NAME = "viewNameTranslator"; /** - * Well-known name for the ViewResolver object in the bean factory for this namespace. - * Only used when "detectAllViewResolvers" is turned off. + * Well-known name for the ViewResolver object in the bean factory for this namespace. Only used when + * "detectAllViewResolvers" is turned off. + * * @see #setDetectAllViewResolvers */ public static final String VIEW_RESOLVER_BEAN_NAME = "viewResolver"; - /** - * Request attribute to hold the currently chosen HandlerExecutionChain. - * Only used for internal optimizations. - */ + /** Request attribute to hold the currently chosen HandlerExecutionChain. Only used for internal optimizations. */ public static final String HANDLER_EXECUTION_CHAIN_ATTRIBUTE = DispatcherServlet.class.getName() + ".HANDLER"; /** - * Request attribute to hold the current web application context. - * Otherwise only the global web app context is obtainable by tags etc. + * Request attribute to hold the current web application context. Otherwise only the global web app context is + * obtainable by tags etc. + * * @see org.springframework.web.servlet.support.RequestContextUtils#getWebApplicationContext */ public static final String WEB_APPLICATION_CONTEXT_ATTRIBUTE = DispatcherServlet.class.getName() + ".CONTEXT"; /** * Request attribute to hold the current LocaleResolver, retrievable by views. + * * @see org.springframework.web.servlet.support.RequestContextUtils#getLocaleResolver */ public static final String LOCALE_RESOLVER_ATTRIBUTE = DispatcherServlet.class.getName() + ".LOCALE_RESOLVER"; /** * Request attribute to hold the current ThemeResolver, retrievable by views. + * * @see org.springframework.web.servlet.support.RequestContextUtils#getThemeResolver */ public static final String THEME_RESOLVER_ATTRIBUTE = DispatcherServlet.class.getName() + ".THEME_RESOLVER"; /** * Request attribute to hold the current ThemeSource, retrievable by views. + * * @see org.springframework.web.servlet.support.RequestContextUtils#getThemeSource */ public static final String THEME_SOURCE_ATTRIBUTE = DispatcherServlet.class.getName() + ".THEME_SOURCE"; - - /** - * Log category to use when no mapped handler is found for a request. - */ + /** Log category to use when no mapped handler is found for a request. */ public static final String PAGE_NOT_FOUND_LOG_CATEGORY = "org.springframework.web.servlet.PageNotFound"; /** - * Name of the class path resource (relative to the DispatcherServlet class) - * that defines DispatcherServlet's default strategy names. + * Name of the class path resource (relative to the DispatcherServlet class) that defines DispatcherServlet's default + * strategy names. */ private static final String DEFAULT_STRATEGIES_PATH = "DispatcherServlet.properties"; - - /** - * Additional logger to use when no mapped handler is found for a request. - */ + /** Additional logger to use when no mapped handler is found for a request. */ protected static final Log pageNotFoundLogger = LogFactory.getLog(PAGE_NOT_FOUND_LOG_CATEGORY); private static final Properties defaultStrategies; @@ -262,7 +227,6 @@ public class DispatcherServlet extends FrameworkServlet { } } - /** Detect all HandlerMappings or just expect "handlerMapping" bean? */ private boolean detectAllHandlerMappings = true; @@ -278,7 +242,6 @@ public class DispatcherServlet extends FrameworkServlet { /** Perform cleanup of request attributes after include request? 
*/ private boolean cleanupAfterInclude = true; - /** MultipartResolver used by this servlet */ private MultipartResolver multipartResolver; @@ -303,80 +266,65 @@ public class DispatcherServlet extends FrameworkServlet { /** List of ViewResolvers used by this servlet */ private List<ViewResolver> viewResolvers; - /** - * Set whether to detect all HandlerMapping beans in this servlet's context. - * Else, just a single bean with name "handlerMapping" will be expected. - * <p>Default is "true". Turn this off if you want this servlet to use a - * single HandlerMapping, despite multiple HandlerMapping beans being - * defined in the context. + * Set whether to detect all HandlerMapping beans in this servlet's context. Else, just a single bean with name + * "handlerMapping" will be expected. <p>Default is "true". Turn this off if you want this servlet to use a single + * HandlerMapping, despite multiple HandlerMapping beans being defined in the context. */ public void setDetectAllHandlerMappings(boolean detectAllHandlerMappings) { this.detectAllHandlerMappings = detectAllHandlerMappings; } /** - * Set whether to detect all HandlerAdapter beans in this servlet's context. - * Else, just a single bean with name "handlerAdapter" will be expected. - * <p>Default is "true". Turn this off if you want this servlet to use a - * single HandlerAdapter, despite multiple HandlerAdapter beans being - * defined in the context. + * Set whether to detect all HandlerAdapter beans in this servlet's context. Else, just a single bean with name + * "handlerAdapter" will be expected. <p>Default is "true". Turn this off if you want this servlet to use a single + * HandlerAdapter, despite multiple HandlerAdapter beans being defined in the context. */ public void setDetectAllHandlerAdapters(boolean detectAllHandlerAdapters) { this.detectAllHandlerAdapters = detectAllHandlerAdapters; } /** - * Set whether to detect all HandlerExceptionResolver beans in this servlet's context. - * Else, just a single bean with name "handlerExceptionResolver" will be expected. - * <p>Default is "true". Turn this off if you want this servlet to use a - * single HandlerExceptionResolver, despite multiple HandlerExceptionResolver - * beans being defined in the context. + * Set whether to detect all HandlerExceptionResolver beans in this servlet's context. Else, just a single bean with + * name "handlerExceptionResolver" will be expected. <p>Default is "true". Turn this off if you want this servlet to + * use a single HandlerExceptionResolver, despite multiple HandlerExceptionResolver beans being defined in the + * context. */ public void setDetectAllHandlerExceptionResolvers(boolean detectAllHandlerExceptionResolvers) { this.detectAllHandlerExceptionResolvers = detectAllHandlerExceptionResolvers; } /** - * Set whether to detect all ViewResolver beans in this servlet's context. - * Else, just a single bean with name "viewResolver" will be expected. - * <p>Default is "true". Turn this off if you want this servlet to use a - * single ViewResolver, despite multiple ViewResolver beans being - * defined in the context. + * Set whether to detect all ViewResolver beans in this servlet's context. Else, just a single bean with name + * "viewResolver" will be expected. <p>Default is "true". Turn this off if you want this servlet to use a single + * ViewResolver, despite multiple ViewResolver beans being defined in the context. 
*/ public void setDetectAllViewResolvers(boolean detectAllViewResolvers) { this.detectAllViewResolvers = detectAllViewResolvers; } /** - * Set whether to perform cleanup of request attributes after an include request, - * that is, whether to reset the original state of all request attributes after - * the DispatcherServlet has processed within an include request. Else, just the - * DispatcherServlet's own request attributes will be reset, but not model - * attributes for JSPs or special attributes set by views (for example, JSTL's). - * <p>Default is "true", which is strongly recommended. Views should not rely on - * request attributes having been set by (dynamic) includes. This allows JSP views - * rendered by an included controller to use any model attributes, even with the - * same names as in the main JSP, without causing side effects. Only turn this - * off for special needs, for example to deliberately allow main JSPs to access - * attributes from JSP views rendered by an included controller. + * Set whether to perform cleanup of request attributes after an include request, that is, whether to reset the + * original state of all request attributes after the DispatcherServlet has processed within an include request. Else, + * just the DispatcherServlet's own request attributes will be reset, but not model attributes for JSPs or special + * attributes set by views (for example, JSTL's). <p>Default is "true", which is strongly recommended. Views should not + * rely on request attributes having been set by (dynamic) includes. This allows JSP views rendered by an included + * controller to use any model attributes, even with the same names as in the main JSP, without causing side effects. + * Only turn this off for special needs, for example to deliberately allow main JSPs to access attributes from JSP + * views rendered by an included controller. */ public void setCleanupAfterInclude(boolean cleanupAfterInclude) { this.cleanupAfterInclude = cleanupAfterInclude; } - - /** - * This implementation calls {@link #initStrategies}. - */ + /** This implementation calls {@link #initStrategies}. */ @Override protected void onRefresh(ApplicationContext context) throws BeansException { initStrategies(context); } /** - * Initialize the strategy objects that this servlet uses. - * <p>May be overridden in subclasses in order to initialize + * Initialize the strategy objects that this servlet uses. <p>May be overridden in subclasses in order to initialize * further strategy objects. */ protected void initStrategies(ApplicationContext context) { @@ -391,8 +339,7 @@ protected void initStrategies(ApplicationContext context) { } /** - * Initialize the MultipartResolver used by this class. - * <p>If no bean is defined with the given name in the BeanFactory + * Initialize the MultipartResolver used by this class. <p>If no bean is defined with the given name in the BeanFactory * for this namespace, no multipart handling is provided. */ private void initMultipartResolver(ApplicationContext context) { @@ -406,15 +353,14 @@ private void initMultipartResolver(ApplicationContext context) { // Default is no multipart resolver. this.multipartResolver = null; if (logger.isDebugEnabled()) { - logger.debug("Unable to locate MultipartResolver with name '" + MULTIPART_RESOLVER_BEAN_NAME + + logger.debug("Unable to locate MultipartResolver with name '" + MULTIPART_RESOLVER_BEAN_NAME + "': no multipart request handling provided"); } } } /** - * Initialize the LocaleResolver used by this class. 
- * <p>If no bean is defined with the given name in the BeanFactory + * Initialize the LocaleResolver used by this class. <p>If no bean is defined with the given name in the BeanFactory * for this namespace, we default to AcceptHeaderLocaleResolver. */ private void initLocaleResolver(ApplicationContext context) { @@ -435,9 +381,8 @@ private void initLocaleResolver(ApplicationContext context) { } /** - * Initialize the ThemeResolver used by this class. - * <p>If no bean is defined with the given name in the BeanFactory - * for this namespace, we default to a FixedThemeResolver. + * Initialize the ThemeResolver used by this class. <p>If no bean is defined with the given name in the BeanFactory for + * this namespace, we default to a FixedThemeResolver. */ private void initThemeResolver(ApplicationContext context) { try { @@ -450,24 +395,24 @@ private void initThemeResolver(ApplicationContext context) { // We need to use the default. this.themeResolver = getDefaultStrategy(context, ThemeResolver.class); if (logger.isDebugEnabled()) { - logger.debug("Unable to locate ThemeResolver with name '" + THEME_RESOLVER_BEAN_NAME + - "': using default [" + this.themeResolver + "]"); + logger.debug( + "Unable to locate ThemeResolver with name '" + THEME_RESOLVER_BEAN_NAME + "': using default [" + + this.themeResolver + "]"); } } } /** - * Initialize the HandlerMappings used by this class. - * <p>If no HandlerMapping beans are defined in the BeanFactory - * for this namespace, we default to BeanNameUrlHandlerMapping. + * Initialize the HandlerMappings used by this class. <p>If no HandlerMapping beans are defined in the BeanFactory for + * this namespace, we default to BeanNameUrlHandlerMapping. */ private void initHandlerMappings(ApplicationContext context) { this.handlerMappings = null; if (this.detectAllHandlerMappings) { // Find all HandlerMappings in the ApplicationContext, including ancestor contexts. - Map<String, HandlerMapping> matchingBeans = BeanFactoryUtils.beansOfTypeIncludingAncestors( - context, HandlerMapping.class, true, false); + Map<String, HandlerMapping> matchingBeans = + BeanFactoryUtils.beansOfTypeIncludingAncestors(context, HandlerMapping.class, true, false); if (!matchingBeans.isEmpty()) { this.handlerMappings = new ArrayList<HandlerMapping>(matchingBeans.values()); // We keep HandlerMappings in sorted order. @@ -495,17 +440,16 @@ private void initHandlerMappings(ApplicationContext context) { } /** - * Initialize the HandlerAdapters used by this class. - * <p>If no HandlerAdapter beans are defined in the BeanFactory - * for this namespace, we default to SimpleControllerHandlerAdapter. + * Initialize the HandlerAdapters used by this class. <p>If no HandlerAdapter beans are defined in the BeanFactory for + * this namespace, we default to SimpleControllerHandlerAdapter. */ private void initHandlerAdapters(ApplicationContext context) { this.handlerAdapters = null; if (this.detectAllHandlerAdapters) { // Find all HandlerAdapters in the ApplicationContext, including ancestor contexts. - Map<String, HandlerAdapter> matchingBeans = BeanFactoryUtils.beansOfTypeIncludingAncestors( - context, HandlerAdapter.class, true, false); + Map<String, HandlerAdapter> matchingBeans = + BeanFactoryUtils.beansOfTypeIncludingAncestors(context, HandlerAdapter.class, true, false); if (!matchingBeans.isEmpty()) { this.handlerAdapters = new ArrayList<HandlerAdapter>(matchingBeans.values()); // We keep HandlerAdapters in sorted order. 
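Illustrative sketch, not part of this commit: the two init methods above detect strategy beans purely by type, so a custom HandlerMapping takes part in dispatching simply by being declared as a bean in the servlet's application context. The class below is hypothetical (its name and fixed-handler design are invented for illustration); it maps every request to one configured handler object.

import javax.servlet.http.HttpServletRequest;

import org.springframework.core.Ordered;
import org.springframework.web.servlet.HandlerExecutionChain;
import org.springframework.web.servlet.HandlerMapping;

public class FixedHandlerMapping implements HandlerMapping, Ordered {

	private final Object handler;

	public FixedHandlerMapping(Object handler) {
		this.handler = handler;
	}

	public HandlerExecutionChain getHandler(HttpServletRequest request) {
		// Every request resolves to the single configured handler object;
		// the DispatcherServlet then asks its HandlerAdapters which one supports it.
		return new HandlerExecutionChain(this.handler);
	}

	public int getOrder() {
		// Detected HandlerMappings are kept in sorted order, so this mapping is
		// consulted only after the default mappings have returned null.
		return Ordered.LOWEST_PRECEDENCE;
	}
}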
@@ -533,17 +477,16 @@ private void initHandlerAdapters(ApplicationContext context) { } /** - * Initialize the HandlerExceptionResolver used by this class. - * <p>If no bean is defined with the given name in the BeanFactory - * for this namespace, we default to no exception resolver. + * Initialize the HandlerExceptionResolver used by this class. <p>If no bean is defined with the given name in the + * BeanFactory for this namespace, we default to no exception resolver. */ private void initHandlerExceptionResolvers(ApplicationContext context) { this.handlerExceptionResolvers = null; if (this.detectAllHandlerExceptionResolvers) { // Find all HandlerExceptionResolvers in the ApplicationContext, including ancestor contexts. - Map<String, HandlerExceptionResolver> matchingBeans = BeanFactoryUtils.beansOfTypeIncludingAncestors( - context, HandlerExceptionResolver.class, true, false); + Map<String, HandlerExceptionResolver> matchingBeans = BeanFactoryUtils + .beansOfTypeIncludingAncestors(context, HandlerExceptionResolver.class, true, false); if (!matchingBeans.isEmpty()) { this.handlerExceptionResolvers = new ArrayList<HandlerExceptionResolver>(matchingBeans.values()); // We keep HandlerExceptionResolvers in sorted order. @@ -552,8 +495,8 @@ private void initHandlerExceptionResolvers(ApplicationContext context) { } else { try { - HandlerExceptionResolver her = context.getBean( - HANDLER_EXCEPTION_RESOLVER_BEAN_NAME, HandlerExceptionResolver.class); + HandlerExceptionResolver her = + context.getBean(HANDLER_EXCEPTION_RESOLVER_BEAN_NAME, HandlerExceptionResolver.class); this.handlerExceptionResolvers = Collections.singletonList(her); } catch (NoSuchBeanDefinitionException ex) { @@ -561,8 +504,8 @@ private void initHandlerExceptionResolvers(ApplicationContext context) { } } - // Just for consistency, check for default HandlerExceptionResolvers... - // There aren't any in usual scenarios. + // Ensure we have at least some HandlerExceptionResolvers, by registering + // default HandlerExceptionResolvers if no other resolvers are found. if (this.handlerExceptionResolvers == null) { this.handlerExceptionResolvers = getDefaultStrategies(context, HandlerExceptionResolver.class); if (logger.isDebugEnabled()) { @@ -572,13 +515,13 @@ private void initHandlerExceptionResolvers(ApplicationContext context) { } /** - * Initialize the RequestToViewNameTranslator used by this servlet instance. If no - * implementation is configured then we default to DefaultRequestToViewNameTranslator. + * Initialize the RequestToViewNameTranslator used by this servlet instance. If no implementation is configured then we + * default to DefaultRequestToViewNameTranslator. 
*/ private void initRequestToViewNameTranslator(ApplicationContext context) { try { - this.viewNameTranslator = context.getBean( - REQUEST_TO_VIEW_NAME_TRANSLATOR_BEAN_NAME, RequestToViewNameTranslator.class); + this.viewNameTranslator = + context.getBean(REQUEST_TO_VIEW_NAME_TRANSLATOR_BEAN_NAME, RequestToViewNameTranslator.class); if (logger.isDebugEnabled()) { logger.debug("Using RequestToViewNameTranslator [" + this.viewNameTranslator + "]"); } @@ -588,24 +531,23 @@ private void initRequestToViewNameTranslator(ApplicationContext context) { this.viewNameTranslator = getDefaultStrategy(context, RequestToViewNameTranslator.class); if (logger.isDebugEnabled()) { logger.debug("Unable to locate RequestToViewNameTranslator with name '" + - REQUEST_TO_VIEW_NAME_TRANSLATOR_BEAN_NAME + - "': using default [" + this.viewNameTranslator + "]"); + REQUEST_TO_VIEW_NAME_TRANSLATOR_BEAN_NAME + "': using default [" + this.viewNameTranslator + + "]"); } } } /** - * Initialize the ViewResolvers used by this class. - * <p>If no ViewResolver beans are defined in the BeanFactory - * for this namespace, we default to InternalResourceViewResolver. + * Initialize the ViewResolvers used by this class. <p>If no ViewResolver beans are defined in the BeanFactory for this + * namespace, we default to InternalResourceViewResolver. */ private void initViewResolvers(ApplicationContext context) { this.viewResolvers = null; if (this.detectAllViewResolvers) { // Find all ViewResolvers in the ApplicationContext, including ancestor contexts. - Map<String, ViewResolver> matchingBeans = BeanFactoryUtils.beansOfTypeIncludingAncestors( - context, ViewResolver.class, true, false); + Map<String, ViewResolver> matchingBeans = + BeanFactoryUtils.beansOfTypeIncludingAncestors(context, ViewResolver.class, true, false); if (!matchingBeans.isEmpty()) { this.viewResolvers = new ArrayList<ViewResolver>(matchingBeans.values()); // We keep ViewResolvers in sorted order. @@ -633,9 +575,9 @@ private void initViewResolvers(ApplicationContext context) { } /** - * Return this servlet's ThemeSource, if any; else return <code>null</code>. - * <p>Default is to return the WebApplicationContext as ThemeSource, - * provided that it implements the ThemeSource interface. + * Return this servlet's ThemeSource, if any; else return <code>null</code>. <p>Default is to return the + * WebApplicationContext as ThemeSource, provided that it implements the ThemeSource interface. + * * @return the ThemeSource, if any * @see #getWebApplicationContext() */ @@ -650,18 +592,18 @@ public final ThemeSource getThemeSource() { /** * Obtain this servlet's MultipartResolver, if any. - * @return the MultipartResolver used by this servlet, or <code>null</code> - * if none (indicating that no multipart support is available) + * + * @return the MultipartResolver used by this servlet, or <code>null</code> if none (indicating that no multipart + * support is available) */ public final MultipartResolver getMultipartResolver() { return this.multipartResolver; } - /** - * Return the default strategy object for the given strategy interface. - * <p>The default implementation delegates to {@link #getDefaultStrategies}, - * expecting a single object in the list. + * Return the default strategy object for the given strategy interface. <p>The default implementation delegates to + * {@link #getDefaultStrategies}, expecting a single object in the list. 
+ * * @param context the current WebApplicationContext * @param strategyInterface the strategy interface * @return the corresponding strategy object @@ -677,10 +619,10 @@ protected <T> T getDefaultStrategy(ApplicationContext context, Class<T> strategy } /** - * Create a List of default strategy objects for the given strategy interface. - * <p>The default implementation uses the "DispatcherServlet.properties" file - * (in the same package as the DispatcherServlet class) to determine the class names. - * It instantiates the strategy objects through the context's BeanFactory. + * Create a List of default strategy objects for the given strategy interface. <p>The default implementation uses the + * "DispatcherServlet.properties" file (in the same package as the DispatcherServlet class) to determine the class + * names. It instantiates the strategy objects through the context's BeanFactory. + * * @param context the current WebApplicationContext * @param strategyInterface the strategy interface * @return the List of corresponding strategy objects @@ -717,13 +659,12 @@ protected <T> List<T> getDefaultStrategies(ApplicationContext context, Class<T> } /** - * Create a default strategy. - * <p>The default implementation uses - * {@link org.springframework.beans.factory.config.AutowireCapableBeanFactory#createBean}. + * Create a default strategy. <p>The default implementation uses {@link org.springframework.beans.factory.config.AutowireCapableBeanFactory#createBean}. + * * @param context the current WebApplicationContext * @param clazz the strategy implementation class to instantiate - * @throws BeansException if initialization failed * @return the fully configured strategy instance + * @throws BeansException if initialization failed * @see org.springframework.context.ApplicationContext#getAutowireCapableBeanFactory() * @see org.springframework.beans.factory.config.AutowireCapableBeanFactory#createBean */ @@ -731,17 +672,16 @@ protected Object createDefaultStrategy(ApplicationContext context, Class clazz) return context.getAutowireCapableBeanFactory().createBean(clazz); } - /** - * Exposes the DispatcherServlet-specific request attributes and - * delegates to {@link #doDispatch} for the actual dispatching. + * Exposes the DispatcherServlet-specific request attributes and delegates to {@link #doDispatch} for the actual + * dispatching. */ @Override protected void doService(HttpServletRequest request, HttpServletResponse response) throws Exception { if (logger.isDebugEnabled()) { String requestUri = new UrlPathHelper().getRequestUri(request); - logger.debug("DispatcherServlet with name '" + getServletName() + - "' processing " + request.getMethod() + " request for [" + requestUri + "]"); + logger.debug("DispatcherServlet with name '" + getServletName() + "' processing " + request.getMethod() + + " request for [" + requestUri + "]"); } // Keep a snapshot of the request attributes in case of an include, @@ -777,12 +717,11 @@ protected void doService(HttpServletRequest request, HttpServletResponse respons } /** - * Process the actual dispatching to the handler. - * <p>The handler will be obtained by applying the servlet's HandlerMappings in order. - * The HandlerAdapter will be obtained by querying the servlet's installed - * HandlerAdapters to find the first that supports the handler class. - * <p>All HTTP methods are handled by this method. It's up to HandlerAdapters or - * handlers themselves to decide which methods are acceptable. + * Process the actual dispatching to the handler. 
<p>The handler will be obtained by applying the servlet's + * HandlerMappings in order. The HandlerAdapter will be obtained by querying the servlet's installed HandlerAdapters to + * find the first that supports the handler class. <p>All HTTP methods are handled by this method. It's up to + * HandlerAdapters or handlers themselves to decide which methods are acceptable. + * * @param request current HTTP request * @param response current HTTP response * @throws Exception in case of any kind of processing failure @@ -855,8 +794,8 @@ protected void doDispatch(HttpServletRequest request, HttpServletResponse respon } else { if (logger.isDebugEnabled()) { - logger.debug("Null ModelAndView returned to DispatcherServlet with name '" + - getServletName() + "': assuming HandlerAdapter completed request handling"); + logger.debug("Null ModelAndView returned to DispatcherServlet with name '" + getServletName() + + "': assuming HandlerAdapter completed request handling"); } } @@ -885,15 +824,16 @@ protected void doDispatch(HttpServletRequest request, HttpServletResponse respon } /** - * Override HttpServlet's <code>getLastModified</code> method to evaluate - * the Last-Modified value of the mapped handler. + * Override HttpServlet's <code>getLastModified</code> method to evaluate the Last-Modified value of the mapped + * handler. */ @Override protected long getLastModified(HttpServletRequest request) { if (logger.isDebugEnabled()) { String requestUri = new UrlPathHelper().getRequestUri(request); - logger.debug("DispatcherServlet with name '" + getServletName() + - "' determining Last-Modified value for [" + requestUri + "]"); + logger.debug( + "DispatcherServlet with name '" + getServletName() + "' determining Last-Modified value for [" + + requestUri + "]"); } try { HandlerExecutionChain mappedHandler = getHandler(request, true); @@ -918,12 +858,11 @@ protected long getLastModified(HttpServletRequest request) { } } - /** - * Build a LocaleContext for the given request, exposing the request's - * primary locale as current locale. - * <p>The default implementation uses the dispatcher's LocaleResolver - * to obtain the current locale, which might change during a request. + * Build a LocaleContext for the given request, exposing the request's primary locale as current locale. <p>The default + * implementation uses the dispatcher's LocaleResolver to obtain the current locale, which might change during a + * request. + * * @param request current HTTP request * @return the corresponding LocaleContext */ @@ -933,6 +872,7 @@ protected LocaleContext buildLocaleContext(final HttpServletRequest request) { public Locale getLocale() { return localeResolver.resolveLocale(request); } + @Override public String toString() { return getLocale().toString(); @@ -941,8 +881,9 @@ public String toString() { } /** - * Convert the request into a multipart request, and make multipart resolver available. - * If no multipart resolver is set, simply use the existing request. + * Convert the request into a multipart request, and make multipart resolver available. If no multipart resolver is + * set, simply use the existing request. + * * @param request current HTTP request * @return the processed request (multipart wrapper if necessary) * @see MultipartResolver#resolveMultipart @@ -963,6 +904,7 @@ protected HttpServletRequest checkMultipart(HttpServletRequest request) throws M /** * Clean up any resources used by the given multipart request (if any). 
+ *
 * @param request current HTTP request
 * @see MultipartResolver#cleanupMultipart
 */
@@ -973,15 +915,14 @@ protected void cleanupMultipart(HttpServletRequest request) {
 }
 /**
- * Return the HandlerExecutionChain for this request.
- * Try all handler mappings in order.
+ * Return the HandlerExecutionChain for this request. Try all handler mappings in order.
+ *
 * @param request current HTTP request
 * @param cache whether to cache the HandlerExecutionChain in a request attribute
 * @return the HandlerExecutionChain, or <code>null</code> if no handler could be found
 */
 protected HandlerExecutionChain getHandler(HttpServletRequest request, boolean cache) throws Exception {
- HandlerExecutionChain handler =
- (HandlerExecutionChain) request.getAttribute(HANDLER_EXECUTION_CHAIN_ATTRIBUTE);
+ HandlerExecutionChain handler = (HandlerExecutionChain) request.getAttribute(HANDLER_EXECUTION_CHAIN_ATTRIBUTE);
 if (handler != null) {
 if (!cache) {
 request.removeAttribute(HANDLER_EXECUTION_CHAIN_ATTRIBUTE);
@@ -1007,6 +948,7 @@ protected HandlerExecutionChain getHandler(HttpServletRequest request, boolean c
 /**
 * No handler found -> set appropriate HTTP response status.
+ *
 * @param request current HTTP request
 * @param response current HTTP response
 * @throws Exception if preparing the response failed
@@ -1014,17 +956,17 @@ protected HandlerExecutionChain getHandler(HttpServletRequest request, boolean c
 protected void noHandlerFound(HttpServletRequest request, HttpServletResponse response) throws Exception {
 if (pageNotFoundLogger.isWarnEnabled()) {
 String requestUri = new UrlPathHelper().getRequestUri(request);
- pageNotFoundLogger.warn("No mapping found for HTTP request with URI [" +
- requestUri + "] in DispatcherServlet with name '" + getServletName() + "'");
+ pageNotFoundLogger.warn("No mapping found for HTTP request with URI [" + requestUri +
+ "] in DispatcherServlet with name '" + getServletName() + "'");
 }
 response.sendError(HttpServletResponse.SC_NOT_FOUND);
 }
 /**
 * Return the HandlerAdapter for this handler object.
+ *
 * @param handler the handler object to find an adapter for
- * @throws ServletException if no HandlerAdapter can be found for the handler.
- * This is a fatal error.
+ * @throws ServletException if no HandlerAdapter can be found for the handler. This is a fatal error.
 */
 protected HandlerAdapter getHandlerAdapter(Object handler) throws ServletException {
 for (HandlerAdapter ha : this.handlerAdapters) {
@@ -1041,17 +983,19 @@ protected HandlerAdapter getHandlerAdapter(Object handler) throws ServletExcepti
 /**
 * Determine an error ModelAndView via the registered HandlerExceptionResolvers.
+ *
 * @param request current HTTP request
 * @param response current HTTP response
- * @param handler the executed handler, or <code>null</code> if none chosen at the time of
- * the exception (for example, if multipart resolution failed)
+ * @param handler the executed handler, or <code>null</code> if none chosen at the time of the exception (for example,
+ * if multipart resolution failed)
 * @param ex the exception that got thrown during handler execution
 * @return a corresponding ModelAndView to forward to
 * @throws Exception if no error ModelAndView found
 */
- protected ModelAndView processHandlerException(
- HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex)
- throws Exception {
+ protected ModelAndView processHandlerException(HttpServletRequest request,
+ HttpServletResponse response,
+ Object handler,
+ Exception ex) throws Exception {
 // Check registered HandlerExceptionResolvers...
 ModelAndView exMv = null;
@@ -1066,35 +1010,26 @@ protected ModelAndView processHandlerException(
 return null;
 }
 if (logger.isDebugEnabled()) {
- logger.debug("Handler execution resulted in exception - forwarding to resolved error view: " + exMv, ex);
+ logger.debug("Handler execution resulted in exception - forwarding to resolved error view: " + exMv,
+ ex);
 }
 WebUtils.exposeErrorRequestAttributes(request, ex, getServletName());
 return exMv;
 }
- // Send default responses for well-known exceptions, if possible.
- if (ex instanceof HttpRequestMethodNotSupportedException && !response.isCommitted()) {
- String[] supportedMethods = ((HttpRequestMethodNotSupportedException) ex).getSupportedMethods();
- if (supportedMethods != null) {
- response.setHeader("Allow", StringUtils.arrayToDelimitedString(supportedMethods, ", "));
- }
- response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED, ex.getMessage());
- return null;
- }
-
 throw ex;
 }
 /**
- * Render the given ModelAndView. This is the last stage in handling a request.
- * It may involve resolving the view by name.
+ * Render the given ModelAndView. This is the last stage in handling a request. It may involve resolving the view by
+ * name.
+ *
 * @param mv the ModelAndView to render
 * @param request current HTTP servlet request
 * @param response current HTTP servlet response
 * @throws Exception if there's a problem rendering the view
 */
- protected void render(ModelAndView mv, HttpServletRequest request, HttpServletResponse response)
- throws Exception {
+ protected void render(ModelAndView mv, HttpServletRequest request, HttpServletResponse response) throws Exception {
 // Determine locale for request and apply it to the response.
 Locale locale = this.localeResolver.resolveLocale(request);
@@ -1106,8 +1041,9 @@ protected void render(ModelAndView mv, HttpServletRequest request, HttpServletRe
 // We need to resolve the view name.
 view = resolveViewName(mv.getViewName(), mv.getModelInternal(), locale, request);
 if (view == null) {
- throw new ServletException("Could not resolve view with name '" + mv.getViewName() +
- "' in servlet with name '" + getServletName() + "'");
+ throw new ServletException(
+ "Could not resolve view with name '" + mv.getViewName() + "' in servlet with name '" +
+ getServletName() + "'");
 }
 }
 else {
@@ -1128,6 +1064,7 @@ protected void render(ModelAndView mv, HttpServletRequest request, HttpServletRe
 /**
 * Translate the supplied request into a default view name.
+ *
 * @param request current HTTP servlet request
 * @return the view name (or <code>null</code> if no default found)
 * @throws Exception if view name translation failed
@@ -1137,22 +1074,22 @@ protected String getDefaultViewName(HttpServletRequest request) throws Exception
 }
 /**
- * Resolve the given view name into a View object (to be rendered).
- * <p>Default implementations asks all ViewResolvers of this dispatcher.
- * Can be overridden for custom resolution strategies, potentially based
- * on specific model attributes or request parameters.
+ * Resolve the given view name into a View object (to be rendered). <p>The default implementation asks all ViewResolvers
+ * of this dispatcher. Can be overridden for custom resolution strategies, potentially based on specific model
+ * attributes or request parameters.
+ *
 * @param viewName the name of the view to resolve
 * @param model the model to be passed to the view
 * @param locale the current locale
 * @param request current HTTP servlet request
 * @return the View object, or <code>null</code> if none found
- * @throws Exception if the view cannot be resolved
- * (typically in case of problems creating an actual View object)
+ * @throws Exception if the view cannot be resolved (typically in case of problems creating an actual View object)
 * @see ViewResolver#resolveViewName
 */
- protected View resolveViewName(
- String viewName, Map<String, Object> model, Locale locale, HttpServletRequest request)
- throws Exception {
+ protected View resolveViewName(String viewName,
+ Map<String, Object> model,
+ Locale locale,
+ HttpServletRequest request) throws Exception {
 for (ViewResolver viewResolver : this.viewResolvers) {
 View view = viewResolver.resolveViewName(viewName, locale);
@@ -1164,18 +1101,19 @@ protected View resolveViewName(
 }
 /**
- * Trigger afterCompletion callbacks on the mapped HandlerInterceptors.
- * Will just invoke afterCompletion for all interceptors whose preHandle
- * invocation has successfully completed and returned true.
+ * Trigger afterCompletion callbacks on the mapped HandlerInterceptors. Will just invoke afterCompletion for all
+ * interceptors whose preHandle invocation has successfully completed and returned true.
+ *
 * @param mappedHandler the mapped HandlerExecutionChain
 * @param interceptorIndex index of last interceptor that successfully completed
 * @param ex Exception thrown on handler execution, or <code>null</code> if none
 * @see HandlerInterceptor#afterCompletion
 */
- private void triggerAfterCompletion(
- HandlerExecutionChain mappedHandler, int interceptorIndex,
- HttpServletRequest request, HttpServletResponse response, Exception ex)
- throws Exception {
+ private void triggerAfterCompletion(HandlerExecutionChain mappedHandler,
+ int interceptorIndex,
+ HttpServletRequest request,
+ HttpServletResponse response,
+ Exception ex) throws Exception {
 // Apply afterCompletion methods of registered interceptors.
 if (mappedHandler != null) {
@@ -1196,9 +1134,9 @@ private void triggerAfterCompletion(
 /**
 * Restore the request attributes after an include.
+ * * @param request current HTTP request - * @param attributesSnapshot the snapshot of the request attributes - * before the include + * @param attributesSnapshot the snapshot of the request attributes before the include */ private void restoreAttributesAfterInclude(HttpServletRequest request, Map attributesSnapshot) { logger.debug("Restoring snapshot of request attributes after include"); diff --git a/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/DispatcherServlet.properties b/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/DispatcherServlet.properties index 95404a75500c..c550bd71275e 100644 --- a/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/DispatcherServlet.properties +++ b/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/DispatcherServlet.properties @@ -13,6 +13,8 @@ org.springframework.web.servlet.HandlerAdapter=org.springframework.web.servlet.m org.springframework.web.servlet.mvc.SimpleControllerHandlerAdapter,\ org.springframework.web.servlet.mvc.annotation.AnnotationMethodHandlerAdapter +org.springframework.web.servlet.HandlerExceptionResolver=org.springframework.web.servlet.handler.DefaultHandlerExceptionResolver + org.springframework.web.servlet.RequestToViewNameTranslator=org.springframework.web.servlet.view.DefaultRequestToViewNameTranslator org.springframework.web.servlet.ViewResolver=org.springframework.web.servlet.view.InternalResourceViewResolver diff --git a/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/handler/AbstractHandlerExceptionResolver.java b/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/handler/AbstractHandlerExceptionResolver.java new file mode 100644 index 000000000000..a32f626da161 --- /dev/null +++ b/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/handler/AbstractHandlerExceptionResolver.java @@ -0,0 +1,190 @@ +/* + * Copyright 2002-2009 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.web.servlet.handler; + +import java.util.Set; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.springframework.core.Ordered; +import org.springframework.web.servlet.HandlerExceptionResolver; +import org.springframework.web.servlet.ModelAndView; + +/** + * Abstract base class for {@link HandlerExceptionResolver} implementations. <p>Provides a set of mapped handlers that + * the resolver should map to, and the {@link Ordered} implementation. 
+ * + * @author Arjen Poutsma + * @since 3.0 + */ +public abstract class AbstractHandlerExceptionResolver implements HandlerExceptionResolver, Ordered { + + /** Logger available to subclasses */ + protected final Log logger = LogFactory.getLog(getClass()); + + private int order = Ordered.LOWEST_PRECEDENCE; + + private Set mappedHandlers; + + private Class[] mappedHandlerClasses; + + private Log warnLogger; + + public void setOrder(int order) { + this.order = order; + } + + public int getOrder() { + return this.order; + } + + /** + * Specify the set of handlers that this exception resolver should apply to. The exception mappings and the default + * error view will only apply to the specified handlers. <p>If no handlers and handler classes are set, the exception + * mappings and the default error view will apply to all handlers. This means that a specified default error view will + * be used as fallback for all exceptions; any further HandlerExceptionResolvers in the chain will be ignored in this + * case. + */ + public void setMappedHandlers(Set mappedHandlers) { + this.mappedHandlers = mappedHandlers; + } + + /** + * Specify the set of classes that this exception resolver should apply to. The exception mappings and the default + * error view will only apply to handlers of the specified type; the specified types may be interfaces and superclasses + * of handlers as well. <p>If no handlers and handler classes are set, the exception mappings and the default error + * view will apply to all handlers. This means that a specified default error view will be used as fallback for all + * exceptions; any further HandlerExceptionResolvers in the chain will be ignored in this case. + */ + public void setMappedHandlerClasses(Class[] mappedHandlerClasses) { + this.mappedHandlerClasses = mappedHandlerClasses; + } + + /** + * Set the log category for warn logging. The name will be passed to the underlying logger implementation through + * Commons Logging, getting interpreted as log category according to the logger's configuration. <p>Default is no warn + * logging. Specify this setting to activate warn logging into a specific category. Alternatively, override the {@link + * #logException} method for custom logging. + * + * @see org.apache.commons.logging.LogFactory#getLog(String) + * @see org.apache.log4j.Logger#getLogger(String) + * @see java.util.logging.Logger#getLogger(String) + */ + public void setWarnLogCategory(String loggerName) { + this.warnLogger = LogFactory.getLog(loggerName); + } + + /** + * Checks whether this resolver is supposed to apply (i.e. the handler matches in case of "mappedHandlers" having been + * specified), then delegates to the {@link #doResolveException} template method. + */ + public ModelAndView resolveException(HttpServletRequest request, + HttpServletResponse response, + Object handler, + Exception ex) { + + if (shouldApplyTo(request, handler)) { + // Log exception, both at debug log level and at warn level, if desired. + if (logger.isDebugEnabled()) { + logger.debug("Resolving exception from handler [" + handler + "]: " + ex); + } + logException(ex, request); + return doResolveException(request, response, handler, ex); + } + else { + return null; + } + } + + /** + * Check whether this resolver is supposed to apply to the given handler. <p>The default implementation checks against + * the specified mapped handlers and handler classes, if any. 
+ *
+ * @param request current HTTP request
+ * @param handler the executed handler, or <code>null</code> if none chosen at the time of the exception (for example,
+ * if multipart resolution failed)
+ * @return whether this resolver should proceed with resolving the exception for the given request and handler
+ * @see #setMappedHandlers
+ * @see #setMappedHandlerClasses
+ */
+ protected boolean shouldApplyTo(HttpServletRequest request, Object handler) {
+ if (handler != null) {
+ if (this.mappedHandlers != null && this.mappedHandlers.contains(handler)) {
+ return true;
+ }
+ if (this.mappedHandlerClasses != null) {
+ for (Class handlerClass : this.mappedHandlerClasses) {
+ if (handlerClass.isInstance(handler)) {
+ return true;
+ }
+ }
+ }
+ }
+ // Else only apply if there are no explicit handler mappings.
+ return (this.mappedHandlers == null && this.mappedHandlerClasses == null);
+ }
+
+ /**
+ * Log the given exception at warn level, provided that warn logging has been activated through the {@link
+ * #setWarnLogCategory "warnLogCategory"} property. <p>Calls {@link #buildLogMessage} in order to determine the
+ * concrete message to log. Always passes the full exception to the logger.
+ *
+ * @param ex the exception that got thrown during handler execution
+ * @param request current HTTP request (useful for obtaining metadata)
+ * @see #setWarnLogCategory
+ * @see #buildLogMessage
+ * @see org.apache.commons.logging.Log#warn(Object, Throwable)
+ */
+ protected void logException(Exception ex, HttpServletRequest request) {
+ if (this.warnLogger != null && this.warnLogger.isWarnEnabled()) {
+ this.warnLogger.warn(buildLogMessage(ex, request), ex);
+ }
+ }
+
+ /**
+ * Build a log message for the given exception, which occurred while processing the given request.
+ *
+ * @param ex the exception that got thrown during handler execution
+ * @param request current HTTP request (useful for obtaining metadata)
+ * @return the log message to use
+ */
+ protected String buildLogMessage(Exception ex, HttpServletRequest request) {
+ return "Handler execution resulted in exception";
+ }
+
+ /**
+ * Actually resolve the given exception that got thrown during handler execution, returning a ModelAndView that
+ * represents a specific error page if appropriate. <p>May be overridden in subclasses, in order to apply specific
+ * exception checks. Note that this template method will be invoked <i>after</i> checking whether this resolver applies
+ * ("mappedHandlers" etc), so an implementation may simply proceed with its actual exception handling.
+ * + * @param request current HTTP request + * @param response current HTTP response + * @param handler the executed handler, or <code>null</code> if none chosen at the time of the exception (for example, + * if multipart resolution failed) + * @param ex the exception that got thrown during handler execution + * @return a corresponding ModelAndView to forward to, or <code>null</code> for default processing + */ + protected abstract ModelAndView doResolveException(HttpServletRequest request, + HttpServletResponse response, + Object handler, + Exception ex); + +} diff --git a/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/handler/DefaultHandlerExceptionResolver.java b/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/handler/DefaultHandlerExceptionResolver.java new file mode 100644 index 000000000000..03f837719aa2 --- /dev/null +++ b/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/handler/DefaultHandlerExceptionResolver.java @@ -0,0 +1,280 @@ +/* + * Copyright 2002-2009 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.web.servlet.handler; + +import java.util.List; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.springframework.beans.TypeMismatchException; +import org.springframework.core.Ordered; +import org.springframework.http.MediaType; +import org.springframework.http.converter.HttpMessageNotReadableException; +import org.springframework.http.converter.HttpMessageNotWritableException; +import org.springframework.util.StringUtils; +import org.springframework.web.HttpMediaTypeNotSupportedException; +import org.springframework.web.HttpRequestMethodNotSupportedException; +import org.springframework.web.bind.MissingServletRequestParameterException; +import org.springframework.web.servlet.ModelAndView; +import org.springframework.web.servlet.mvc.multiaction.NoSuchRequestHandlingMethodException; + +/** + * Default implementation of the {@link org.springframework.web.servlet.HandlerExceptionResolver + * HandlerExceptionResolver} interface that resolves standard Spring exceptions. <p>Default implementations typically + * set the response status. + * + * @author Arjen Poutsma + * @see #handleNoSuchRequestHandlingMethod + * @see #handleHttpRequestMethodNotSupported + * @see #handleHttpMediaTypeNotSupported + * @see #handleMissingServletRequestParameter + * @see #handleTypeMismatch + * @see #handleHttpMessageNotReadable + * @see #handleHttpMessageNotWritable + * @since 3.0 + */ +public class DefaultHandlerExceptionResolver extends AbstractHandlerExceptionResolver { + + /** + * Log category to use when no mapped handler is found for a request. 
+ * + * @see #pageNotFoundLogger + */ + public static final String PAGE_NOT_FOUND_LOG_CATEGORY = "org.springframework.web.servlet.PageNotFound"; + + /** + * Additional logger to use when no mapped handler is found for a request. + * + * @see #PAGE_NOT_FOUND_LOG_CATEGORY + */ + protected static final Log pageNotFoundLogger = LogFactory.getLog(PAGE_NOT_FOUND_LOG_CATEGORY); + + /** Sets the {@linkplain #setOrder(int) order} to {@link #LOWEST_PRECEDENCE}. */ + public DefaultHandlerExceptionResolver() { + setOrder(Ordered.LOWEST_PRECEDENCE); + } + + @Override + protected ModelAndView doResolveException(HttpServletRequest request, + HttpServletResponse response, + Object handler, + Exception ex) { + try { + if (ex instanceof NoSuchRequestHandlingMethodException) { + return handleNoSuchRequestHandlingMethod((NoSuchRequestHandlingMethodException) ex, request, response, + handler); + } + else if (ex instanceof HttpRequestMethodNotSupportedException) { + return handleHttpRequestMethodNotSupported((HttpRequestMethodNotSupportedException) ex, request, + response, handler); + } + else if (ex instanceof HttpMediaTypeNotSupportedException) { + return handleHttpMediaTypeNotSupported((HttpMediaTypeNotSupportedException) ex, request, response, + handler); + } + else if (ex instanceof MissingServletRequestParameterException) { + return handleMissingServletRequestParameter((MissingServletRequestParameterException) ex, request, + response, handler); + } + else if (ex instanceof TypeMismatchException) { + return handleTypeMismatch((TypeMismatchException) ex, request, response, handler); + } + else if (ex instanceof HttpMessageNotReadableException) { + return handleHttpMessageNotReadable((HttpMessageNotReadableException) ex, request, response, handler); + } + else if (ex instanceof HttpMessageNotWritableException) { + return handleHttpMessageNotWritable((HttpMessageNotWritableException) ex, request, response, handler); + } + } + catch (Exception handlerException) { + logger.warn("Handling of [" + ex.getClass().getName() + "] resulted in Exception", handlerException); + } + return null; + } + + /** + * Handle the case where no request handler method was found. <p>The default implementation logs a warning, sends an + * HTTP 404 error, and returns an empty {@code ModelAndView}. Alternatively, a fallback view could be chosen, or the + * NoSuchRequestHandlingMethodException could be rethrown as-is. + * + * @param ex the NoSuchRequestHandlingMethodException to be handled + * @param request current HTTP request + * @param response current HTTP response + * @param handler the executed handler, or <code>null</code> if none chosen at the time of the exception (for example, + * if multipart resolution failed) + * @return a ModelAndView to render, or <code>null</code> if handled directly + * @throws Exception an Exception that should be thrown as result of the servlet request + */ + protected ModelAndView handleNoSuchRequestHandlingMethod(NoSuchRequestHandlingMethodException ex, + HttpServletRequest request, + HttpServletResponse response, + Object handler) throws Exception { + + pageNotFoundLogger.warn(ex.getMessage()); + response.sendError(HttpServletResponse.SC_NOT_FOUND); + return new ModelAndView(); + } + + /** + * Handle the case where no request handler method was found for the particular HTTP request method. <p>The default + * implementation logs a warning, sends an HTTP 405 error, sets the "Allow" header, and returns an empty {@code + * ModelAndView}. 
Alternatively, a fallback view could be chosen, or the HttpRequestMethodNotSupportedException could
+	 * be rethrown as-is.
+	 *
+	 * @param ex the HttpRequestMethodNotSupportedException to be handled
+	 * @param request current HTTP request
+	 * @param response current HTTP response
+	 * @param handler the executed handler, or <code>null</code> if none chosen at the time of the exception (for example,
+	 * if multipart resolution failed)
+	 * @return a ModelAndView to render, or <code>null</code> if handled directly
+	 * @throws Exception an Exception that should be thrown as result of the servlet request
+	 */
+	protected ModelAndView handleHttpRequestMethodNotSupported(HttpRequestMethodNotSupportedException ex,
+			HttpServletRequest request,
+			HttpServletResponse response,
+			Object handler) throws Exception {
+
+		pageNotFoundLogger.warn(ex.getMessage());
+		String[] supportedMethods = ex.getSupportedMethods();
+		if (supportedMethods != null) {
+			response.setHeader("Allow", StringUtils.arrayToDelimitedString(supportedMethods, ", "));
+		}
+		response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED, ex.getMessage());
+		return new ModelAndView();
+	}
+
+	/**
+	 * Handle the case where no {@linkplain org.springframework.http.converter.HttpMessageConverter message converters}
+	 * were found for the PUT or POSTed content. <p>The default implementation sends an HTTP 415 error, sets the "Accept"
+	 * header, and returns an empty {@code ModelAndView}. Alternatively, a fallback view could be chosen, or the
+	 * HttpMediaTypeNotSupportedException could be rethrown as-is.
+	 *
+	 * @param ex the HttpMediaTypeNotSupportedException to be handled
+	 * @param request current HTTP request
+	 * @param response current HTTP response
+	 * @param handler the executed handler, or <code>null</code> if none chosen at the time of the exception (for example,
+	 * if multipart resolution failed)
+	 * @return a ModelAndView to render, or <code>null</code> if handled directly
+	 * @throws Exception an Exception that should be thrown as result of the servlet request
+	 */
+	protected ModelAndView handleHttpMediaTypeNotSupported(HttpMediaTypeNotSupportedException ex,
+			HttpServletRequest request,
+			HttpServletResponse response,
+			Object handler) throws Exception {
+
+		response.sendError(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE);
+		List<MediaType> mediaTypes = ex.getSupportedMediaTypes();
+		if (mediaTypes != null) {
+			response.setHeader("Accept", MediaType.toString(mediaTypes));
+		}
+		return new ModelAndView();
+	}
+
+	/**
+	 * Handle the case when a required parameter is missing. <p>The default implementation sends an HTTP 400 error, and
+	 * returns an empty {@code ModelAndView}. Alternatively, a fallback view could be chosen, or the
+	 * MissingServletRequestParameterException could be rethrown as-is.
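
// Illustrative sketch (not part of the patch): the observable outcome of the 415 path,
// exercised directly with the Spring test mocks (compare the new unit test further below).
DefaultHandlerExceptionResolver resolver = new DefaultHandlerExceptionResolver();
MockHttpServletRequest request = new MockHttpServletRequest("POST", "/orders");
MockHttpServletResponse response = new MockHttpServletResponse();
resolver.resolveException(request, response, null,
		new HttpMediaTypeNotSupportedException(new MediaType("text", "plain"),
				Collections.singletonList(new MediaType("application", "xml"))));
// response.getStatus() == 415; response.getHeader("Accept") == "application/xml"
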
+	 *
+	 * @param ex the MissingServletRequestParameterException to be handled
+	 * @param request current HTTP request
+	 * @param response current HTTP response
+	 * @param handler the executed handler, or <code>null</code> if none chosen at the time of the exception (for example,
+	 * if multipart resolution failed)
+	 * @return a ModelAndView to render, or <code>null</code> if handled directly
+	 * @throws Exception an Exception that should be thrown as result of the servlet request
+	 */
+	protected ModelAndView handleMissingServletRequestParameter(MissingServletRequestParameterException ex,
+			HttpServletRequest request,
+			HttpServletResponse response,
+			Object handler) throws Exception {
+
+		response.sendError(HttpServletResponse.SC_BAD_REQUEST);
+		return new ModelAndView();
+	}
+
+	/**
+	 * Handle the case when a {@link org.springframework.web.bind.WebDataBinder} conversion error occurs. <p>The default
+	 * implementation sends an HTTP 400 error, and returns an empty {@code ModelAndView}. Alternatively, a fallback view
+	 * could be chosen, or the TypeMismatchException could be rethrown as-is.
+	 *
+	 * @param ex the TypeMismatchException to be handled
+	 * @param request current HTTP request
+	 * @param response current HTTP response
+	 * @param handler the executed handler, or <code>null</code> if none chosen at the time of the exception (for example,
+	 * if multipart resolution failed)
+	 * @return a ModelAndView to render, or <code>null</code> if handled directly
+	 * @throws Exception an Exception that should be thrown as result of the servlet request
+	 */
+	protected ModelAndView handleTypeMismatch(TypeMismatchException ex,
+			HttpServletRequest request,
+			HttpServletResponse response,
+			Object handler) throws Exception {
+
+		response.sendError(HttpServletResponse.SC_BAD_REQUEST);
+		return new ModelAndView();
+	}
+
+	/**
+	 * Handle the case where a {@linkplain org.springframework.http.converter.HttpMessageConverter message converter}
+	 * cannot read from an HTTP request. <p>The default implementation sends an HTTP 400 error, and returns an empty {@code
+	 * ModelAndView}. Alternatively, a fallback view could be chosen, or the HttpMessageNotReadableException could be
+	 * rethrown as-is.
+	 *
+	 * @param ex the HttpMessageNotReadableException to be handled
+	 * @param request current HTTP request
+	 * @param response current HTTP response
+	 * @param handler the executed handler, or <code>null</code> if none chosen at the time of the exception (for example,
+	 * if multipart resolution failed)
+	 * @return a ModelAndView to render, or <code>null</code> if handled directly
+	 * @throws Exception an Exception that should be thrown as result of the servlet request
+	 */
+	protected ModelAndView handleHttpMessageNotReadable(HttpMessageNotReadableException ex,
+			HttpServletRequest request,
+			HttpServletResponse response,
+			Object handler) throws Exception {
+
+		response.sendError(HttpServletResponse.SC_BAD_REQUEST);
+		return new ModelAndView();
+	}
+
+	/**
+	 * Handle the case where a {@linkplain org.springframework.http.converter.HttpMessageConverter message converter}
+	 * cannot write to an HTTP response. <p>The default implementation sends an HTTP 500 error, and returns an empty {@code
+	 * ModelAndView}. Alternatively, a fallback view could be chosen, or the HttpMessageNotWritableException could be
+	 * rethrown as-is.
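
// Illustrative sketch (not part of the patch): because each handle* method is a protected
// template method, a subclass can refine a single case, for instance adding a message body
// to the 400 response (the subclass name is hypothetical).
public class VerboseHandlerExceptionResolver extends DefaultHandlerExceptionResolver {

	@Override
	protected ModelAndView handleMissingServletRequestParameter(MissingServletRequestParameterException ex,
			HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception {

		response.sendError(HttpServletResponse.SC_BAD_REQUEST,
				"Required parameter '" + ex.getParameterName() + "' is missing");
		return new ModelAndView(); // an empty ModelAndView marks the exception as handled
	}
}
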
+ * + * @param ex the HttpMessageNotWritableException to be handled + * @param request current HTTP request + * @param response current HTTP response + * @param handler the executed handler, or <code>null</code> if none chosen at the time of the exception (for example, + * if multipart resolution failed) + * @return a ModelAndView to render, or <code>null</code> if handled directly + * @throws Exception an Exception that should be thrown as result of the servlet request + */ + protected ModelAndView handleHttpMessageNotWritable(HttpMessageNotWritableException ex, + HttpServletRequest request, + HttpServletResponse response, + Object handler) throws Exception { + + response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); + return new ModelAndView(); + } + +} diff --git a/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/handler/SimpleMappingExceptionResolver.java b/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/handler/SimpleMappingExceptionResolver.java index 8a5fade1ea06..f2f7affef7c9 100644 --- a/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/handler/SimpleMappingExceptionResolver.java +++ b/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/handler/SimpleMappingExceptionResolver.java @@ -18,51 +18,29 @@ import java.util.Enumeration; import java.util.Properties; -import java.util.Set; - import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -import org.springframework.core.Ordered; -import org.springframework.web.servlet.HandlerExceptionResolver; import org.springframework.web.servlet.ModelAndView; import org.springframework.web.util.WebUtils; /** - * {@link org.springframework.web.servlet.HandlerExceptionResolver} implementation - * that allows for mapping exception class names to view names, either for a - * set of given handlers or for all handlers in the DispatcherServlet. + * {@link org.springframework.web.servlet.HandlerExceptionResolver} implementation that allows for mapping exception + * class names to view names, either for a set of given handlers or for all handlers in the DispatcherServlet. * - * <p>Error views are analogous to error page JSPs, but can be used with any - * kind of exception including any checked one, with fine-granular mappings for - * specific handlers. + * <p>Error views are analogous to error page JSPs, but can be used with any kind of exception including any checked + * one, with fine-granular mappings for specific handlers. * * @author Juergen Hoeller - * @since 22.11.2003 + * @author Arjen Poutsma * @see org.springframework.web.servlet.DispatcherServlet + * @since 22.11.2003 */ -public class SimpleMappingExceptionResolver implements HandlerExceptionResolver, Ordered { +public class SimpleMappingExceptionResolver extends AbstractHandlerExceptionResolver { - /** - * The default name of the exception attribute: "exception". - */ + /** The default name of the exception attribute: "exception". 
*/ public static final String DEFAULT_EXCEPTION_ATTRIBUTE = "exception"; - - /** Logger available to subclasses */ - protected final Log logger = LogFactory.getLog(getClass()); - - private int order = Integer.MAX_VALUE; // default: same as non-Ordered - - private Set mappedHandlers; - - private Class[] mappedHandlerClasses; - - private Log warnLogger; - private Properties exceptionMappings; private String defaultErrorView; @@ -71,74 +49,18 @@ public class SimpleMappingExceptionResolver implements HandlerExceptionResolver, private String exceptionAttribute = DEFAULT_EXCEPTION_ATTRIBUTE; - - public void setOrder(int order) { - this.order = order; - } - - public int getOrder() { - return this.order; - } - /** - * Specify the set of handlers that this exception resolver should apply to. - * The exception mappings and the default error view will only apply - * to the specified handlers. - * <p>If no handlers and handler classes are set, the exception mappings - * and the default error view will apply to all handlers. This means that - * a specified default error view will be used as fallback for all exceptions; - * any further HandlerExceptionResolvers in the chain will be ignored in - * this case. - */ - public void setMappedHandlers(Set mappedHandlers) { - this.mappedHandlers = mappedHandlers; - } - - /** - * Specify the set of classes that this exception resolver should apply to. - * The exception mappings and the default error view will only apply - * to handlers of the specified type; the specified types may be interfaces - * and superclasses of handlers as well. - * <p>If no handlers and handler classes are set, the exception mappings - * and the default error view will apply to all handlers. This means that - * a specified default error view will be used as fallback for all exceptions; - * any further HandlerExceptionResolvers in the chain will be ignored in - * this case. - */ - public void setMappedHandlerClasses(Class[] mappedHandlerClasses) { - this.mappedHandlerClasses = mappedHandlerClasses; - } - - /** - * Set the log category for warn logging. The name will be passed to the - * underlying logger implementation through Commons Logging, getting - * interpreted as log category according to the logger's configuration. - * <p>Default is no warn logging. Specify this setting to activate - * warn logging into a specific category. Alternatively, override - * the {@link #logException} method for custom logging. - * @see org.apache.commons.logging.LogFactory#getLog(String) - * @see org.apache.log4j.Logger#getLogger(String) - * @see java.util.logging.Logger#getLogger(String) - */ - public void setWarnLogCategory(String loggerName) { - this.warnLogger = LogFactory.getLog(loggerName); - } - - /** - * Set the mappings between exception class names and error view names. - * The exception class name can be a substring, with no wildcard support - * at present. A value of "ServletException" would match - * <code>javax.servlet.ServletException</code> and subclasses, for example. - * <p><b>NB:</b> Consider carefully how specific the pattern is, and whether - * to include package information (which isn't mandatory). For example, - * "Exception" will match nearly anything, and will probably hide other rules. - * "java.lang.Exception" would be correct if "Exception" was meant to define - * a rule for all checked exceptions. With more unusual exception names such - * as "BaseBusinessException" there's no need to use a FQN. 
- * <p>Follows the same matching algorithm as RuleBasedTransactionAttribute - * and RollbackRuleAttribute. - * @param mappings exception patterns (can also be fully qualified class names) - * as keys, and error view names as values + * Set the mappings between exception class names and error view names. The exception class name can be a substring, + * with no wildcard support at present. A value of "ServletException" would match + * <code>javax.servlet.ServletException</code> and subclasses, for example. <p><b>NB:</b> Consider carefully how + * specific the pattern is, and whether to include package information (which isn't mandatory). For example, + * "Exception" will match nearly anything, and will probably hide other rules. "java.lang.Exception" would be correct + * if "Exception" was meant to define a rule for all checked exceptions. With more unusual exception names such as + * "BaseBusinessException" there's no need to use a FQN. <p>Follows the same matching algorithm as + * RuleBasedTransactionAttribute and RollbackRuleAttribute. + * + * @param mappings exception patterns (can also be fully qualified class names) as keys, and error view names as + * values * @see org.springframework.transaction.interceptor.RuleBasedTransactionAttribute * @see org.springframework.transaction.interceptor.RollbackRuleAttribute */ @@ -147,24 +69,20 @@ public void setExceptionMappings(Properties mappings) { } /** - * Set the name of the default error view. - * This view will be returned if no specific mapping was found. - * <p>Default is none. + * Set the name of the default error view. This view will be returned if no specific mapping was found. <p>Default is + * none. */ public void setDefaultErrorView(String defaultErrorView) { this.defaultErrorView = defaultErrorView; } /** - * Set the default HTTP status code that this exception resolver will apply - * if it resolves an error view. - * <p>Note that this error code will only get applied in case of a top-level - * request. It will not be set for an include request, since the HTTP status - * cannot be modified from within an include. - * <p>If not specified, no status code will be applied, either leaving this to - * the controller or view, or keeping the servlet engine's default of 200 (OK). - * @param defaultStatusCode HTTP status code value, for example - * 500 (SC_INTERNAL_SERVER_ERROR) or 404 (SC_NOT_FOUND) + * Set the default HTTP status code that this exception resolver will apply if it resolves an error view. <p>Note that + * this error code will only get applied in case of a top-level request. It will not be set for an include request, + * since the HTTP status cannot be modified from within an include. <p>If not specified, no status code will be + * applied, either leaving this to the controller or view, or keeping the servlet engine's default of 200 (OK). + * + * @param defaultStatusCode HTTP status code value, for example 500 (SC_INTERNAL_SERVER_ERROR) or 404 (SC_NOT_FOUND) * @see javax.servlet.http.HttpServletResponse#SC_INTERNAL_SERVER_ERROR * @see javax.servlet.http.HttpServletResponse#SC_NOT_FOUND */ @@ -173,84 +91,33 @@ public void setDefaultStatusCode(int defaultStatusCode) { } /** - * Set the name of the model attribute as which the exception should - * be exposed. Default is "exception". - * <p>This can be either set to a different attribute name or to - * <code>null</code> for not exposing an exception attribute at all. + * Set the name of the model attribute as which the exception should be exposed. Default is "exception". 
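
// Illustrative sketch (not part of the patch): a typical programmatic setup of the setters
// documented above; the view names are hypothetical.
SimpleMappingExceptionResolver resolver = new SimpleMappingExceptionResolver();
Properties mappings = new Properties();
mappings.setProperty("ServletException", "servletError");    // substring match, covers subclasses
mappings.setProperty("java.lang.Exception", "genericError"); // FQN form of a catch-all rule
resolver.setExceptionMappings(mappings);
resolver.setDefaultErrorView("error");                        // fallback when no mapping matches
resolver.setDefaultStatusCode(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
resolver.setExceptionAttribute("ex");                         // model key under which the exception is exposed
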
<p>This can be
+	 * either set to a different attribute name or to <code>null</code> for not exposing an exception attribute at all.
+	 *
	 * @see #DEFAULT_EXCEPTION_ATTRIBUTE
	 */
	public void setExceptionAttribute(String exceptionAttribute) {
		this.exceptionAttribute = exceptionAttribute;
	}

-	/**
-	 * Checks whether this resolver is supposed to apply (i.e. the handler
-	 * matches in case of "mappedHandlers" having been specified), then
-	 * delegates to the {@link #doResolveException} template method.
-	 */
-	public ModelAndView resolveException(
-			HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex) {
-
-		if (shouldApplyTo(request, handler)) {
-			return doResolveException(request, response, handler, ex);
-		}
-		else {
-			return null;
-		}
-	}
-
-	/**
-	 * Check whether this resolver is supposed to apply to the given handler.
-	 * <p>The default implementation checks against the specified mapped handlers
-	 * and handler classes, if any.
-	 * @param request current HTTP request
-	 * @param handler the executed handler, or <code>null</code> if none chosen at the
-	 * time of the exception (for example, if multipart resolution failed)
-	 * @return whether this resolved should proceed with resolving the exception
-	 * for the given request and handler
-	 * @see #setMappedHandlers
-	 * @see #setMappedHandlerClasses
-	 */
-	protected boolean shouldApplyTo(HttpServletRequest request, Object handler) {
-		if (handler != null) {
-			if (this.mappedHandlers != null && this.mappedHandlers.contains(handler)) {
-				return true;
-			}
-			if (this.mappedHandlerClasses != null) {
-				for (Class handlerClass : this.mappedHandlerClasses) {
-					if (handlerClass.isInstance(handler)) {
-						return true;
-					}
-				}
-			}
-		}
-		// Else only apply if there are no explicit handler mappings.
-		return (this.mappedHandlers == null && this.mappedHandlerClasses == null);
-	}
-
-	/**
-	 * Actually resolve the given exception that got thrown during on handler execution,
-	 * returning a ModelAndView that represents a specific error page if appropriate.
-	 * <p>May be overridden in subclasses, in order to apply specific exception checks.
-	 * Note that this template method will be invoked <i>after</i> checking whether
-	 * this resolved applies ("mappedHandlers" etc), so an implementation may simply
-	 * proceed with its actual exception handling.
+	 * Actually resolve the given exception that got thrown during handler execution, returning a ModelAndView that
+	 * represents a specific error page if appropriate. <p>May be overridden in subclasses, in order to apply specific
+	 * exception checks. Note that this template method will be invoked <i>after</i> checking whether this resolver applies
+	 * ("mappedHandlers" etc), so an implementation may simply proceed with its actual exception handling.
+ * * @param request current HTTP request * @param response current HTTP response - * @param handler the executed handler, or <code>null</code> if none chosen at the - * time of the exception (for example, if multipart resolution failed) + * @param handler the executed handler, or <code>null</code> if none chosen at the time of the exception (for example, + * if multipart resolution failed) * @param ex the exception that got thrown during handler execution * @return a corresponding ModelAndView to forward to, or <code>null</code> for default processing */ - protected ModelAndView doResolveException( - HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex) { - - // Log exception, both at debug log level and at warn level, if desired. - if (logger.isDebugEnabled()) { - logger.debug("Resolving exception from handler [" + handler + "]: " + ex); - } - logException(ex, request); + @Override + protected ModelAndView doResolveException(HttpServletRequest request, + HttpServletResponse response, + Object handler, + Exception ex) { // Expose ModelAndView for chosen error view. String viewName = determineViewName(ex, request); @@ -268,40 +135,10 @@ protected ModelAndView doResolveException( } } - - /** - * Log the given exception at warn level, provided that warn logging has been - * activated through the {@link #setWarnLogCategory "warnLogCategory"} property. - * <p>Calls {@link #buildLogMessage} in order to determine the concrete message - * to log. Always passes the full exception to the logger. - * @param ex the exception that got thrown during handler execution - * @param request current HTTP request (useful for obtaining metadata) - * @see #setWarnLogCategory - * @see #buildLogMessage - * @see org.apache.commons.logging.Log#warn(Object, Throwable) - */ - protected void logException(Exception ex, HttpServletRequest request) { - if (this.warnLogger != null && this.warnLogger.isWarnEnabled()) { - this.warnLogger.warn(buildLogMessage(ex, request), ex); - } - } - /** - * Build a log message for the given exception, occured during processing - * the given request. - * @param ex the exception that got thrown during handler execution - * @param request current HTTP request (useful for obtaining metadata) - * @return the log message to use - */ - protected String buildLogMessage(Exception ex, HttpServletRequest request) { - return "Handler execution resulted in exception"; - } - - - /** - * Determine the view name for the given exception, searching the - * {@link #setExceptionMappings "exceptionMappings"}, using the - * {@link #setDefaultErrorView "defaultErrorView"} as fallback. + * Determine the view name for the given exception, searching the {@link #setExceptionMappings "exceptionMappings"}, + * using the {@link #setDefaultErrorView "defaultErrorView"} as fallback. + * * @param ex the exception that got thrown during handler execution * @param request current HTTP request (useful for obtaining metadata) * @return the resolved view name, or <code>null</code> if none found @@ -315,8 +152,8 @@ protected String determineViewName(Exception ex, HttpServletRequest request) { // Return default error view else, if defined. 
if (viewName == null && this.defaultErrorView != null) { if (logger.isDebugEnabled()) { - logger.debug("Resolving to default view '" + this.defaultErrorView + - "' for exception of type [" + ex.getClass().getName() + "]"); + logger.debug("Resolving to default view '" + this.defaultErrorView + "' for exception of type [" + + ex.getClass().getName() + "]"); } viewName = this.defaultErrorView; } @@ -325,6 +162,7 @@ protected String determineViewName(Exception ex, HttpServletRequest request) { /** * Find a matching view name in the given exception mappings. + * * @param exceptionMappings mappings between exception class names and error view names * @param ex the exception that got thrown during handler execution * @return the view name, or <code>null</code> if none found @@ -351,11 +189,9 @@ protected String findMatchingViewName(Properties exceptionMappings, Exception ex } /** - * Return the depth to the superclass matching. - * <p>0 means ex matches exactly. Returns -1 if there's no match. - * Otherwise, returns depth. Lowest depth wins. - * <p>Follows the same algorithm as - * {@link org.springframework.transaction.interceptor.RollbackRuleAttribute}. + * Return the depth to the superclass matching. <p>0 means ex matches exactly. Returns -1 if there's no match. + * Otherwise, returns depth. Lowest depth wins. <p>Follows the same algorithm as {@link + * org.springframework.transaction.interceptor.RollbackRuleAttribute}. */ protected int getDepth(String exceptionMapping, Exception ex) { return getDepth(exceptionMapping, ex.getClass(), 0); @@ -373,17 +209,15 @@ private int getDepth(String exceptionMapping, Class exceptionClass, int depth) { return getDepth(exceptionMapping, exceptionClass.getSuperclass(), depth + 1); } - /** - * Determine the HTTP status code to apply for the given error view. - * <p>The default implementation always returns the specified - * {@link #setDefaultStatusCode "defaultStatusCode"}, as a common - * status code for all error views. Override this in a custom subclass - * to determine a specific status code for the given view. + * Determine the HTTP status code to apply for the given error view. <p>The default implementation always returns the + * specified {@link #setDefaultStatusCode "defaultStatusCode"}, as a common status code for all error views. Override + * this in a custom subclass to determine a specific status code for the given view. + * * @param request current HTTP request * @param viewName the name of the error view - * @return the HTTP status code to use, or <code>null</code> for the - * servlet container's default (200 in case of a standard error view) + * @return the HTTP status code to use, or <code>null</code> for the servlet container's default (200 in case of a + * standard error view) * @see #setDefaultStatusCode * @see #applyStatusCodeIfPossible */ @@ -392,8 +226,9 @@ protected Integer determineStatusCode(HttpServletRequest request, String viewNam } /** - * Apply the specified HTTP status code to the given response, if possible - * (that is, if not executing within an include request). + * Apply the specified HTTP status code to the given response, if possible (that is, if not executing within an include + * request). + * * @param request current HTTP request * @param response current HTTP response * @param statusCode the status code to apply @@ -412,8 +247,9 @@ protected void applyStatusCodeIfPossible(HttpServletRequest request, HttpServlet } /** - * Return a ModelAndView for the given request, view name and exception. 
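
// Illustrative sketch (not part of the patch): the depth rule above restated as a
// standalone method, assuming the documented substring matching. Walk up the exception
// hierarchy until a class name contains the pattern; the lowest depth wins.
static int depthOf(String pattern, Class<?> type, int depth) {
	if (type.getName().indexOf(pattern) != -1) {
		return depth; // pattern found at this level of the hierarchy
	}
	if (type.equals(Throwable.class)) {
		return -1;    // no match anywhere up the chain
	}
	return depthOf(pattern, type.getSuperclass(), depth + 1);
}
// depthOf("IllegalArgumentException", IllegalArgumentException.class, 0) == 0 (exact match)
// depthOf("RuntimeException", IllegalArgumentException.class, 0) == 1 (one superclass up)
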
- * <p>The default implementation delegates to {@link #getModelAndView(String, Exception)}. + * Return a ModelAndView for the given request, view name and exception. <p>The default implementation delegates to + * {@link #getModelAndView(String, Exception)}. + * * @param viewName the name of the error view * @param ex the exception that got thrown during handler execution * @param request current HTTP request (useful for obtaining metadata) @@ -424,9 +260,9 @@ protected ModelAndView getModelAndView(String viewName, Exception ex, HttpServle } /** - * Return a ModelAndView for the given view name and exception. - * <p>The default implementation adds the specified exception attribute. - * Can be overridden in subclasses. + * Return a ModelAndView for the given view name and exception. <p>The default implementation adds the specified + * exception attribute. Can be overridden in subclasses. + * * @param viewName the name of the error view * @param ex the exception that got thrown during handler execution * @return the ModelAndView instance diff --git a/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/mvc/annotation/AnnotationMethodHandlerAdapter.java b/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/mvc/annotation/AnnotationMethodHandlerAdapter.java index 3b419798e3ee..4cc68680ce38 100644 --- a/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/mvc/annotation/AnnotationMethodHandlerAdapter.java +++ b/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/mvc/annotation/AnnotationMethodHandlerAdapter.java @@ -52,7 +52,6 @@ import org.springframework.core.ParameterNameDiscoverer; import org.springframework.core.annotation.AnnotationUtils; import org.springframework.http.HttpInputMessage; -import org.springframework.http.MediaType; import org.springframework.http.converter.ByteArrayHttpMessageConverter; import org.springframework.http.converter.FormHttpMessageConverter; import org.springframework.http.converter.HttpMessageConverter; @@ -68,7 +67,6 @@ import org.springframework.util.PathMatcher; import org.springframework.util.StringUtils; import org.springframework.validation.support.BindingAwareModelMap; -import org.springframework.web.HttpMediaTypeNotSupportedException; import org.springframework.web.HttpRequestMethodNotSupportedException; import org.springframework.web.HttpSessionRequiredException; import org.springframework.web.bind.MissingServletRequestParameterException; @@ -101,38 +99,36 @@ import org.springframework.web.util.WebUtils; /** - * Implementation of the {@link org.springframework.web.servlet.HandlerAdapter} - * interface that maps handler methods based on HTTP paths, HTTP methods and - * request parameters expressed through the {@link RequestMapping} annotation. + * Implementation of the {@link org.springframework.web.servlet.HandlerAdapter} interface that maps handler methods + * based on HTTP paths, HTTP methods and request parameters expressed through the {@link RequestMapping} annotation. * - * <p>Supports request parameter binding through the {@link RequestParam} annotation. - * Also supports the {@link ModelAttribute} annotation for exposing model attribute - * values to the view, as well as {@link InitBinder} for binder initialization methods - * and {@link SessionAttributes} for automatic session management of specific attributes. + * <p>Supports request parameter binding through the {@link RequestParam} annotation. 
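
// Illustrative sketch (not part of the patch): the style of annotated controller this
// adapter serves; the path, variable and view names are hypothetical.
@Controller
public class OrderController {

	@RequestMapping(value = "/orders/{id}", method = RequestMethod.GET)
	public String show(@PathVariable("id") String id, ModelMap model) {
		model.addAttribute("orderId", id); // exposed to the view
		return "orderView";
	}
}
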
Also supports the {@link + * ModelAttribute} annotation for exposing model attribute values to the view, as well as {@link InitBinder} for binder + * initialization methods and {@link SessionAttributes} for automatic session management of specific attributes. * - * <p>This adapter can be customized through various bean properties. - * A common use case is to apply shared binder initialization logic through - * a custom {@link #setWebBindingInitializer WebBindingInitializer}. + * <p>This adapter can be customized through various bean properties. A common use case is to apply shared binder + * initialization logic through a custom {@link #setWebBindingInitializer WebBindingInitializer}. * * @author Juergen Hoeller * @author Arjen Poutsma - * @since 2.5 * @see #setPathMatcher * @see #setMethodNameResolver * @see #setWebBindingInitializer * @see #setSessionAttributeStore + * @since 2.5 */ public class AnnotationMethodHandlerAdapter extends WebContentGenerator implements HandlerAdapter { /** * Log category to use when no mapped handler is found for a request. + * * @see #pageNotFoundLogger */ public static final String PAGE_NOT_FOUND_LOG_CATEGORY = "org.springframework.web.servlet.PageNotFound"; - /** * Additional logger to use when no mapped handler is found for a request. + * * @see #PAGE_NOT_FOUND_LOG_CATEGORY */ protected static final Log pageNotFoundLogger = LogFactory.getLog(PAGE_NOT_FOUND_LOG_CATEGORY); @@ -167,12 +163,11 @@ public AnnotationMethodHandlerAdapter() { super(false); } - /** - * Set if URL lookup should always use the full path within the current servlet - * context. Else, the path within the current servlet mapping is used if applicable - * (that is, in the case of a ".../*" servlet mapping in web.xml). + * Set if URL lookup should always use the full path within the current servlet context. Else, the path within the + * current servlet mapping is used if applicable (that is, in the case of a ".../*" servlet mapping in web.xml). * <p>Default is "false". + * * @see org.springframework.web.util.UrlPathHelper#setAlwaysUseFullPath */ public void setAlwaysUseFullPath(boolean alwaysUseFullPath) { @@ -180,10 +175,10 @@ public void setAlwaysUseFullPath(boolean alwaysUseFullPath) { } /** - * Set if context path and request URI should be URL-decoded. Both are returned - * <i>undecoded</i> by the Servlet API, in contrast to the servlet path. - * <p>Uses either the request encoding or the default encoding according - * to the Servlet spec (ISO-8859-1). + * Set if context path and request URI should be URL-decoded. Both are returned <i>undecoded</i> by the Servlet API, in + * contrast to the servlet path. <p>Uses either the request encoding or the default encoding according to the Servlet + * spec (ISO-8859-1). + * * @see org.springframework.web.util.UrlPathHelper#setUrlDecode */ public void setUrlDecode(boolean urlDecode) { @@ -191,10 +186,8 @@ public void setUrlDecode(boolean urlDecode) { } /** - * Set the UrlPathHelper to use for resolution of lookup paths. - * <p>Use this to override the default UrlPathHelper with a custom subclass, - * or to share common UrlPathHelper settings across multiple HandlerMappings - * and HandlerAdapters. + * Set the UrlPathHelper to use for resolution of lookup paths. <p>Use this to override the default UrlPathHelper with + * a custom subclass, or to share common UrlPathHelper settings across multiple HandlerMappings and HandlerAdapters. 
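
// Illustrative sketch (not part of the patch): wiring the adapter programmatically
// through the setters above.
AnnotationMethodHandlerAdapter adapter = new AnnotationMethodHandlerAdapter();
adapter.setAlwaysUseFullPath(true); // match against the full path within the servlet context
adapter.setUrlDecode(true);         // decode context path and request URI before matching
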
*/ public void setUrlPathHelper(UrlPathHelper urlPathHelper) { Assert.notNull(urlPathHelper, "UrlPathHelper must not be null"); @@ -202,8 +195,9 @@ public void setUrlPathHelper(UrlPathHelper urlPathHelper) { } /** - * Set the PathMatcher implementation to use for matching URL paths - * against registered URL patterns. Default is AntPathMatcher. + * Set the PathMatcher implementation to use for matching URL paths against registered URL patterns. Default is + * AntPathMatcher. + * * @see org.springframework.util.AntPathMatcher */ public void setPathMatcher(PathMatcher pathMatcher) { @@ -212,9 +206,8 @@ public void setPathMatcher(PathMatcher pathMatcher) { } /** - * Set the MethodNameResolver to use for resolving default handler methods - * (carrying an empty <code>@RequestMapping</code> annotation). - * <p>Will only kick in when the handler method cannot be resolved uniquely + * Set the MethodNameResolver to use for resolving default handler methods (carrying an empty + * <code>@RequestMapping</code> annotation). <p>Will only kick in when the handler method cannot be resolved uniquely * through the annotation metadata already. */ public void setMethodNameResolver(MethodNameResolver methodNameResolver) { @@ -222,18 +215,16 @@ public void setMethodNameResolver(MethodNameResolver methodNameResolver) { } /** - * Specify a WebBindingInitializer which will apply pre-configured - * configuration to every DataBinder that this controller uses. + * Specify a WebBindingInitializer which will apply pre-configured configuration to every DataBinder that this + * controller uses. */ public void setWebBindingInitializer(WebBindingInitializer webBindingInitializer) { this.webBindingInitializer = webBindingInitializer; } /** - * Specify the strategy to store session attributes with. - * <p>Default is {@link org.springframework.web.bind.support.DefaultSessionAttributeStore}, - * storing session attributes in the HttpSession, using the same - * attribute name as in the model. + * Specify the strategy to store session attributes with. <p>Default is {@link org.springframework.web.bind.support.DefaultSessionAttributeStore}, + * storing session attributes in the HttpSession, using the same attribute name as in the model. */ public void setSessionAttributeStore(SessionAttributeStore sessionAttributeStore) { Assert.notNull(sessionAttributeStore, "SessionAttributeStore must not be null"); @@ -241,11 +232,11 @@ public void setSessionAttributeStore(SessionAttributeStore sessionAttributeStore } /** - * Cache content produced by <code>@SessionAttributes</code> annotated handlers - * for the given number of seconds. Default is 0, preventing caching completely. - * <p>In contrast to the "cacheSeconds" property which will apply to all general - * handlers (but not to <code>@SessionAttributes</code> annotated handlers), this - * setting will apply to <code>@SessionAttributes</code> annotated handlers only. + * Cache content produced by <code>@SessionAttributes</code> annotated handlers for the given number of seconds. + * Default is 0, preventing caching completely. <p>In contrast to the "cacheSeconds" property which will apply to all + * general handlers (but not to <code>@SessionAttributes</code> annotated handlers), this setting will apply to + * <code>@SessionAttributes</code> annotated handlers only. 
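
// Illustrative sketch (not part of the patch): shared binder initialization applied to
// every DataBinder this adapter creates, via the setter above.
adapter.setWebBindingInitializer(new WebBindingInitializer() {
	public void initBinder(WebDataBinder binder, WebRequest request) {
		binder.setRequiredFields("name"); // hypothetical shared binder configuration
	}
});
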
+	 *
+	 * @see #setCacheSeconds
+	 * @see org.springframework.web.bind.annotation.SessionAttributes
	 */
@@ -254,20 +245,15 @@ public void setCacheSecondsForSessionAttributeHandlers(int cacheSecondsForSessio
	}

	/**
-	 * Set if controller execution should be synchronized on the session,
-	 * to serialize parallel invocations from the same client.
-	 * <p>More specifically, the execution of each handler method will get
-	 * synchronized if this flag is "true". The best available session mutex
-	 * will be used for the synchronization; ideally, this will be a mutex
-	 * exposed by HttpSessionMutexListener.
-	 * <p>The session mutex is guaranteed to be the same object during
-	 * the entire lifetime of the session, available under the key defined
-	 * by the <code>SESSION_MUTEX_ATTRIBUTE</code> constant. It serves as a
-	 * safe reference to synchronize on for locking on the current session.
-	 * <p>In many cases, the HttpSession reference itself is a safe mutex
-	 * as well, since it will always be the same object reference for the
-	 * same active logical session. However, this is not guaranteed across
-	 * different servlet containers; the only 100% safe way is a session mutex.
+	 * Set if controller execution should be synchronized on the session, to serialize parallel invocations from the same
+	 * client. <p>More specifically, the execution of each handler method will get synchronized if this flag is "true". The
+	 * best available session mutex will be used for the synchronization; ideally, this will be a mutex exposed by
+	 * HttpSessionMutexListener. <p>The session mutex is guaranteed to be the same object during the entire lifetime of the
+	 * session, available under the key defined by the <code>SESSION_MUTEX_ATTRIBUTE</code> constant. It serves as a safe
+	 * reference to synchronize on for locking on the current session. <p>In many cases, the HttpSession reference itself
+	 * is a safe mutex as well, since it will always be the same object reference for the same active logical session.
+	 * However, this is not guaranteed across different servlet containers; the only 100% safe way is a session mutex.
+	 *
	 * @see org.springframework.web.util.HttpSessionMutexListener
	 * @see org.springframework.web.util.WebUtils#getSessionMutex(javax.servlet.http.HttpSession)
	 */
@@ -276,44 +262,38 @@ public void setSynchronizeOnSession(boolean synchronizeOnSession) {
	}

	/**
-	 * Set the ParameterNameDiscoverer to use for resolving method parameter
-	 * names if needed (e.g. for default attribute names).
-	 * <p>Default is a {@link org.springframework.core.LocalVariableTableParameterNameDiscoverer}.
+	 * Set the ParameterNameDiscoverer to use for resolving method parameter names if needed (e.g. for default attribute
+	 * names). <p>Default is a {@link org.springframework.core.LocalVariableTableParameterNameDiscoverer}.
	 */
	public void setParameterNameDiscoverer(ParameterNameDiscoverer parameterNameDiscoverer) {
		this.parameterNameDiscoverer = parameterNameDiscoverer;
	}

	/**
-	 * Set a custom ArgumentResolvers to use for special method parameter types.
-	 * Such a custom ArgumentResolver will kick in first, having a chance to
-	 * resolve an argument value before the standard argument handling kicks in.
+	 * Set a custom ArgumentResolver to use for special method parameter types. Such a custom ArgumentResolver will kick
+	 * in first, having a chance to resolve an argument value before the standard argument handling kicks in.
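
// Illustrative sketch (not part of the patch): what "synchronizeOnSession" amounts to
// inside handle(): the handler invocation is guarded by the best available session mutex.
HttpSession session = request.getSession(false);
if (session != null) {
	Object mutex = WebUtils.getSessionMutex(session);
	synchronized (mutex) {
		return invokeHandlerMethod(request, response, handler);
	}
}
return invokeHandlerMethod(request, response, handler);
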
*/ public void setCustomArgumentResolver(WebArgumentResolver argumentResolver) { - this.customArgumentResolvers = new WebArgumentResolver[] {argumentResolver}; + this.customArgumentResolvers = new WebArgumentResolver[]{argumentResolver}; } /** - * Set one or more custom ArgumentResolvers to use for special method - * parameter types. Any such custom ArgumentResolver will kick in first, - * having a chance to resolve an argument value before the standard - * argument handling kicks in. + * Set one or more custom ArgumentResolvers to use for special method parameter types. Any such custom ArgumentResolver + * will kick in first, having a chance to resolve an argument value before the standard argument handling kicks in. */ public void setCustomArgumentResolvers(WebArgumentResolver[] argumentResolvers) { this.customArgumentResolvers = argumentResolvers; } /** - * Set the message body converters to use. These converters are used to convert - * from and to HTTP requests and responses. + * Set the message body converters to use. These converters are used to convert from and to HTTP requests and + * responses. */ public void setMessageConverters(HttpMessageConverter<?>[] messageConverters) { Assert.notEmpty(messageConverters, "'messageConverters' must not be empty"); this.messageConverters = messageConverters; } - - public boolean supports(Object handler) { return getMethodResolver(handler).hasHandlerMethods(); } @@ -345,122 +325,46 @@ public ModelAndView handle(HttpServletRequest request, HttpServletResponse respo return invokeHandlerMethod(request, response, handler); } - protected ModelAndView invokeHandlerMethod( - HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception { - - try { - ServletHandlerMethodResolver methodResolver = getMethodResolver(handler); - Method handlerMethod = methodResolver.resolveHandlerMethod(request); - ServletHandlerMethodInvoker methodInvoker = new ServletHandlerMethodInvoker(methodResolver); - ServletWebRequest webRequest = new ServletWebRequest(request, response); - ExtendedModelMap implicitModel = new BindingAwareModelMap(); - - Object result = methodInvoker.invokeHandlerMethod(handlerMethod, handler, webRequest, implicitModel); - ModelAndView mav = - methodInvoker.getModelAndView(handlerMethod, handler.getClass(), result, implicitModel, webRequest); - methodInvoker.updateModelAttributes( - handler, (mav != null ? mav.getModel() : null), implicitModel, webRequest); - return mav; - } - catch (NoSuchRequestHandlingMethodException ex) { - return handleNoSuchRequestHandlingMethod(ex, request, response); - } - catch (HttpRequestMethodNotSupportedException ex) { - return handleHttpRequestMethodNotSupportedException(ex, request, response); - } - catch (HttpMediaTypeNotSupportedException ex) { - return handleHttpMediaTypeNotSupportedException(ex, request, response); - } - } - - public long getLastModified(HttpServletRequest request, Object handler) { - return -1; - } - - /** - * Handle the case where no request handler method was found. - * <p>The default implementation logs a warning and sends an HTTP 404 error. - * Alternatively, a fallback view could be chosen, or the - * NoSuchRequestHandlingMethodException could be rethrown as-is. 
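
// Illustrative sketch (not part of the patch): a custom WebArgumentResolver plugged in
// via the setter above; CurrentUser is a hypothetical parameter type.
adapter.setCustomArgumentResolver(new WebArgumentResolver() {
	public Object resolveArgument(MethodParameter param, NativeWebRequest webRequest) throws Exception {
		if (CurrentUser.class.equals(param.getParameterType())) {
			return new CurrentUser(webRequest.getParameter("user")); // hypothetical lookup
		}
		return WebArgumentResolver.UNRESOLVED; // fall through to standard resolution
	}
});
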
- * @param ex the NoSuchRequestHandlingMethodException to be handled - * @param request current HTTP request - * @param response current HTTP response - * @return a ModelAndView to render, or <code>null</code> if handled directly - * @throws Exception an Exception that should be thrown as result of the servlet request - */ - protected ModelAndView handleNoSuchRequestHandlingMethod( - NoSuchRequestHandlingMethodException ex, HttpServletRequest request, HttpServletResponse response) - throws Exception { - - pageNotFoundLogger.warn(ex.getMessage()); - response.sendError(HttpServletResponse.SC_NOT_FOUND); - return null; - } - - /** - * Handle the case where no request handler method was found for the particular HTTP request method. - * <p>The default implementation logs a warning, sends an HTTP 405 error and sets the "Allow" header. - * Alternatively, a fallback view could be chosen, or the HttpRequestMethodNotSupportedException - * could be rethrown as-is. - * @param ex the HttpRequestMethodNotSupportedException to be handled - * @param request current HTTP request - * @param response current HTTP response - * @return a ModelAndView to render, or <code>null</code> if handled directly - * @throws Exception an Exception that should be thrown as result of the servlet request - */ - protected ModelAndView handleHttpRequestMethodNotSupportedException( - HttpRequestMethodNotSupportedException ex, HttpServletRequest request, HttpServletResponse response) + protected ModelAndView invokeHandlerMethod(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception { - pageNotFoundLogger.warn(ex.getMessage()); - response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED); - response.addHeader("Allow", StringUtils.arrayToDelimitedString(ex.getSupportedMethods(), ", ")); - return null; + ServletHandlerMethodResolver methodResolver = getMethodResolver(handler); + Method handlerMethod = methodResolver.resolveHandlerMethod(request); + ServletHandlerMethodInvoker methodInvoker = new ServletHandlerMethodInvoker(methodResolver); + ServletWebRequest webRequest = new ServletWebRequest(request, response); + ExtendedModelMap implicitModel = new BindingAwareModelMap(); + + Object result = methodInvoker.invokeHandlerMethod(handlerMethod, handler, webRequest, implicitModel); + ModelAndView mav = + methodInvoker.getModelAndView(handlerMethod, handler.getClass(), result, implicitModel, webRequest); + methodInvoker.updateModelAttributes(handler, (mav != null ? mav.getModel() : null), implicitModel, webRequest); + return mav; } - /** - * Handle the case where no {@linkplain HttpMessageConverter message converters} was found for the PUT or POSTed - * content. - * <p>The default implementation logs a warning, sends an HTTP 415 error and sets the "Allow" header. - * Alternatively, a fallback view could be chosen, or the HttpMediaTypeNotSupportedException - * could be rethrown as-is. 
- * @param ex the HttpMediaTypeNotSupportedException to be handled - * @param request current HTTP request - * @param response current HTTP response - * @return a ModelAndView to render, or <code>null</code> if handled directly - * @throws Exception an Exception that should be thrown as result of the servlet request - */ - protected ModelAndView handleHttpMediaTypeNotSupportedException( - HttpMediaTypeNotSupportedException ex, HttpServletRequest request, HttpServletResponse response) - throws Exception { - - response.sendError(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE); - response.addHeader("Accept", MediaType.toString(ex.getSupportedMediaTypes())); - return null; + public long getLastModified(HttpServletRequest request, Object handler) { + return -1; } /** - * Template method for creating a new ServletRequestDataBinder instance. - * <p>The default implementation creates a standard ServletRequestDataBinder. - * This can be overridden for custom ServletRequestDataBinder subclasses. + * Template method for creating a new ServletRequestDataBinder instance. <p>The default implementation creates a + * standard ServletRequestDataBinder. This can be overridden for custom ServletRequestDataBinder subclasses. + * * @param request current HTTP request - * @param target the target object to bind onto (or <code>null</code> - * if the binder is just used to convert a plain parameter value) + * @param target the target object to bind onto (or <code>null</code> if the binder is just used to convert a plain + * parameter value) * @param objectName the objectName of the target object * @return the ServletRequestDataBinder instance to use * @throws Exception in case of invalid state or arguments * @see ServletRequestDataBinder#bind(javax.servlet.ServletRequest) * @see ServletRequestDataBinder#convertIfNecessary(Object, Class, MethodParameter) */ - protected ServletRequestDataBinder createBinder( - HttpServletRequest request, Object target, String objectName) throws Exception { + protected ServletRequestDataBinder createBinder(HttpServletRequest request, Object target, String objectName) + throws Exception { return new ServletRequestDataBinder(target, objectName); } - /** - * Build a HandlerMethodResolver for the given handler type. - */ + /** Build a HandlerMethodResolver for the given handler type. */ private ServletHandlerMethodResolver getMethodResolver(Object handler) { Class handlerClass = ClassUtils.getUserClass(handler); ServletHandlerMethodResolver resolver = this.methodResolverCache.get(handlerClass); @@ -471,10 +375,7 @@ private ServletHandlerMethodResolver getMethodResolver(Object handler) { return resolver; } - - /** - * Servlet-specific subclass of {@link HandlerMethodResolver}. - */ + /** Servlet-specific subclass of {@link HandlerMethodResolver}. 
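
// Illustrative sketch (not part of the patch): overriding the createBinder template
// method above to register a custom editor on every binder the adapter creates.
@Override
protected ServletRequestDataBinder createBinder(HttpServletRequest request, Object target, String objectName)
		throws Exception {

	ServletRequestDataBinder binder = new ServletRequestDataBinder(target, objectName);
	binder.registerCustomEditor(Date.class, new CustomDateEditor(new SimpleDateFormat("yyyy-MM-dd"), false));
	return binder;
}
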
*/ private class ServletHandlerMethodResolver extends HandlerMethodResolver { private ServletHandlerMethodResolver(Class<?> handlerType) { @@ -499,7 +400,7 @@ public Method resolveHandlerMethod(HttpServletRequest request) throws ServletExc } boolean match = false; if (mappingInfo.paths.length > 0) { - List<String> matchedPaths = new ArrayList<String>(mappingInfo.paths.length); + List<String> matchedPaths = new ArrayList<String>(mappingInfo.paths.length); for (String mappedPath : mappingInfo.paths) { if (isPathMatch(mappedPath, lookupPath)) { if (checkParameters(mappingInfo, request)) { @@ -515,7 +416,7 @@ public Method resolveHandlerMethod(HttpServletRequest request) throws ServletExc } } Collections.sort(matchedPaths, pathComparator); - mappingInfo.matchedPaths = matchedPaths.toArray(new String[matchedPaths.size()]); + mappingInfo.matchedPaths = matchedPaths.toArray(new String[matchedPaths.size()]); } else { // No paths specified: parameter match sufficient. @@ -548,17 +449,19 @@ public Method resolveHandlerMethod(HttpServletRequest request) throws ServletExc } } if (oldMappedMethod != null) { - throw new IllegalStateException("Ambiguous handler methods mapped for HTTP path '" + - lookupPath + "': {" + oldMappedMethod + ", " + handlerMethod + - "}. If you intend to handle the same path in multiple methods, then factor " + - "them out into a dedicated handler class with that path mapped at the type level!"); + throw new IllegalStateException( + "Ambiguous handler methods mapped for HTTP path '" + lookupPath + "': {" + + oldMappedMethod + ", " + handlerMethod + + "}. If you intend to handle the same path in multiple methods, then factor " + + "them out into a dedicated handler class with that path mapped at the type level!"); } } } } if (!targetHandlerMethods.isEmpty()) { List<RequestMappingInfo> matches = new ArrayList<RequestMappingInfo>(targetHandlerMethods.keySet()); - RequestMappingInfoComparator requestMappingInfoComparator = new RequestMappingInfoComparator(pathComparator); + RequestMappingInfoComparator requestMappingInfoComparator = + new RequestMappingInfoComparator(pathComparator); Collections.sort(matches, requestMappingInfoComparator); RequestMappingInfo bestMappingMatch = matches.get(0); if (bestMappingMatch.matchedPaths.length > 0) { @@ -597,7 +500,9 @@ private boolean checkParameters(RequestMappingInfo mapping, HttpServletRequest r } @SuppressWarnings("unchecked") - private void extractHandlerMethodUriTemplates(String mappedPath, String lookupPath, HttpServletRequest request) { + private void extractHandlerMethodUriTemplates(String mappedPath, + String lookupPath, + HttpServletRequest request) { Map<String, String> variables = null; boolean hasSuffix = (mappedPath.indexOf('.') != -1); if (!hasSuffix && pathMatcher.match(mappedPath + ".*", lookupPath)) { @@ -610,7 +515,8 @@ private void extractHandlerMethodUriTemplates(String mappedPath, String lookupPa String realPath = "/**/" + mappedPath; if (pathMatcher.match(realPath, lookupPath)) { variables = pathMatcher.extractUriTemplateVariables(realPath, lookupPath); - } else { + } + else { realPath = realPath + ".*"; if (pathMatcher.match(realPath, lookupPath)) { variables = pathMatcher.extractUriTemplateVariables(realPath, lookupPath); @@ -628,17 +534,14 @@ private void extractHandlerMethodUriTemplates(String mappedPath, String lookupPa } } - - /** - * Servlet-specific subclass of {@link HandlerMethodInvoker}. - */ + /** Servlet-specific subclass of {@link HandlerMethodInvoker}. 
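
// Illustrative sketch (not part of the patch): the URI-template extraction the resolver
// above performs, shown directly against PathMatcher.
PathMatcher pm = new AntPathMatcher();
Map<String, String> vars =
		pm.extractUriTemplateVariables("/hotels/{hotel}/bookings/{booking}", "/hotels/42/bookings/21");
// vars = {hotel=42, booking=21}; stored under HandlerMapping.URI_TEMPLATE_VARIABLES_ATTRIBUTE
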
*/ private class ServletHandlerMethodInvoker extends HandlerMethodInvoker { private boolean responseArgumentUsed = false; private ServletHandlerMethodInvoker(HandlerMethodResolver resolver) { - super(resolver, webBindingInitializer, sessionAttributeStore, - parameterNameDiscoverer, customArgumentResolvers, messageConverters); + super(resolver, webBindingInitializer, sessionAttributeStore, parameterNameDiscoverer, + customArgumentResolvers, messageConverters); } @Override @@ -655,8 +558,8 @@ protected void raiseSessionRequiredException(String message) throws Exception { protected WebDataBinder createBinder(NativeWebRequest webRequest, Object target, String objectName) throws Exception { - return AnnotationMethodHandlerAdapter.this.createBinder( - (HttpServletRequest) webRequest.getNativeRequest(), target, objectName); + return AnnotationMethodHandlerAdapter.this + .createBinder((HttpServletRequest) webRequest.getNativeRequest(), target, objectName); } @Override @@ -699,14 +602,14 @@ protected String resolvePathVariable(String pathVarName, Class paramType, Native Map<String, String> uriTemplateVariables = (Map<String, String>) servletRequest.getAttribute(HandlerMapping.URI_TEMPLATE_VARIABLES_ATTRIBUTE); if (uriTemplateVariables == null || !uriTemplateVariables.containsKey(pathVarName)) { - throw new IllegalStateException("Could not find @PathVariable [" + pathVarName + "] in @RequestMapping"); + throw new IllegalStateException( + "Could not find @PathVariable [" + pathVarName + "] in @RequestMapping"); } return uriTemplateVariables.get(pathVarName); } @Override - protected Object resolveStandardArgument(Class parameterType, NativeWebRequest webRequest) - throws Exception { + protected Object resolveStandardArgument(Class parameterType, NativeWebRequest webRequest) throws Exception { HttpServletRequest request = (HttpServletRequest) webRequest.getNativeRequest(); HttpServletResponse response = (HttpServletResponse) webRequest.getNativeResponse(); @@ -745,8 +648,11 @@ else if (Writer.class.isAssignableFrom(parameterType)) { } @SuppressWarnings("unchecked") - public ModelAndView getModelAndView(Method handlerMethod, Class handlerType, Object returnValue, - ExtendedModelMap implicitModel, ServletWebRequest webRequest) { + public ModelAndView getModelAndView(Method handlerMethod, + Class handlerType, + Object returnValue, + ExtendedModelMap implicitModel, + ServletWebRequest webRequest) { if (returnValue instanceof ModelAndView) { ModelAndView mav = (ModelAndView) returnValue; @@ -792,7 +698,6 @@ else if (!BeanUtils.isSimpleProperty(returnValue.getClass())) { } } - static class RequestMappingInfo { String[] paths = new String[0]; @@ -806,7 +711,7 @@ static class RequestMappingInfo { String bestMatchedPath() { return matchedPaths.length > 0 ? matchedPaths[0] : null; } - + @Override public boolean equals(Object obj) { RequestMappingInfo other = (RequestMappingInfo) obj; @@ -823,16 +728,12 @@ public int hashCode() { /** * Comparator capable of sorting {@link RequestMappingInfo}s (RHIs) so that sorting a list with this comparator will - * result in: - * <ul> - * <li>RHIs with {@linkplain RequestMappingInfo#matchedPaths better matched paths} take prescedence over those with - * a weaker match (as expressed by the {@linkplain PathMatcher#getPatternComparator(String) path pattern - * comparator}.) 
Typically, this means that patterns without wild chards and uri templates will be ordered before those without.</li>
-	 * <li>RHIs with one single {@linkplain RequestMappingInfo#methods request method} will be ordered before those
-	 * without a method, or with more than one method.</li>
-	 * <li>RHIs with more {@linkplain RequestMappingInfo#params request parameters} will be ordered before those with
-	 * less parameters</li>
-	 * </ol>
+	 * result in: <ul> <li>RHIs with {@linkplain RequestMappingInfo#matchedPaths better matched paths} take precedence
+	 * over those with a weaker match (as expressed by the {@linkplain PathMatcher#getPatternComparator(String) path
+	 * pattern comparator}.) Typically, this means that patterns without wildcards and URI templates will be ordered
+	 * before those with them.</li> <li>RHIs with one single {@linkplain RequestMappingInfo#methods request method} will be
+	 * ordered before those without a method, or with more than one method.</li> <li>RHIs with more {@linkplain
+	 * RequestMappingInfo#params request parameters} will be ordered before those with fewer parameters</li> </ul>
	 */
	static class RequestMappingInfoComparator implements Comparator<RequestMappingInfo> {

@@ -867,5 +768,5 @@ else if (info2MethodCount == 1 & info1MethodCount > 1) {
 			return (info1ParamCount < info2ParamCount ? 1 : (info1ParamCount == info2ParamCount ? 0 : -1));
 		}
 	}
-	
+
 }
diff --git a/org.springframework.web.servlet/src/test/java/org/springframework/web/servlet/handler/DefaultHandlerExceptionResolverTests.java b/org.springframework.web.servlet/src/test/java/org/springframework/web/servlet/handler/DefaultHandlerExceptionResolverTests.java
new file mode 100644
index 000000000000..9ee8b2035e0a
--- /dev/null
+++ b/org.springframework.web.servlet/src/test/java/org/springframework/web/servlet/handler/DefaultHandlerExceptionResolverTests.java
@@ -0,0 +1,107 @@
+/*
+ * Copyright 2002-2009 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
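
// Illustrative sketch (not part of the patch): the path-specificity half of the ordering
// documented above, shown against AntPathMatcher's pattern comparator.
PathMatcher pathMatcher = new AntPathMatcher();
List<String> patterns = new ArrayList<String>(Arrays.asList("/orders/*", "/orders/{id}"));
Collections.sort(patterns, pathMatcher.getPatternComparator("/orders/42"));
// patterns.get(0) should be "/orders/{id}": the URI-template pattern is considered a
// better match than the bare wildcard, so its handler method is tried first
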
+ */ + +package org.springframework.web.servlet.handler; + +import java.util.Collections; + +import static org.junit.Assert.*; +import org.junit.Before; +import org.junit.Test; + +import org.springframework.beans.TypeMismatchException; +import org.springframework.http.MediaType; +import org.springframework.http.converter.HttpMessageNotReadableException; +import org.springframework.http.converter.HttpMessageNotWritableException; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.mock.web.MockHttpServletResponse; +import org.springframework.web.HttpMediaTypeNotSupportedException; +import org.springframework.web.HttpRequestMethodNotSupportedException; +import org.springframework.web.bind.MissingServletRequestParameterException; +import org.springframework.web.servlet.mvc.multiaction.NoSuchRequestHandlingMethodException; + +/** @author Arjen Poutsma */ +public class DefaultHandlerExceptionResolverTests { + + private DefaultHandlerExceptionResolver exceptionResolver; + + private MockHttpServletRequest request; + + private MockHttpServletResponse response; + + @Before + public void setUp() { + exceptionResolver = new DefaultHandlerExceptionResolver(); + request = new MockHttpServletRequest(); + response = new MockHttpServletResponse(); + request.setMethod("GET"); + } + + @Test + public void handleNoSuchRequestHandlingMethod() { + NoSuchRequestHandlingMethodException ex = new NoSuchRequestHandlingMethodException(request); + exceptionResolver.resolveException(request, response, null, ex); + assertEquals("Invalid status code", 404, response.getStatus()); + } + + @Test + public void handleHttpRequestMethodNotSupported() { + HttpRequestMethodNotSupportedException ex = + new HttpRequestMethodNotSupportedException("GET", new String[]{"POST", "PUT"}); + exceptionResolver.resolveException(request, response, null, ex); + assertEquals("Invalid status code", 405, response.getStatus()); + assertEquals("Invalid Allow header", "POST, PUT", response.getHeader("Allow")); + } + + @Test + public void handleHttpMediaTypeNotSupported() { + HttpMediaTypeNotSupportedException ex = new HttpMediaTypeNotSupportedException(new MediaType("text", "plain"), + Collections.singletonList(new MediaType("application", "pdf"))); + exceptionResolver.resolveException(request, response, null, ex); + assertEquals("Invalid status code", 415, response.getStatus()); + assertEquals("Invalid Accept header", "application/pdf", response.getHeader("Accept")); + } + + @Test + public void handleMissingServletRequestParameter() { + MissingServletRequestParameterException ex = new MissingServletRequestParameterException("foo", "bar"); + exceptionResolver.resolveException(request, response, null, ex); + assertEquals("Invalid status code", 400, response.getStatus()); + } + + @Test + public void handleTypeMismatch() { + TypeMismatchException ex = new TypeMismatchException("foo", String.class); + exceptionResolver.resolveException(request, response, null, ex); + assertEquals("Invalid status code", 400, response.getStatus()); + } + + @Test + public void handleHttpMessageNotReadable() { + HttpMessageNotReadableException ex = new HttpMessageNotReadableException("foo"); + exceptionResolver.resolveException(request, response, null, ex); + assertEquals("Invalid status code", 400, response.getStatus()); + } + + @Test + public void handleHttpMessageNotWritable() { + HttpMessageNotWritableException ex = new HttpMessageNotWritableException("foo"); + exceptionResolver.resolveException(request, response, null, ex); + 
assertEquals("Invalid status code", 500, response.getStatus()); + } + + +} diff --git a/org.springframework.web.servlet/src/test/java/org/springframework/web/servlet/mvc/annotation/ServletAnnotationControllerTests.java b/org.springframework.web.servlet/src/test/java/org/springframework/web/servlet/mvc/annotation/ServletAnnotationControllerTests.java index 74efeaf73419..2e6771ae593c 100644 --- a/org.springframework.web.servlet/src/test/java/org/springframework/web/servlet/mvc/annotation/ServletAnnotationControllerTests.java +++ b/org.springframework.web.servlet/src/test/java/org/springframework/web/servlet/mvc/annotation/ServletAnnotationControllerTests.java @@ -21,6 +21,7 @@ import java.security.Principal; import java.text.SimpleDateFormat; import java.util.Arrays; +import java.util.Collections; import java.util.Date; import java.util.HashSet; import java.util.LinkedList; @@ -51,6 +52,12 @@ import org.springframework.beans.propertyeditors.CustomDateEditor; import org.springframework.context.annotation.AnnotationConfigUtils; import org.springframework.core.MethodParameter; +import org.springframework.http.HttpInputMessage; +import org.springframework.http.HttpOutputMessage; +import org.springframework.http.MediaType; +import org.springframework.http.converter.HttpMessageConverter; +import org.springframework.http.converter.HttpMessageNotReadableException; +import org.springframework.http.converter.HttpMessageNotWritableException; import org.springframework.mock.web.MockHttpServletRequest; import org.springframework.mock.web.MockHttpServletResponse; import org.springframework.mock.web.MockServletConfig; @@ -62,7 +69,6 @@ import org.springframework.util.StringUtils; import org.springframework.validation.BindingResult; import org.springframework.validation.Errors; -import org.springframework.web.bind.MissingServletRequestParameterException; import org.springframework.web.bind.WebDataBinder; import org.springframework.web.bind.annotation.CookieValue; import org.springframework.web.bind.annotation.InitBinder; @@ -107,13 +113,25 @@ public void standardHandleMethod() throws Exception { assertEquals("test", response.getContentAsString()); } - @Test(expected = MissingServletRequestParameterException.class) + @Test public void requiredParamMissing() throws Exception { initServlet(RequiredParamController.class); MockHttpServletRequest request = new MockHttpServletRequest("GET", "/myPath.do"); MockHttpServletResponse response = new MockHttpServletResponse(); servlet.service(request, response); + assertEquals("Invalid response status code", HttpServletResponse.SC_BAD_REQUEST, response.getStatus()); + } + + @Test + public void typeConversionError() throws Exception { + initServlet(RequiredParamController.class); + + MockHttpServletRequest request = new MockHttpServletRequest("GET", "/myPath.do"); + request.addParameter("id", "foo"); + MockHttpServletResponse response = new MockHttpServletResponse(); + servlet.service(request, response); + assertEquals("Invalid response status code", HttpServletResponse.SC_BAD_REQUEST, response.getStatus()); } @Test @@ -157,7 +175,7 @@ public void methodNotAllowed() throws Exception { MockHttpServletResponse response = new MockHttpServletResponse(); servlet.service(request, response); assertEquals("Invalid response status", HttpServletResponse.SC_METHOD_NOT_ALLOWED, response.getStatus()); - String allowHeader = (String)response.getHeader("Allow"); + String allowHeader = (String) response.getHeader("Allow"); assertNotNull("No Allow header", allowHeader); Set<String> 
allowedMethods = new HashSet<String>(); allowedMethods.addAll(Arrays.asList(StringUtils.delimitedListToStringArray(allowHeader, ", "))); @@ -249,7 +267,6 @@ protected WebApplicationContext createWebApplicationContext(WebApplicationContex servlet.init(new MockServletConfig()); } - private void doTestAdaptedHandleMethods(final Class<?> controllerClass) throws Exception { initServlet(controllerClass); @@ -878,6 +895,31 @@ public void unsupportedRequestBody() throws ServletException, IOException { assertNotNull("No Accept response header set", response.getHeader("Accept")); } + @Test + public void badRequestRequestBody() throws ServletException, IOException { + @SuppressWarnings("serial") DispatcherServlet servlet = new DispatcherServlet() { + @Override + protected WebApplicationContext createWebApplicationContext(WebApplicationContext parent) { + GenericWebApplicationContext wac = new GenericWebApplicationContext(); + wac.registerBeanDefinition("controller", new RootBeanDefinition(RequestBodyController.class)); + RootBeanDefinition adapterDef = new RootBeanDefinition(AnnotationMethodHandlerAdapter.class); + adapterDef.getPropertyValues().addPropertyValue("messageConverters", new MyMessageConverter()); + wac.registerBeanDefinition("handlerAdapter", adapterDef); + wac.refresh(); + return wac; + } + }; + servlet.init(new MockServletConfig()); + + MockHttpServletRequest request = new MockHttpServletRequest("PUT", "/something"); + String requestBody = "Hello World"; + request.setContent(requestBody.getBytes("UTF-8")); + request.addHeader("Content-Type", "application/pdf"); + MockHttpServletResponse response = new MockHttpServletResponse(); + servlet.service(request, response); + assertEquals("Invalid response status code", HttpServletResponse.SC_BAD_REQUEST, response.getStatus()); + } + /* * Controllers */ @@ -893,8 +935,7 @@ protected ModelAndView handleRequestInternal(HttpServletRequest request, HttpSer } } - - /** @noinspection UnusedDeclaration*/ + /** @noinspection UnusedDeclaration */ private static class BaseController { @RequestMapping(method = RequestMethod.GET) @@ -903,7 +944,6 @@ public void myPath2(HttpServletResponse response) throws IOException { } } - @Controller private static class MyAdaptedController { @@ -913,8 +953,10 @@ public void myHandle(HttpServletRequest request, HttpServletResponse response) t } @RequestMapping("/myPath2.do") - public void myHandle(@RequestParam("param1") String p1, @RequestParam("param2") int p2, - @RequestHeader("header1") long h1, @CookieValue("cookie1") Cookie c1, + public void myHandle(@RequestParam("param1") String p1, + @RequestParam("param2") int p2, + @RequestHeader("header1") long h1, + @CookieValue("cookie1") Cookie c1, HttpServletResponse response) throws IOException { response.getWriter().write("test-" + p1 + "-" + p2 + "-" + h1 + "-" + c1.getValue()); } @@ -930,7 +972,6 @@ public void myHandle(TestBean tb, Errors errors, HttpServletResponse response) t } } - @Controller @RequestMapping("/*.do") private static class MyAdaptedController2 { @@ -941,8 +982,11 @@ public void myHandle(HttpServletRequest request, HttpServletResponse response) t } @RequestMapping("/myPath2.do") - public void myHandle(@RequestParam("param1") String p1, int param2, HttpServletResponse response, - @RequestHeader("header1") String h1, @CookieValue("cookie1") String c1) throws IOException { + public void myHandle(@RequestParam("param1") String p1, + int param2, + HttpServletResponse response, + @RequestHeader("header1") String h1, + @CookieValue("cookie1") String c1) 
throws IOException { response.getWriter().write("test-" + p1 + "-" + param2 + "-" + h1 + "-" + c1); } @@ -957,13 +1001,15 @@ public void myHandle(TestBean tb, Errors errors, HttpServletResponse response) t } } - @Controller private static class MyAdaptedControllerBase<T> { @RequestMapping("/myPath2.do") - public void myHandle(@RequestParam("param1") T p1, int param2, @RequestHeader Integer header1, - @CookieValue int cookie1, HttpServletResponse response) throws IOException { + public void myHandle(@RequestParam("param1") T p1, + int param2, + @RequestHeader Integer header1, + @CookieValue int cookie1, + HttpServletResponse response) throws IOException { response.getWriter().write("test-" + p1 + "-" + param2 + "-" + header1 + "-" + cookie1); } @@ -976,7 +1022,6 @@ public void modelAttribute(@RequestParam("param1") T p1, int param2) { } } - @RequestMapping("/*.do") private static class MyAdaptedController3 extends MyAdaptedControllerBase<String> { @@ -986,8 +1031,11 @@ public void myHandle(HttpServletRequest request, HttpServletResponse response) t } @Override - public void myHandle(@RequestParam("param1") String p1, int param2, @RequestHeader Integer header1, - @CookieValue int cookie1, HttpServletResponse response) throws IOException { + public void myHandle(@RequestParam("param1") String p1, + int param2, + @RequestHeader Integer header1, + @CookieValue int cookie1, + HttpServletResponse response) throws IOException { response.getWriter().write("test-" + p1 + "-" + param2 + "-" + header1 + "-" + cookie1); } @@ -1012,7 +1060,6 @@ public void modelAttribute(@RequestParam("param1") String p1, int param2) { } } - @Controller @RequestMapping(method = RequestMethod.GET) private static class EmptyParameterListHandlerMethodController { @@ -1029,7 +1076,6 @@ public void nonEmptyParameterListHandler(HttpServletResponse response) { } } - @Controller public static class MyFormController { @@ -1042,7 +1088,7 @@ public List<TestBean> getTestBeans() { } @RequestMapping("/myPath.do") - public String myHandle(@ModelAttribute("myCommand")TestBean tb, BindingResult errors, ModelMap model) { + public String myHandle(@ModelAttribute("myCommand") TestBean tb, BindingResult errors, ModelMap model) { if (!model.containsKey("myKey")) { model.addAttribute("myKey", "myValue"); } @@ -1050,7 +1096,6 @@ public String myHandle(@ModelAttribute("myCommand")TestBean tb, BindingResult er } } - @Controller public static class MyModelFormController { @@ -1063,7 +1108,7 @@ public List<TestBean> getTestBeans() { } @RequestMapping("/myPath.do") - public String myHandle(@ModelAttribute("myCommand")TestBean tb, BindingResult errors, Model model) { + public String myHandle(@ModelAttribute("myCommand") TestBean tb, BindingResult errors, Model model) { if (!model.containsAttribute("myKey")) { model.addAttribute("myKey", "myValue"); } @@ -1071,13 +1116,13 @@ public String myHandle(@ModelAttribute("myCommand")TestBean tb, BindingResult er } } - @Controller private static class MyCommandProvidingFormController<T, TB, TB2> extends MyFormController { @SuppressWarnings("unused") @ModelAttribute("myCommand") - private TestBean createTestBean(@RequestParam T defaultName, Map<String, Object> model, + private TestBean createTestBean(@RequestParam T defaultName, + Map<String, Object> model, @RequestParam Date date) { model.put("myKey", "myOriginalValue"); return new TestBean(defaultName.getClass().getSimpleName() + ":" + defaultName.toString()); @@ -1085,7 +1130,7 @@ private TestBean createTestBean(@RequestParam T defaultName, Map<String, 
Object> @Override @RequestMapping("/myPath.do") - public String myHandle(@ModelAttribute("myCommand")TestBean tb, BindingResult errors, ModelMap model) { + public String myHandle(@ModelAttribute("myCommand") TestBean tb, BindingResult errors, ModelMap model) { return super.myHandle(tb, errors, model); } @@ -1110,21 +1155,18 @@ protected TB2 getModelAttr() { } } - private static class MySpecialArg { public MySpecialArg(String value) { } } - @Controller private static class MyTypedCommandProvidingFormController extends MyCommandProvidingFormController<Integer, TestBean, ITestBean> { } - @Controller private static class MyBinderInitializingCommandProvidingFormController extends MyCommandProvidingFormController { @@ -1138,7 +1180,6 @@ private void initBinder(WebDataBinder binder) { } } - @Controller private static class MySpecificBinderInitializingCommandProvidingFormController extends MyCommandProvidingFormController { @@ -1155,7 +1196,6 @@ private void initBinder(WebDataBinder binder, String date, @RequestParam("date") } } - private static class MyWebBindingInitializer implements WebBindingInitializer { public void initBinder(WebDataBinder binder, WebRequest request) { @@ -1166,7 +1206,6 @@ public void initBinder(WebDataBinder binder, WebRequest request) { } } - private static class MySpecialArgumentResolver implements WebArgumentResolver { public Object resolveArgument(MethodParameter methodParameter, NativeWebRequest webRequest) { @@ -1177,7 +1216,6 @@ public Object resolveArgument(MethodParameter methodParameter, NativeWebRequest } } - @Controller @RequestMapping("/myPath.do") private static class MyParameterDispatchingController { @@ -1223,7 +1261,6 @@ public void mySurpriseHandle(HttpServletResponse response) throws IOException { } } - @Controller @RequestMapping(value = "/myPath.do", params = {"active"}) private static class MyConstrainedParameterDispatchingController { @@ -1239,14 +1276,12 @@ public void myLangHandle(HttpServletResponse response) throws IOException { } } - @Controller @RequestMapping(value = "/*.do", method = RequestMethod.POST, params = "myParam=myValue") private static class MyPostMethodNameDispatchingController extends MethodNameDispatchingController { } - @Controller @RequestMapping("/myApp/*") private static class MyRelativePathDispatchingController { @@ -1272,7 +1307,6 @@ public void mySurpriseHandle(HttpServletResponse response) throws IOException { } } - @Controller private static class MyNullCommandController { @@ -1287,8 +1321,11 @@ public Principal getPrincipal() { } @RequestMapping("/myPath") - public void handle(@ModelAttribute TestBean testBean, Errors errors, @ModelAttribute TestPrincipal modelPrinc, - OtherPrincipal requestPrinc, Writer writer) throws IOException { + public void handle(@ModelAttribute TestBean testBean, + Errors errors, + @ModelAttribute TestPrincipal modelPrinc, + OtherPrincipal requestPrinc, + Writer writer) throws IOException { assertNull(testBean); assertNotNull(modelPrinc); assertNotNull(requestPrinc); @@ -1298,7 +1335,6 @@ public void handle(@ModelAttribute TestBean testBean, Errors errors, @ModelAttri } } - private static class TestPrincipal implements Principal { public String getName() { @@ -1306,7 +1342,6 @@ public String getName() { } } - private static class OtherPrincipal implements Principal { public String getName() { @@ -1314,7 +1349,6 @@ public String getName() { } } - private static class TestViewResolver implements ViewResolver { public View resolveViewName(final String viewName, Locale locale) throws Exception { @@ 
-1345,9 +1379,9 @@ public void render(Map model, HttpServletRequest request, HttpServletResponse re } List<TestBean> testBeans = (List<TestBean>) model.get("testBeanList"); if (errors.hasFieldErrors("age")) { - response.getWriter().write(viewName + "-" + tb.getName() + "-" + - errors.getFieldError("age").getCode() + "-" + testBeans.get(0).getName() + "-" + - model.get("myKey")); + response.getWriter() + .write(viewName + "-" + tb.getName() + "-" + errors.getFieldError("age").getCode() + + "-" + testBeans.get(0).getName() + "-" + model.get("myKey")); } else { response.getWriter().write(viewName + "-" + tb.getName() + "-" + tb.getAge() + "-" + @@ -1358,7 +1392,6 @@ public void render(Map model, HttpServletRequest request, HttpServletResponse re } } - public static class ParentController { @RequestMapping(method = RequestMethod.GET) @@ -1366,7 +1399,6 @@ public void doGet(HttpServletRequest req, HttpServletResponse resp) { } } - @Controller @RequestMapping("/child/test") public static class ChildController extends ParentController { @@ -1376,68 +1408,66 @@ public void doGet(HttpServletRequest req, HttpServletResponse resp, @RequestPara } } - @Controller public static class RequiredParamController { @RequestMapping("/myPath.do") - public void myHandle(@RequestParam(value = "id", required = true) String id, + public void myHandle(@RequestParam(value = "id", required = true) int id, @RequestHeader(value = "header", required = true) String header) { } } - @Controller public static class OptionalParamController { @RequestMapping("/myPath.do") - public void myHandle(@RequestParam(required = false) String id, @RequestParam(required = false) boolean flag, - @RequestHeader(value = "header", required = false) String header, HttpServletResponse response) - throws IOException { + public void myHandle(@RequestParam(required = false) String id, + @RequestParam(required = false) boolean flag, + @RequestHeader(value = "header", required = false) String header, + HttpServletResponse response) throws IOException { response.getWriter().write(String.valueOf(id) + "-" + flag + "-" + String.valueOf(header)); } } - @Controller public static class DefaultValueParamController { @RequestMapping("/myPath.do") public void myHandle(@RequestParam(value = "id", defaultValue = "foo") String id, - @RequestHeader(defaultValue = "bar") String header, HttpServletResponse response) - throws IOException { + @RequestHeader(defaultValue = "bar") String header, + HttpServletResponse response) throws IOException { response.getWriter().write(String.valueOf(id) + "-" + String.valueOf(header)); } } - @Controller public static class MethodNotAllowedController { - @RequestMapping(value="/myPath.do", method = RequestMethod.DELETE) + @RequestMapping(value = "/myPath.do", method = RequestMethod.DELETE) public void delete() { } - @RequestMapping(value="/myPath.do", method = RequestMethod.HEAD) + @RequestMapping(value = "/myPath.do", method = RequestMethod.HEAD) public void head() { } - @RequestMapping(value="/myPath.do", method = RequestMethod.OPTIONS) + @RequestMapping(value = "/myPath.do", method = RequestMethod.OPTIONS) public void options() { } - @RequestMapping(value="/myPath.do", method = RequestMethod.POST) + + @RequestMapping(value = "/myPath.do", method = RequestMethod.POST) public void post() { } - @RequestMapping(value="/myPath.do", method = RequestMethod.PUT) + @RequestMapping(value = "/myPath.do", method = RequestMethod.PUT) public void put() { } - @RequestMapping(value="/myPath.do", method = RequestMethod.TRACE) + 
@RequestMapping(value = "/myPath.do", method = RequestMethod.TRACE) public void trace() { } - @RequestMapping(value="/otherPath.do", method = RequestMethod.GET) + @RequestMapping(value = "/otherPath.do", method = RequestMethod.GET) public void get() { } } @@ -1445,7 +1475,6 @@ public void get() { @Controller public static class PathOrderingController { - @RequestMapping(value = {"/dir/myPath1.do", "/**/*.do"}) public void method1(Writer writer) throws IOException { writer.write("method1"); @@ -1466,4 +1495,26 @@ public void handle(@RequestBody String body, Writer writer) throws IOException { } } + public static class MyMessageConverter implements HttpMessageConverter { + + public boolean supports(Class clazz) { + return true; + } + + public List getSupportedMediaTypes() { + return Collections.singletonList(new MediaType("application", "pdf")); + } + + public Object read(Class clazz, HttpInputMessage inputMessage) + throws IOException, HttpMessageNotReadableException { + throw new HttpMessageNotReadableException("Could not read"); + } + + public void write(Object o, HttpOutputMessage outputMessage) + throws IOException, HttpMessageNotWritableException { + throw new UnsupportedOperationException("Not implemented"); + } + } + + } diff --git a/org.springframework.web.servlet/src/test/java/org/springframework/web/servlet/mvc/annotation/UriTemplateServletAnnotationControllerTests.java b/org.springframework.web.servlet/src/test/java/org/springframework/web/servlet/mvc/annotation/UriTemplateServletAnnotationControllerTests.java index 331ac63c9e42..df0549ae009f 100644 --- a/org.springframework.web.servlet/src/test/java/org/springframework/web/servlet/mvc/annotation/UriTemplateServletAnnotationControllerTests.java +++ b/org.springframework.web.servlet/src/test/java/org/springframework/web/servlet/mvc/annotation/UriTemplateServletAnnotationControllerTests.java @@ -5,6 +5,7 @@ import java.text.SimpleDateFormat; import java.util.Date; import javax.servlet.ServletException; +import javax.servlet.http.HttpServletResponse; import static org.junit.Assert.*; import org.junit.Test; @@ -24,9 +25,7 @@ import org.springframework.web.context.support.GenericWebApplicationContext; import org.springframework.web.servlet.DispatcherServlet; -/** - * @author Arjen Poutsma - */ +/** @author Arjen Poutsma */ public class UriTemplateServletAnnotationControllerTests { private DispatcherServlet servlet; @@ -92,6 +91,16 @@ public void extension() throws Exception { } + @Test + public void typeConversionError() throws Exception { + initServlet(SimpleUriTemplateController.class); + + MockHttpServletRequest request = new MockHttpServletRequest("GET", "/foo.xml"); + MockHttpServletResponse response = new MockHttpServletResponse(); + servlet.service(request, response); + assertEquals("Invalid response status code", HttpServletResponse.SC_BAD_REQUEST, response.getStatus()); + } + private void initServlet(final Class<?> controllerclass) throws ServletException { servlet = new DispatcherServlet() { @Override
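The new DefaultHandlerExceptionResolverTests above pin down which HTTP status each standard exception translates to (404, 405, 415, 400, 500). A condensed usage sketch of the same resolver, assuming only the spring-test mock classes already shown in the diff; the "/orders" path and "id" parameter are hypothetical:

    DefaultHandlerExceptionResolver resolver = new DefaultHandlerExceptionResolver();
    MockHttpServletRequest request = new MockHttpServletRequest("GET", "/orders");
    MockHttpServletResponse response = new MockHttpServletResponse();
    // A missing required @RequestParam should surface as 400 Bad Request, not a 500.
    resolver.resolveException(request, response, null,
            new MissingServletRequestParameterException("id", "int"));
    assert response.getStatus() == 400;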
1d1f8c41ced44e78fcb8e1749cf7478e65aeb0fb
intellij-community
IDEADEV-20992

c
https://github.com/JetBrains/intellij-community
diff --git a/codeInsight/impl/com/intellij/codeInsight/daemon/impl/DaemonListeners.java b/codeInsight/impl/com/intellij/codeInsight/daemon/impl/DaemonListeners.java index d1cf0629b1f3a..92ebb05658de2 100644 --- a/codeInsight/impl/com/intellij/codeInsight/daemon/impl/DaemonListeners.java +++ b/codeInsight/impl/com/intellij/codeInsight/daemon/impl/DaemonListeners.java @@ -259,7 +259,7 @@ boolean canChangeFileSilently(PsiFileSystemItem file) { if (activeVcs == null) return true; FileStatus status = FileStatusManager.getInstance(project).getStatus(virtualFile); - return status != FileStatus.NOT_CHANGED; + return status == FileStatus.MODIFIED || status == FileStatus.ADDED; } private class MyApplicationListener extends ApplicationAdapter {
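The one-line change above replaces a blacklist (anything but NOT_CHANGED) with a whitelist (only MODIFIED or ADDED), so files that are merely unversioned or ignored are no longer edited silently. A self-contained illustration; FileStatus here is a stand-in enum, not the real com.intellij.openapi.vcs.FileStatus constants:

    enum FileStatus { NOT_CHANGED, MODIFIED, ADDED, UNKNOWN, IGNORED }

    static boolean canChangeSilentlyBefore(FileStatus s) {
        return s != FileStatus.NOT_CHANGED;                       // also lets UNKNOWN/IGNORED through
    }

    static boolean canChangeSilentlyAfter(FileStatus s) {
        return s == FileStatus.MODIFIED || s == FileStatus.ADDED; // only files the user already touched
    }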
db1599cb657d598ef7d4a56e682dfea201aadd57
restlet-framework-java
- Updated Jetty to version 6.1.15
p
https://github.com/restlet/restlet-framework-java
diff --git a/build/tmpl/eclipse/dictionary.xml b/build/tmpl/eclipse/dictionary.xml index 6f3370400b..b11df09131 100644 --- a/build/tmpl/eclipse/dictionary.xml +++ b/build/tmpl/eclipse/dictionary.xml @@ -24,3 +24,4 @@ thierry boileau callback chunked +jetty diff --git a/modules/org.restlet.ext.jetty_6.1/src/org/restlet/ext/jetty/JettyServerHelper.java b/modules/org.restlet.ext.jetty_6.1/src/org/restlet/ext/jetty/JettyServerHelper.java index f104e6d12f..10672b69ad 100644 --- a/modules/org.restlet.ext.jetty_6.1/src/org/restlet/ext/jetty/JettyServerHelper.java +++ b/modules/org.restlet.ext.jetty_6.1/src/org/restlet/ext/jetty/JettyServerHelper.java @@ -34,7 +34,7 @@ import org.mortbay.jetty.AbstractConnector; import org.mortbay.jetty.HttpConnection; import org.mortbay.jetty.Server; -import org.mortbay.thread.BoundedThreadPool; +import org.mortbay.thread.QueuedThreadPool; /** * Abstract Jetty Web server connector. Here is the list of parameters that are @@ -194,7 +194,7 @@ public JettyServerHelper(org.restlet.Server server) { this.wrappedServer = new WrappedServer(this); // Configuring the thread pool - final BoundedThreadPool btp = new BoundedThreadPool(); + final QueuedThreadPool btp = new QueuedThreadPool(); btp.setLowThreads(getLowThreads()); btp.setMaxIdleTimeMs(getThreadMaxIdleTimeMs()); btp.setMaxThreads(getMaxThreads()); @@ -292,8 +292,8 @@ public int getLowResourceMaxIdleTimeMs() { * considered as running low on resources. */ public int getLowThreads() { - return Integer.parseInt(getHelpedParameters().getFirstValue("lowThreads", - "25")); + return Integer.parseInt(getHelpedParameters().getFirstValue( + "lowThreads", "25")); } /** @@ -302,8 +302,8 @@ public int getLowThreads() { * @return The maximum threads that will service requests. */ public int getMaxThreads() { - return Integer.parseInt(getHelpedParameters().getFirstValue("maxThreads", - "255")); + return Integer.parseInt(getHelpedParameters().getFirstValue( + "maxThreads", "255")); } /** @@ -312,8 +312,8 @@ public int getMaxThreads() { * @return The minimum threads waiting to service requests. */ public int getMinThreads() { - return Integer.parseInt(getHelpedParameters() - .getFirstValue("minThreads", "1")); + return Integer.parseInt(getHelpedParameters().getFirstValue( + "minThreads", "1")); } /** @@ -342,8 +342,8 @@ public int getResponseBufferSize() { * @return The SO linger time (see Jetty 6 documentation). */ public int getSoLingerTime() { - return Integer.parseInt(getHelpedParameters().getFirstValue("soLingerTime", - "1000")); + return Integer.parseInt(getHelpedParameters().getFirstValue( + "soLingerTime", "1000")); } /**
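Beyond the rename, BoundedThreadPool's setters carry over unchanged to QueuedThreadPool, so the helper's tuning parameters keep working. A minimal standalone sketch against the Jetty 6.1 API; the port and idle timeout are arbitrary choices, the thread counts mirror the helper's defaults in the diff:

    import org.mortbay.jetty.Server;
    import org.mortbay.thread.QueuedThreadPool;

    public class JettyPoolExample {
        public static void main(String[] args) throws Exception {
            QueuedThreadPool pool = new QueuedThreadPool();
            pool.setMinThreads(1);
            pool.setMaxThreads(255);
            pool.setLowThreads(25);       // threshold for the "low on threads" state
            pool.setMaxIdleTimeMs(60000); // reclaim idle threads after one minute
            Server server = new Server(8182);
            server.setThreadPool(pool);
            server.start();
        }
    }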
fbf70d4fbfd01a5f7f5798c32d61aad97b7ca540
drools
fix test using no longer existing ConsequenceException.getRule() method
c
https://github.com/kiegroup/drools
diff --git a/drools-compiler/src/test/java/org/drools/integrationtests/MiscTest.java b/drools-compiler/src/test/java/org/drools/integrationtests/MiscTest.java index c2b421650e1..7ce9c814c87 100644 --- a/drools-compiler/src/test/java/org/drools/integrationtests/MiscTest.java +++ b/drools-compiler/src/test/java/org/drools/integrationtests/MiscTest.java @@ -2874,7 +2874,7 @@ public void testConsequenceException() throws Exception { fail( "Should throw an Exception from the Consequence" ); } catch ( final org.drools.runtime.rule.ConsequenceException e ) { assertEquals( "Throw Consequence Exception", - e.getRule().getName() ); + e.getActivation().getRule().getName() ); assertEquals( "this should throw an exception", e.getCause().getMessage() ); }
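The rule that raised the exception is now reached through the activation. A sketch of the catch-block idiom the test exercises; ksession stands for any StatefulKnowledgeSession of that era:

    try {
        ksession.fireAllRules();
    } catch (org.drools.runtime.rule.ConsequenceException e) {
        // getRule() no longer exists on the exception; navigate via the activation instead
        String ruleName = e.getActivation().getRule().getName();
        System.err.println("Rule '" + ruleName + "' failed: " + e.getCause().getMessage());
    }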
b5d1d49a9aa0420d22096c898545aa349a13f07c
orientdb
Fixed issue 110 about HTTP/REST invocation of "command" parameter passing a SELECT statement
c
https://github.com/orientechnologies/orientdb
diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLSelect.java b/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLSelect.java index aaf8f66177b..61eaf54250b 100644 --- a/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLSelect.java +++ b/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLSelect.java @@ -44,6 +44,7 @@ import com.orientechnologies.orient.core.sql.operator.OQueryOperatorContainsText; import com.orientechnologies.orient.core.sql.operator.OQueryOperatorEquals; import com.orientechnologies.orient.core.sql.query.OSQLAsynchQuery; +import com.orientechnologies.orient.core.sql.query.OSQLSynchQuery; import com.orientechnologies.orient.core.storage.ORecordBrowsingListener; import com.orientechnologies.orient.core.storage.impl.local.OStorageLocal; @@ -76,7 +77,15 @@ public OCommandExecutorSQLSelect parse(final OCommandRequestText iRequest) { init(iRequest.getDatabase(), iRequest.getText()); - request = (OSQLAsynchQuery<ORecordSchemaAware<?>>) iRequest; + if (iRequest instanceof OSQLAsynchQuery) + request = (OSQLAsynchQuery<ORecordSchemaAware<?>>) iRequest; + else { + // BUILD A QUERY OBJECT FROM THE COMMAND REQUEST + request = new OSQLSynchQuery<ORecordSchemaAware<?>>(iRequest.getText()); + request.setDatabase(iRequest.getDatabase()); + if (iRequest.getResultListener() != null) + request.setResultListener(iRequest.getResultListener()); + } int pos = extractProjections(); // TODO: IF NO PROJECTION WHAT??? @@ -156,7 +165,7 @@ record = database.load(rid); throw new OQueryParsingException("No source found in query: specify class, clusters or single records"); processResultSet(); - return null; + return request instanceof OSQLSynchQuery ? ((OSQLSynchQuery<ORecordSchemaAware<?>>) request).getResult() : tempResult; } public boolean foreach(final ORecordInternal<?> iRecord) {
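With the parser now wrapping a plain command request in an OSQLSynchQuery, a SELECT issued through the generic command path returns its result set instead of null. A hedged usage sketch assuming the classic OCommandSQL request type of the early document API; "demo" and "Account" are placeholder names:

    ODatabaseDocumentTx db = new ODatabaseDocumentTx("remote:localhost/demo").open("admin", "admin");
    List<ODocument> result = db.command(new OCommandSQL("select from Account")).execute();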
9a40de8e6c1974d4ae187b181055ecd4b1cc93da
camel
Fixed unit test having a problem on Windows deleting files for cleanup. git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@888416 13f79535-47bb-0310-9956-ffa450edef68
c
https://github.com/apache/camel
diff --git a/components/camel-jcr/src/test/java/org/apache/camel/component/jcr/JcrAuthTestBase.java b/components/camel-jcr/src/test/java/org/apache/camel/component/jcr/JcrAuthTestBase.java index 737ee4a243855..4dc522016fb1f 100644 --- a/components/camel-jcr/src/test/java/org/apache/camel/component/jcr/JcrAuthTestBase.java +++ b/components/camel-jcr/src/test/java/org/apache/camel/component/jcr/JcrAuthTestBase.java @@ -17,8 +17,6 @@ package org.apache.camel.component.jcr; import java.io.File; -import java.io.IOException; - import javax.jcr.Repository; import javax.jcr.SimpleCredentials; import javax.naming.Context; @@ -30,7 +28,6 @@ import org.apache.jackrabbit.api.security.user.UserManager; import org.apache.jackrabbit.core.SessionImpl; import org.apache.jackrabbit.core.TransientRepository; -import org.apache.jackrabbit.core.fs.local.FileUtil; import org.apache.jackrabbit.core.security.authorization.JackrabbitAccessControlList; import org.junit.Before; @@ -49,14 +46,11 @@ public abstract class JcrAuthTestBase extends CamelTestSupport { private Repository repository; - private void clean() throws IOException { - File[] files = {new File("target/repository_with_auth"), - new File("derby.log") }; - for (File file : files) { - if (file.exists()) { - FileUtil.delete(file); - } - } + @Override + @Before + public void setUp() throws Exception { + deleteDirectory("target/repository"); + super.setUp(); } @Override @@ -106,11 +100,4 @@ protected Repository getRepository() { return repository; } - @Override - @Before - public void setUp() throws Exception { - clean(); - super.setUp(); - } - } \ No newline at end of file diff --git a/components/camel-jcr/src/test/java/org/apache/camel/component/jcr/JcrNodePathCreationTest.java b/components/camel-jcr/src/test/java/org/apache/camel/component/jcr/JcrNodePathCreationTest.java index bf0e7c3a235b6..6aaef3822793c 100644 --- a/components/camel-jcr/src/test/java/org/apache/camel/component/jcr/JcrNodePathCreationTest.java +++ b/components/camel-jcr/src/test/java/org/apache/camel/component/jcr/JcrNodePathCreationTest.java @@ -16,9 +16,6 @@ */ package org.apache.camel.component.jcr; -import java.io.File; -import java.io.IOException; - import javax.jcr.Node; import javax.jcr.Repository; import javax.jcr.Session; @@ -29,7 +26,6 @@ import org.apache.camel.builder.RouteBuilder; import org.apache.camel.test.junit4.CamelTestSupport; import org.apache.jackrabbit.core.TransientRepository; -import org.apache.jackrabbit.core.fs.local.FileUtil; import org.junit.Before; import org.junit.Test; @@ -40,20 +36,10 @@ public class JcrNodePathCreationTest extends CamelTestSupport { @Override @Before public void setUp() throws Exception { - clean(); + deleteDirectory("target/repository"); super.setUp(); } - private void clean() throws IOException { - File[] files = {new File("target/repository"), new File("target/repository.xml"), - new File("derby.log")}; - for (File file : files) { - if (file.exists()) { - FileUtil.delete(file); - } - } - } - @Test public void testJcrNodePathCreation() throws Exception { Exchange exchange = createExchangeWithBody("<body/>"); diff --git a/components/camel-jcr/src/test/java/org/apache/camel/component/jcr/JcrRouteTest.java b/components/camel-jcr/src/test/java/org/apache/camel/component/jcr/JcrRouteTest.java index ac2d197973c4d..fdfb35948af49 100644 --- a/components/camel-jcr/src/test/java/org/apache/camel/component/jcr/JcrRouteTest.java +++ b/components/camel-jcr/src/test/java/org/apache/camel/component/jcr/JcrRouteTest.java @@ -16,9 +16,6 @@ */ 
package org.apache.camel.component.jcr; -import java.io.File; -import java.io.IOException; - import javax.jcr.Node; import javax.jcr.Repository; import javax.jcr.Session; @@ -29,7 +26,6 @@ import org.apache.camel.builder.RouteBuilder; import org.apache.camel.test.junit4.CamelTestSupport; import org.apache.jackrabbit.core.TransientRepository; -import org.apache.jackrabbit.core.fs.local.FileUtil; import org.junit.Before; import org.junit.Test; @@ -40,20 +36,10 @@ public class JcrRouteTest extends CamelTestSupport { @Override @Before public void setUp() throws Exception { - clean(); + deleteDirectory("target/repository"); super.setUp(); } - private void clean() throws IOException { - File[] files = {new File("target/repository"), new File("target/repository.xml"), - new File("derby.log")}; - for (File file : files) { - if (file.exists()) { - FileUtil.delete(file); - } - } - } - @Test public void testJcrRoute() throws Exception { Exchange exchange = createExchangeWithBody("<hello>world!</hello>");
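deleteDirectory(...) is inherited from Camel's TestSupport and removes the whole repository directory up front, which sidesteps the Windows file-locking issues the per-file cleanup ran into. Conceptually it is a depth-first recursive delete, as in this plain-JDK sketch (not Camel's actual implementation):

    static void deleteRecursively(java.io.File file) {
        if (file.isDirectory()) {
            java.io.File[] children = file.listFiles();
            if (children != null) {
                for (java.io.File child : children) {
                    deleteRecursively(child); // empty a directory before deleting it
                }
            }
        }
        file.delete(); // returns false instead of throwing when a file is still locked
    }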
493e8ad45f39306754484ada51bfe397928404fd
ReactiveX-RxJava
additions to tests
p
https://github.com/ReactiveX/RxJava
diff --git a/language-adaptors/rxjava-groovy/src/test/groovy/rx/lang/groovy/ObservableTests.groovy b/language-adaptors/rxjava-groovy/src/test/groovy/rx/lang/groovy/ObservableTests.groovy index 45b4436724..ceeb25dcf6 100644 --- a/language-adaptors/rxjava-groovy/src/test/groovy/rx/lang/groovy/ObservableTests.groovy +++ b/language-adaptors/rxjava-groovy/src/test/groovy/rx/lang/groovy/ObservableTests.groovy @@ -298,6 +298,31 @@ def class ObservableTests { verify(a, times(1)).received(true); } + + @Test + public void testZip() { + Observable o1 = Observable.from(1, 2, 3); + Observable o2 = Observable.from(4, 5, 6); + Observable o3 = Observable.from(7, 8, 9); + + List values = Observable.zip(o1, o2, o3, {a, b, c -> [a, b, c] }).toList().toBlockingObservable().single(); + assertEquals([1, 4, 7], values.get(0)); + assertEquals([2, 5, 8], values.get(1)); + assertEquals([3, 6, 9], values.get(2)); + } + + @Test + public void testZipWithIterable() { + Observable o1 = Observable.from(1, 2, 3); + Observable o2 = Observable.from(4, 5, 6); + Observable o3 = Observable.from(7, 8, 9); + + List values = Observable.zip([o1, o2, o3], {a, b, c -> [a, b, c] }).toList().toBlockingObservable().single(); + assertEquals([1, 4, 7], values.get(0)); + assertEquals([2, 5, 8], values.get(1)); + assertEquals([3, 6, 9], values.get(2)); + } + @Test public void testGroupBy() { int count=0; diff --git a/rxjava-core/src/test/java/rx/ReduceTests.java b/rxjava-core/src/test/java/rx/ReduceTests.java index 822001e615..1f7ea5550c 100644 --- a/rxjava-core/src/test/java/rx/ReduceTests.java +++ b/rxjava-core/src/test/java/rx/ReduceTests.java @@ -27,6 +27,22 @@ public Integer call(Integer t1, Integer t2) { @Test public void reduceWithObjects() { + Observable<Movie> horrorMovies = Observable.<Movie> from(new HorrorMovie()); + + Func2<Movie, Movie, Movie> chooseSecondMovie = + new Func2<Movie, Movie, Movie>() { + public Movie call(Movie t1, Movie t2) { + return t2; + } + }; + + Observable<Movie> reduceResult = Observable.create(OperationScan.scan(horrorMovies, chooseSecondMovie)).takeLast(1); + + Observable<Movie> reduceResult2 = horrorMovies.reduce(chooseSecondMovie); + } + + @Test + public void reduceWithCovariantObjects() { Observable<HorrorMovie> horrorMovies = Observable.from(new HorrorMovie()); Func2<Movie, Movie, Movie> chooseSecondMovie = @@ -61,8 +77,10 @@ public Movie call(Movie t1, Movie t2) { } }; + Observable<Movie> reduceResult = Observable.create(OperationScan.scan(obs, chooseSecondMovie)).takeLast(1); + //TODO this isn't compiling - // Observable<Movie> reduceResult = obs.reduce((Func2<? super Movie, ? super Movie, ? extends Movie>) chooseSecondMovie); + // Observable<Movie> reduceResult2 = obs.reduce(chooseSecondMovie); // do something with reduceResult... }
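The Groovy tests drive the three-argument zip overload; the same calls in plain Java of the 0.x era look roughly like this, assuming Func3 from rx.util.functions, where it lived at the time:

    Observable<Integer> o1 = Observable.from(1, 2, 3);
    Observable<Integer> o2 = Observable.from(4, 5, 6);
    Observable<Integer> o3 = Observable.from(7, 8, 9);

    List<List<Integer>> rows = Observable.zip(o1, o2, o3,
            new Func3<Integer, Integer, Integer, List<Integer>>() {
                public List<Integer> call(Integer a, Integer b, Integer c) {
                    return Arrays.asList(a, b, c); // one row per index: [1,4,7], [2,5,8], [3,6,9]
                }
            }).toList().toBlockingObservable().single();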
8b9ffeb5f2883e3d5187ad2f4a3a35540ff70326
drools
JBRULES-527: fixing compilation problems git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@6992 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70
c
https://github.com/kiegroup/drools
diff --git a/drools-core/src/main/java/org/drools/reteoo/BetaMemory.java b/drools-core/src/main/java/org/drools/reteoo/BetaMemory.java index 3242224b102..90471a806f0 100644 --- a/drools-core/src/main/java/org/drools/reteoo/BetaMemory.java +++ b/drools-core/src/main/java/org/drools/reteoo/BetaMemory.java @@ -1,10 +1,14 @@ package org.drools.reteoo; +import java.util.HashMap; +import java.util.Map; + import org.drools.util.TupleHashTable; public class BetaMemory { private TupleHashTable tupleMemory; private ObjectHashTable objectMemory; + private Map createdHandles; public BetaMemory(final TupleHashTable tupleMemory, final ObjectHashTable objectMemory) { @@ -19,4 +23,11 @@ public ObjectHashTable getObjectMemory() { public TupleHashTable getTupleMemory() { return this.tupleMemory; } + + public Map getCreatedHandles() { + if(createdHandles == null) { + createdHandles = new HashMap(); + } + return createdHandles; + } } diff --git a/drools-core/src/main/java/org/drools/reteoo/BetaNode.java b/drools-core/src/main/java/org/drools/reteoo/BetaNode.java index 20525acf049..fd03f8f6dc0 100644 --- a/drools-core/src/main/java/org/drools/reteoo/BetaNode.java +++ b/drools-core/src/main/java/org/drools/reteoo/BetaNode.java @@ -227,42 +227,6 @@ public boolean equals(final Object object) { */ public Object createMemory(final RuleBaseConfiguration config) { return this.constraints.createBetaMemory(); - // // iterate over all the constraints untill we find one that is indexeable. When we find it we remove it from the list and create the - // // BetaMemory for it. If we don't find one, we create a normal beta memory. We don't need the constraint as we can assume that - // // anything returned by the memory already passes that test. - // LinkedList constraints = this.constraints.getConstraints(); - // BetaMemory memory = null; - // - // if ( constraints != null ) { - // for ( LinkedListEntry entry = (LinkedListEntry) constraints.getFirst(); entry != null; entry = (LinkedListEntry) entry.getNext() ) { - // BetaNodeFieldConstraint constraint = (BetaNodeFieldConstraint) entry.getObject(); - // if ( constraint.getClass() == VariableConstraint.class ) { - // VariableConstraint variableConstraint = (VariableConstraint) constraint; - // FieldExtractor extractor = variableConstraint.getFieldExtractor(); - // Evaluator evaluator = variableConstraint.getEvaluator(); - // if ( evaluator.getOperator() == Operator.EQUAL ) { - // // make suret the indexed constraint is first - // if ( constraints.getFirst() != entry ) { - // constraints.remove( entry ); - // constraints.insertAfter( null, - // entry ); - // } - // memory = new BetaMemory( new TupleHashTable(), - // new FieldIndexHashTable( extractor, - // variableConstraint.getRequiredDeclarations()[0] ) ); - // break; - // - // } - // } - // } - // } - // - // if ( memory == null ) { - // memory = new BetaMemory( new TupleHashTable(), - // new FactHashTable() ); - // } - // - // return memory; } /** diff --git a/drools-core/src/main/java/org/drools/reteoo/CollectNode.java b/drools-core/src/main/java/org/drools/reteoo/CollectNode.java index bfc0ff76abb..088e43a74fa 100755 --- a/drools-core/src/main/java/org/drools/reteoo/CollectNode.java +++ b/drools-core/src/main/java/org/drools/reteoo/CollectNode.java @@ -125,8 +125,9 @@ public void assertTuple(final ReteTuple leftTuple, final Collection result = this.collect.instantiateResultObject(); final Iterator it = memory.getObjectMemory().iterator( leftTuple ); - this.constraints.updateFromTuple( workingMemory, leftTuple ); - + 
this.constraints.updateFromTuple( workingMemory, + leftTuple ); + for ( FactEntry entry = (FactEntry) it.next(); entry != null; entry = (FactEntry) it.next() ) { final InternalFactHandle handle = entry.getFactHandle(); if ( this.constraints.isAllowedCachedLeft( handle.getObject() ) ) { @@ -144,13 +145,15 @@ public void assertTuple(final ReteTuple leftTuple, } } if ( isAllowed ) { - final InternalFactHandle handle = workingMemory.getFactHandleFactory().newFactHandle( result ); - if ( this.resultsBinder.isAllowedCachedLeft( result ) ) { - this.sink.propagateAssertTuple( leftTuple, - handle, - context, - workingMemory ); + final InternalFactHandle handle = workingMemory.getFactHandleFactory().newFactHandle( result ); + memory.getCreatedHandles().put( leftTuple, + handle ); + + sink.propagateAssertTuple( leftTuple, + handle, + context, + workingMemory ); } } } @@ -161,31 +164,22 @@ public void assertTuple(final ReteTuple leftTuple, public void retractTuple(final ReteTuple leftTuple, final PropagationContext context, final InternalWorkingMemory workingMemory) { - // FIXME -// final BetaMemory memory = (BetaMemory) workingMemory.getNodeMemory( this ); -// memory.getTupleMemory().remove( leftTuple ); -// -// final Map matches = leftTuple.getTupleMatches(); -// -// if ( !matches.isEmpty() ) { -// for ( final Iterator it = matches.values().iterator(); it.hasNext(); ) { -// final CompositeTupleMatch compositeTupleMatch = (CompositeTupleMatch) it.next(); -// compositeTupleMatch.getObjectMatches().remove( compositeTupleMatch ); -// it.remove(); -// } -// } -// -// // if tuple was propagated -// if ( (leftTuple.getChildEntries() != null) && (leftTuple.getChildEntries().size() > 0) ) { -// // Need to store the collection result object for later disposal -// final InternalFactHandle lastHandle = ((ReteTuple) ((LinkedListEntry) leftTuple.getChildEntries().getFirst()).getObject()).getLastHandle(); -// -// leftTuple.retractChildEntries( context, -// workingMemory ); -// -// // Destroying the acumulate result object -// workingMemory.getFactHandleFactory().destroyFactHandle( lastHandle ); -// } + + final BetaMemory memory = (BetaMemory) workingMemory.getNodeMemory( this ); + memory.getTupleMemory().remove( leftTuple ); + final InternalFactHandle handle = (InternalFactHandle) memory.getCreatedHandles().remove( leftTuple ); + + // if tuple was propagated + if ( handle != null ) { + + this.sink.propagateRetractTuple( leftTuple, + handle, + context, + workingMemory ); + + // Destroying the acumulate result object + workingMemory.getFactHandleFactory().destroyFactHandle( handle ); + } } /** @@ -205,11 +199,16 @@ public void assertObject(final InternalFactHandle handle, memory.getObjectMemory().add( handle ); final Iterator it = memory.getTupleMemory().iterator(); - this.constraints.updateFromFactHandle( workingMemory, handle ); + this.constraints.updateFromFactHandle( workingMemory, + handle ); for ( ReteTuple tuple = (ReteTuple) it.next(); tuple != null; tuple = (ReteTuple) it.next() ) { if ( this.constraints.isAllowedCachedRight( tuple ) ) { - this.retractTuple( tuple, context, workingMemory ); - this.assertTuple( tuple, context, workingMemory ); + this.retractTuple( tuple, + context, + workingMemory ); + this.assertTuple( tuple, + context, + workingMemory ); } } } @@ -223,9 +222,25 @@ public void assertObject(final InternalFactHandle handle, public void retractObject(final InternalFactHandle handle, final PropagationContext context, final InternalWorkingMemory workingMemory) { + final BetaMemory memory = 
(BetaMemory) workingMemory.getNodeMemory( this ); + if ( !memory.getObjectMemory().remove( handle ) ) { + return; + } - // FIXME + final Iterator it = memory.getTupleMemory().iterator(); + this.constraints.updateFromFactHandle( workingMemory, + handle ); + for ( ReteTuple tuple = (ReteTuple) it.next(); tuple != null; tuple = (ReteTuple) it.next() ) { + if ( this.constraints.isAllowedCachedRight( tuple ) ) { + this.retractTuple( tuple, + context, + workingMemory ); + this.assertTuple( tuple, + context, + workingMemory ); + } + } } public String toString() { @@ -237,19 +252,12 @@ public void updateSink(TupleSink sink, InternalWorkingMemory workingMemory) { final BetaMemory memory = (BetaMemory) workingMemory.getNodeMemory( this ); - final Iterator tupleIter = memory.getTupleMemory().iterator(); - for ( ReteTuple tuple = (ReteTuple) tupleIter.next(); tuple != null; tuple = (ReteTuple) tupleIter.next() ) { - final Iterator objectIter = memory.getObjectMemory().iterator( tuple ); - this.constraints.updateFromTuple( workingMemory, tuple ); - for ( FactEntry entry = (FactEntry) objectIter.next(); entry != null; entry = (FactEntry) objectIter.next() ) { - final InternalFactHandle handle = entry.getFactHandle(); - if ( this.constraints.isAllowedCachedLeft( handle.getObject() ) ) { - sink.assertTuple( new ReteTuple( tuple, - handle ), - context, - workingMemory ); - } - } + for ( java.util.Iterator it = memory.getCreatedHandles().entrySet().iterator(); it.hasNext(); ) { + Map.Entry entry = (Map.Entry) it.next(); + sink.assertTuple( new ReteTuple( (ReteTuple)entry.getKey(), + (InternalFactHandle) entry.getValue()), + context, + workingMemory ); } }
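The lazily allocated createdHandles map is what lets CollectNode find and destroy the synthetic result handle when its tuple is retracted. The bookkeeping pattern in isolation, with simplified stand-in types rather than the Drools classes:

    import java.util.HashMap;
    import java.util.Map;

    class ResultHandleRegistry<T, H> {
        private Map<T, H> createdHandles; // allocated on first use, as in BetaMemory

        Map<T, H> getCreatedHandles() {
            if (createdHandles == null) {
                createdHandles = new HashMap<T, H>();
            }
            return createdHandles;
        }

        void onAssert(T tuple, H handle) { getCreatedHandles().put(tuple, handle); }

        // null means the tuple never propagated a result, so there is nothing to destroy
        H onRetract(T tuple) { return getCreatedHandles().remove(tuple); }
    }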
d36f376a8e13c69c3bd78b4d43f554b44b692ed1
elasticsearch
fix cluster state mapping information, return it with the mapping name as key and source field
c
https://github.com/elastic/elasticsearch
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/rest/action/admin/cluster/state/RestClusterStateAction.java b/modules/elasticsearch/src/main/java/org/elasticsearch/rest/action/admin/cluster/state/RestClusterStateAction.java index dc9192c3566d4..75aa769ebcb68 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/rest/action/admin/cluster/state/RestClusterStateAction.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/rest/action/admin/cluster/state/RestClusterStateAction.java @@ -72,7 +72,7 @@ public class RestClusterStateAction extends BaseRestHandler { builder.startObject("mappings"); for (Map.Entry<String, String> entry : indexMetaData.mappings().entrySet()) { - builder.startObject("mapping").field("name", entry.getKey()).field("value", entry.getValue()).endObject(); + builder.startObject(entry.getKey()).field("source", entry.getValue()).endObject(); } builder.endObject();
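The effect on the REST response: each mapping is now keyed by its type name with the raw mapping under "source", instead of every type being rendered as an anonymous "mapping" object with duplicate keys. Illustratively, with a hypothetical type name "tweet":

    before: "mappings" : { "mapping" : { "name" : "tweet", "value" : "{...}" } }
    after:  "mappings" : { "tweet" : { "source" : "{...}" } }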
14c657d448b6dd743806c4df2a321d58f4e0618e
kotlin
Extract Function: Consider reference "broken" if corresponding diagnostics are changed after code fragment extraction. KT-8633 Fixed
c
https://github.com/JetBrains/kotlin
diff --git a/idea/src/org/jetbrains/kotlin/idea/refactoring/introduce/extractionEngine/ExtractionData.kt b/idea/src/org/jetbrains/kotlin/idea/refactoring/introduce/extractionEngine/ExtractionData.kt index 867f02f713b50..f3f1edbef9762 100644 --- a/idea/src/org/jetbrains/kotlin/idea/refactoring/introduce/extractionEngine/ExtractionData.kt +++ b/idea/src/org/jetbrains/kotlin/idea/refactoring/introduce/extractionEngine/ExtractionData.kt @@ -28,6 +28,7 @@ import org.jetbrains.kotlin.idea.codeInsight.JetFileReferencesResolver import org.jetbrains.kotlin.idea.core.compareDescriptors import org.jetbrains.kotlin.idea.core.refactoring.getContextForContainingDeclarationBody import org.jetbrains.kotlin.idea.util.psi.patternMatching.JetPsiRange +import org.jetbrains.kotlin.idea.util.psi.patternMatching.JetPsiUnifier import org.jetbrains.kotlin.psi.* import org.jetbrains.kotlin.psi.psiUtil.getQualifiedExpressionForSelector import org.jetbrains.kotlin.psi.psiUtil.getStrictParentOfType @@ -213,7 +214,9 @@ data class ExtractionData( if (parent is JetUserType && (parent.getParent() as? JetUserType)?.getQualifier() == parent) continue val descriptor = context[BindingContext.REFERENCE_TARGET, ref] - val isBadRef = !compareDescriptors(project, originalResolveResult.descriptor, descriptor) || smartCast != null + val isBadRef = !(compareDescriptors(project, originalResolveResult.descriptor, descriptor) + && originalContext.diagnostics.forElement(originalResolveResult.originalRefExpr) == context.diagnostics.forElement(ref)) + || smartCast != null if (isBadRef && !originalResolveResult.declaration.isInsideOf(originalElements)) { val originalResolvedCall = originalResolveResult.resolvedCall as? VariableAsFunctionResolvedCall val originalFunctionCall = originalResolvedCall?.functionCall diff --git a/idea/testData/refactoring/extractFunction/parameters/extractThis/missingReceiver.kt b/idea/testData/refactoring/extractFunction/parameters/extractThis/missingReceiver.kt new file mode 100644 index 0000000000000..941a0f255843f --- /dev/null +++ b/idea/testData/refactoring/extractFunction/parameters/extractThis/missingReceiver.kt @@ -0,0 +1,14 @@ +// PARAM_TYPES: kotlin.String +// PARAM_DESCRIPTOR: internal final fun kotlin.String.foo(): kotlin.Unit defined in X + +fun print(a: Any) { + +} + +class X { + fun String.foo() { + <selection>print(extension)</selection> + } + + val String.extension: Int get() = length() +} \ No newline at end of file diff --git a/idea/testData/refactoring/extractFunction/parameters/extractThis/missingReceiver.kt.after b/idea/testData/refactoring/extractFunction/parameters/extractThis/missingReceiver.kt.after new file mode 100644 index 0000000000000..ef79504739a12 --- /dev/null +++ b/idea/testData/refactoring/extractFunction/parameters/extractThis/missingReceiver.kt.after @@ -0,0 +1,18 @@ +// PARAM_TYPES: kotlin.String +// PARAM_DESCRIPTOR: internal final fun kotlin.String.foo(): kotlin.Unit defined in X + +fun print(a: Any) { + +} + +class X { + fun String.foo() { + __dummyTestFun__() + } + + private fun String.__dummyTestFun__() { + print(extension) + } + + val String.extension: Int get() = length() +} \ No newline at end of file diff --git a/idea/tests/org/jetbrains/kotlin/idea/refactoring/introduce/JetExtractionTestGenerated.java b/idea/tests/org/jetbrains/kotlin/idea/refactoring/introduce/JetExtractionTestGenerated.java index a6ac3eca9ac2a..1d659de3d14c1 100644 --- a/idea/tests/org/jetbrains/kotlin/idea/refactoring/introduce/JetExtractionTestGenerated.java +++ 
b/idea/tests/org/jetbrains/kotlin/idea/refactoring/introduce/JetExtractionTestGenerated.java @@ -1979,6 +1979,12 @@ public void testImplicitThisWithSmartCast() throws Exception { doExtractFunctionTest(fileName); } + @TestMetadata("missingReceiver.kt") + public void testMissingReceiver() throws Exception { + String fileName = JetTestUtils.navigationMetadata("idea/testData/refactoring/extractFunction/parameters/extractThis/missingReceiver.kt"); + doExtractFunctionTest(fileName); + } + @TestMetadata("paramAsExplicitInvoke.kt") public void testParamAsExplicitInvoke() throws Exception { String fileName = JetTestUtils.navigationMetadata("idea/testData/refactoring/extractFunction/parameters/extractThis/paramAsExplicitInvoke.kt");
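The broadened predicate treats a reference as broken when either its resolved descriptor or its attached diagnostics change after extraction. In isolation, with plain Object parameters standing in for the real descriptor and diagnostics types:

    static boolean isBadRef(Object origDescriptor, Object newDescriptor,
                            Object origDiagnostics, Object newDiagnostics,
                            Object smartCast) {
        boolean sameDescriptor  = java.util.Objects.equals(origDescriptor, newDescriptor);
        // KT-8633: e.g. a receiver that stops being available changes the diagnostics
        boolean sameDiagnostics = java.util.Objects.equals(origDiagnostics, newDiagnostics);
        return !(sameDescriptor && sameDiagnostics) || smartCast != null;
    }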
82bd585bb78ab580d1bd16e0e9ae7402c5348579
drools
[DROOLS-114] Support Defeasible rules
a
https://github.com/kiegroup/drools
diff --git a/drools-beliefs/src/main/java/org/drools/beliefs/bayes/BayesBeliefSet.java b/drools-beliefs/src/main/java/org/drools/beliefs/bayes/BayesBeliefSet.java
index 9247cf6f69a..a5f6dcd8087 100644
--- a/drools-beliefs/src/main/java/org/drools/beliefs/bayes/BayesBeliefSet.java
+++ b/drools-beliefs/src/main/java/org/drools/beliefs/bayes/BayesBeliefSet.java
@@ -98,7 +98,12 @@ public boolean isNegated() {
 
     @Override
     public boolean isUndecided() {
-        return conflictCounter > 0 ;
+        return isConflicting();
+    }
+
+    @Override
+    public boolean isConflicting() {
+        return conflictCounter > 0;
     }
 
     @Override
diff --git a/drools-compiler/src/test/java/org/drools/compiler/beliefsystem/defeasible/DefeasibilityTest.java b/drools-compiler/src/test/java/org/drools/compiler/beliefsystem/defeasible/DefeasibilityTest.java
index 246cc6c5bad..374f1c9666a 100644
--- a/drools-compiler/src/test/java/org/drools/compiler/beliefsystem/defeasible/DefeasibilityTest.java
+++ b/drools-compiler/src/test/java/org/drools/compiler/beliefsystem/defeasible/DefeasibilityTest.java
@@ -911,4 +911,31 @@ public void testManyDefeasibles() {
 
     }
 
+
+    @Test
+    public void testRetractNegativeDefeaters() {
+
+        String drl = "declare Foo end " +
+
+                     "rule Def " +
+                     "  @Defeater " +
+                     "when " +
+                     "  String() " +
+                     "then " +
+                     "  insertLogical( new Foo(), 'neg' ); " +
+                     "end ";
+        StatefulKnowledgeSession session = getSessionFromString( drl );
+
+        FactHandle h = session.insert( "foo" );
+
+        session.fireAllRules();
+        assertEquals( 1, session.getObjects().size() );
+
+        session.delete( h );
+
+        session.fireAllRules();
+        assertEquals( 0, session.getObjects().size() );
+    }
+
+
 }
diff --git a/drools-core/src/main/java/org/drools/core/beliefsystem/BeliefSet.java b/drools-core/src/main/java/org/drools/core/beliefsystem/BeliefSet.java
index 40624e678cc..f782d0b551f 100644
--- a/drools-core/src/main/java/org/drools/core/beliefsystem/BeliefSet.java
+++ b/drools-core/src/main/java/org/drools/core/beliefsystem/BeliefSet.java
@@ -41,6 +41,8 @@
 
     boolean isUndecided();
 
+    boolean isConflicting();
+
     boolean isPositive();
 }
diff --git a/drools-core/src/main/java/org/drools/core/beliefsystem/jtms/JTMSBeliefSetImpl.java b/drools-core/src/main/java/org/drools/core/beliefsystem/jtms/JTMSBeliefSetImpl.java
index 687843e12a9..cc332141852 100644
--- a/drools-core/src/main/java/org/drools/core/beliefsystem/jtms/JTMSBeliefSetImpl.java
+++ b/drools-core/src/main/java/org/drools/core/beliefsystem/jtms/JTMSBeliefSetImpl.java
@@ -90,6 +90,11 @@ public boolean isNegated() {
 
     @Override
     public boolean isUndecided() {
+        return isConflicting();
+    }
+
+    @Override
+    public boolean isConflicting() {
         return posCounter > 0 && negCounter > 0;
     }
 
diff --git a/drools-core/src/main/java/org/drools/core/beliefsystem/jtms/JTMSBeliefSystem.java b/drools-core/src/main/java/org/drools/core/beliefsystem/jtms/JTMSBeliefSystem.java
index 8642f233ccb..477336914f9 100644
--- a/drools-core/src/main/java/org/drools/core/beliefsystem/jtms/JTMSBeliefSystem.java
+++ b/drools-core/src/main/java/org/drools/core/beliefsystem/jtms/JTMSBeliefSystem.java
@@ -147,7 +147,7 @@ public void delete(LogicalDependency<T> node,
                        BeliefSet beliefSet,
                        PropagationContext context) {
         JTMSBeliefSet jtmsBeliefSet = (JTMSBeliefSet) beliefSet;
-        boolean wasConflicting = jtmsBeliefSet.isUndecided();
+        boolean wasUndecided = jtmsBeliefSet.isUndecided();
         boolean wasNegated = jtmsBeliefSet.isNegated();
 
         // If the prime object is removed, we need to update the FactHandle, and tell the callback to update
@@ -160,7 +160,7 @@ public void delete(LogicalDependency<T> node,
 
         beliefSet.remove( (JTMSMode) node.getMode() );
         if ( beliefSet.isEmpty() ) {
-            if ( wasNegated ) {
+            if ( wasNegated && ! wasUndecided ) {
                 defEP.getObjectStore().addHandle( beliefSet.getFactHandle(), beliefSet.getFactHandle().getObject() ); // was negated, so add back in, so main retract works
                 InternalFactHandle fh = jtmsBeliefSet.getNegativeFactHandle();
                 ((NamedEntryPoint) fh.getEntryPoint()).delete( fh, context.getRuleOrigin(), node.getJustifier() );
@@ -172,14 +172,14 @@ public void delete(LogicalDependency<T> node,
                 ((NamedEntryPoint) fh.getEntryPoint()).delete( fh, context.getRuleOrigin(), node.getJustifier() );
             }
 
-        } else if ( wasConflicting && !jtmsBeliefSet.isUndecided() ) {
+        } else if ( wasUndecided && !jtmsBeliefSet.isUndecided() ) {
             insertBelief( node,
                           defEP.getObjectTypeConfigurationRegistry().getObjectTypeConf( defEP.getEntryPoint(), node.getObject() ),
                           jtmsBeliefSet,
                           context,
                           false,
                           wasNegated,
-                          wasConflicting );
+                          wasUndecided );
         } else if ( primeChanged ) {
             // we know there must be at least one more of the same type, as they are still in conflict
diff --git a/drools-core/src/main/java/org/drools/core/beliefsystem/simple/SimpleBeliefSet.java b/drools-core/src/main/java/org/drools/core/beliefsystem/simple/SimpleBeliefSet.java
index ea40aad79f3..dfd1ab5f48f 100644
--- a/drools-core/src/main/java/org/drools/core/beliefsystem/simple/SimpleBeliefSet.java
+++ b/drools-core/src/main/java/org/drools/core/beliefsystem/simple/SimpleBeliefSet.java
@@ -74,6 +74,11 @@ public boolean isUndecided() {
         return false;
     }
 
+    @Override
+    public boolean isConflicting() {
+        return false;
+    }
+
     @Override
     public boolean isPositive() {
         return ! isEmpty();
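The drools change above hinges on two counter-based belief-set invariants: a JTMS-style set is "negated" when only negative justifications remain, and "undecided" is simply an alias for the new "conflicting" state (positive and negative justifications present at once). Below is a minimal self-contained sketch of those semantics; the class and method bodies are hypothetical, with only the method names and the posCounter/negCounter/conflictCounter idea taken from the diff.

// Sketch of JTMS-style belief-set counter semantics (hypothetical, not Drools source).
public class BeliefSetSketch {
    private int posCounter; // justifications asserting the fact
    private int negCounter; // justifications asserting its negation

    public void add(boolean negated)    { if (negated) negCounter++; else posCounter++; }
    public void remove(boolean negated) { if (negated) negCounter--; else posCounter--; }

    public boolean isEmpty()       { return posCounter == 0 && negCounter == 0; }
    public boolean isNegated()     { return posCounter == 0 && negCounter > 0; }
    public boolean isPositive()    { return posCounter > 0 && negCounter == 0; }
    public boolean isConflicting() { return posCounter > 0 && negCounter > 0; }
    public boolean isUndecided()   { return isConflicting(); } // alias, as in the patch

    public static void main(String[] args) {
        BeliefSetSketch bs = new BeliefSetSketch();
        bs.add(true);    // a @Defeater logically inserts a negative belief
        System.out.println(bs.isNegated() + " " + bs.isUndecided()); // true false
        bs.remove(true); // retracting the justifier empties the set; the patched
        System.out.println(bs.isEmpty()); // true -- delete() re-adds the handle only
    }                                     // when wasNegated && !wasUndecided
}

The testRetractNegativeDefeaters test added in the diff exercises exactly the empty-and-was-negated path: deleting the String justifier must remove the logically inserted negative Foo rather than resurrect it.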
7d8aff2ef670306fc01bd2160b1e755196558da1
orientdb
Fix by Andrey on marshalling/unmarshalling of embedded documents
c
https://github.com/orientechnologies/orientdb
diff --git a/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerCSVAbstract.java b/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerCSVAbstract.java
index 4ff35d1ed82..f188da62984 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerCSVAbstract.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerCSVAbstract.java
@@ -51,6 +51,7 @@
 import com.orientechnologies.orient.core.record.impl.ODocument;
 import com.orientechnologies.orient.core.serialization.serializer.OStringSerializerHelper;
 import com.orientechnologies.orient.core.serialization.serializer.object.OObjectSerializerHelper;
+import com.orientechnologies.orient.core.serialization.serializer.string.OStringSerializerAnyStreamable;
 import com.orientechnologies.orient.core.tx.OTransactionRecordEntry;
 
 @SuppressWarnings("unchecked")
@@ -134,7 +135,12 @@ public Object fieldFromStream(final ORecordInternal<?> iSourceRecord, final OTyp
       if (iValue.length() > 2) {
         // REMOVE BEGIN & END EMBEDDED CHARACTERS
         final String value = iValue.substring(1, iValue.length() - 1);
-        return fieldTypeFromStream((ODocument) iSourceRecord, iType, value);
+
+        // RECORD
+        final Object result = OStringSerializerAnyStreamable.INSTANCE.fromStream(iSourceRecord.getDatabase(), value);
+        if (result instanceof ODocument)
+          ((ODocument) result).addOwner(iSourceRecord);
+        return result;
       } else
         return null;
 
@@ -174,7 +180,7 @@ public Map<String, Object> embeddedMapFromStream(final ODocument iSourceDocument
         String mapValue = entry.get(1);
 
         final OType linkedType;
-
+
         if (iLinkedType == null)
           if (mapValue.length() > 0) {
             linkedType = getType(mapValue);
diff --git a/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerSchemaAware2CSV.java b/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerSchemaAware2CSV.java
index 4c9c877fdbc..5db718dd9c3 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerSchemaAware2CSV.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerSchemaAware2CSV.java
@@ -407,7 +407,7 @@ else if (fieldValue.equals("true") || fieldValue.equals("false"))
           }
         }
 
-        if (type == OType.EMBEDDEDLIST || type == OType.EMBEDDEDSET || type == OType.EMBEDDEDMAP)
+        if (type == OType.EMBEDDEDLIST || type == OType.EMBEDDEDSET || type == OType.EMBEDDEDMAP || type == OType.EMBEDDED)
           // SAVE THE TYPE AS EMBEDDED
           record.field(fieldName, fieldFromStream(iRecord, type, linkedClass, linkedType, fieldName, fieldValue), type);
         else
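The substance of the orientdb fix above is an unwrap-then-dispatch shape: an embedded field value is stripped of its delimiters, rebuilt into a full document by a streamable dispatcher, and re-attached to its containing document so ownership survives the round trip. Below is a minimal runnable sketch of that pattern; every name is hypothetical except fromStream(), addOwner(), and the substring unwrapping, which appear in the diff.

import java.util.HashMap;
import java.util.Map;

// Hypothetical stand-ins, not OrientDB source.
class DocSketch {
    String className;
    final Map<String, Object> fields = new HashMap<>();
    DocSketch owner;

    void addOwner(DocSketch o) { this.owner = o; } // mirrors ODocument.addOwner()
}

class AnyStreamableSketch {
    static final AnyStreamableSketch INSTANCE = new AnyStreamableSketch();

    // Parses "ClassName@key:value" into a document; stands in for
    // OStringSerializerAnyStreamable.fromStream() in the diff.
    DocSketch fromStream(String value) {
        int at = value.indexOf('@');
        DocSketch doc = new DocSketch();
        doc.className = value.substring(0, at);
        String[] kv = value.substring(at + 1).split(":", 2);
        doc.fields.put(kv[0], kv[1]);
        return doc;
    }
}

public class EmbeddedFieldSketch {
    // Shape of the fixed fieldFromStream() branch: unwrap the embedded
    // delimiters, rebuild through the dispatcher, re-attach the owner.
    static Object fieldFromStream(DocSketch source, String iValue) {
        if (iValue.length() > 2) {
            String value = iValue.substring(1, iValue.length() - 1);
            Object result = AnyStreamableSketch.INSTANCE.fromStream(value);
            if (result instanceof DocSketch)
                ((DocSketch) result).addOwner(source);
            return result;
        }
        return null;
    }

    public static void main(String[] args) {
        DocSketch parent = new DocSketch();
        DocSketch child = (DocSketch) fieldFromStream(parent, "(Address@city:Rome)");
        System.out.println(child.className + " " + child.fields
                + " owner attached: " + (child.owner == parent));
        // prints: Address {city=Rome} owner attached: true
    }
}

Re-attaching the owner matters because the parent's dirty-tracking and save cascades depend on the embedded document knowing its container; the old fieldTypeFromStream() path dropped that link.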
ff7d4eebd8ebbf011656313dca8c6ee1a598c2aa
spring-framework
Polish AbstractHandlerMethodMapping

Issue: SPR-11541
p
https://github.com/spring-projects/spring-framework
diff --git a/spring-webmvc/src/main/java/org/springframework/web/servlet/handler/AbstractHandlerMapping.java b/spring-webmvc/src/main/java/org/springframework/web/servlet/handler/AbstractHandlerMapping.java
index c463ee1fcb85..367dcb765bf7 100644
--- a/spring-webmvc/src/main/java/org/springframework/web/servlet/handler/AbstractHandlerMapping.java
+++ b/spring-webmvc/src/main/java/org/springframework/web/servlet/handler/AbstractHandlerMapping.java
@@ -390,10 +390,15 @@ protected HandlerExecutionChain getHandlerExecutionChain(Object handler, HttpSer
 
 	/**
 	 * Retrieve the CORS configuration for the given handler.
+	 * @param handler the handler to check (never {@code null}).
+	 * @param request the current request.
+	 * @return the CORS configuration for the handler or {@code null}.
 	 */
 	protected CorsConfiguration getCorsConfiguration(Object handler, HttpServletRequest request) {
-		handler = (handler instanceof HandlerExecutionChain) ? ((HandlerExecutionChain) handler).getHandler() : handler;
-		if (handler != null && handler instanceof CorsConfigurationSource) {
+		if (handler instanceof HandlerExecutionChain) {
+			handler = ((HandlerExecutionChain) handler).getHandler();
+		}
+		if (handler instanceof CorsConfigurationSource) {
 			return ((CorsConfigurationSource) handler).getCorsConfiguration(request);
 		}
 		return null;
diff --git a/spring-webmvc/src/main/java/org/springframework/web/servlet/handler/AbstractHandlerMethodMapping.java b/spring-webmvc/src/main/java/org/springframework/web/servlet/handler/AbstractHandlerMethodMapping.java
index 9a36308ce00d..4fc653eb8b7a 100644
--- a/spring-webmvc/src/main/java/org/springframework/web/servlet/handler/AbstractHandlerMethodMapping.java
+++ b/spring-webmvc/src/main/java/org/springframework/web/servlet/handler/AbstractHandlerMethodMapping.java
@@ -83,7 +83,7 @@ public abstract class AbstractHandlerMethodMapping<T> extends AbstractHandlerMap
 
 	private final MultiValueMap<String, HandlerMethod> nameMap = new LinkedMultiValueMap<String, HandlerMethod>();
 
-	private final Map<Method, CorsConfiguration> corsConfigurations = new LinkedHashMap<Method, CorsConfiguration>();
+	private final Map<Method, CorsConfiguration> corsMap = new LinkedHashMap<Method, CorsConfiguration>();
 
 
 	/**
@@ -113,20 +113,6 @@ public Map<T, HandlerMethod> getHandlerMethods() {
 		return Collections.unmodifiableMap(this.handlerMethods);
 	}
 
-	protected Map<Method, CorsConfiguration> getCorsConfigurations() {
-		return corsConfigurations;
-	}
-
-	@Override
-	protected CorsConfiguration getCorsConfiguration(Object handler, HttpServletRequest request) {
-		CorsConfiguration config = super.getCorsConfiguration(handler, request);
-		if (config == null && handler instanceof HandlerMethod) {
-			HandlerMethod handlerMethod = (HandlerMethod)handler;
-			config = this.getCorsConfigurations().get(handlerMethod.getMethod());
-		}
-		return config;
-	}
-
 	/**
 	 * Return the handler methods mapped to the mapping with the given name.
 	 * @param mappingName the mapping name
@@ -159,10 +145,9 @@ protected void initHandlerMethods() {
 				BeanFactoryUtils.beanNamesForTypeIncludingAncestors(getApplicationContext(), Object.class) :
 				getApplicationContext().getBeanNamesForType(Object.class));
 
-		for (String beanName : beanNames) {
-			if (!beanName.startsWith(SCOPED_TARGET_NAME_PREFIX) &&
-					isHandler(getApplicationContext().getType(beanName))){
-				detectHandlerMethods(beanName);
+		for (String name : beanNames) {
+			if (!name.startsWith(SCOPED_TARGET_NAME_PREFIX) && isHandler(getApplicationContext().getType(name))) {
+				detectHandlerMethods(name);
 			}
 		}
 		registerMultiMatchCorsConfiguration();
@@ -175,7 +160,7 @@ private void registerMultiMatchCorsConfiguration() {
 		config.addAllowedMethod("*");
 		config.addAllowedHeader("*");
 		config.setAllowCredentials(true);
-		this.corsConfigurations.put(PREFLIGHT_MULTI_MATCH_HANDLER_METHOD.getMethod(), config);
+		this.corsMap.put(PREFLIGHT_MULTI_MATCH_HANDLER_METHOD.getMethod(), config);
 	}
 
 	/**
@@ -262,7 +247,7 @@ protected void registerHandlerMethod(Object handler, Method method, T mapping) {
 
 		CorsConfiguration config = initCorsConfiguration(handler, method, mapping);
 		if (config != null) {
-			this.corsConfigurations.put(method, config);
+			this.corsMap.put(method, config);
 		}
 	}
 
@@ -442,6 +427,14 @@ protected HandlerMethod handleNoMatch(Set<T> mappings, String lookupPath, HttpSe
 		return null;
 	}
 
+	@Override
+	protected CorsConfiguration getCorsConfiguration(Object handler, HttpServletRequest request) {
+		if (handler instanceof HandlerMethod) {
+			this.corsMap.get(((HandlerMethod) handler).getMethod());
+		}
+		return null;
+	}
+
 	/**
 	 * A thin wrapper around a matched HandlerMethod and its mapping, for the purpose of