commit_id
stringlengths 40
40
| project
stringclasses 11
values | commit_message
stringlengths 3
3.04k
| type
stringclasses 3
values | url
stringclasses 11
values | git_diff
stringlengths 555
691k
|
|---|---|---|---|---|---|
cec891510bb4cdb379d11691eaf004289a50d044
|
ReactiveX-RxJava
|
Fix the initialization of Completable.complete()- (-4146)--
|
c
|
https://github.com/ReactiveX/RxJava
|
diff --git a/src/main/java/rx/Completable.java b/src/main/java/rx/Completable.java
index c32a0a2ea8..2f6f3cce50 100644
--- a/src/main/java/rx/Completable.java
+++ b/src/main/java/rx/Completable.java
@@ -93,7 +93,7 @@ public void call(CompletableSubscriber s) {
s.onSubscribe(Subscriptions.unsubscribed());
s.onCompleted();
}
- }, true); // hook is handled in complete()
+ }, false); // hook is handled in complete()
/** Single instance of a never Completable. */
static final Completable NEVER = new Completable(new CompletableOnSubscribe() {
@@ -101,7 +101,7 @@ public void call(CompletableSubscriber s) {
public void call(CompletableSubscriber s) {
s.onSubscribe(Subscriptions.unsubscribed());
}
- }, true); // hook is handled in never()
+ }, false); // hook is handled in never()
/**
* Returns a Completable which terminates as soon as one of the source Completables
@@ -315,7 +315,7 @@ public static Completable complete() {
if (cos == COMPLETE.onSubscribe) {
return COMPLETE;
}
- return new Completable(cos, true);
+ return new Completable(cos, false);
}
/**
@@ -742,7 +742,7 @@ public static Completable never() {
if (cos == NEVER.onSubscribe) {
return NEVER;
}
- return new Completable(cos, true);
+ return new Completable(cos, false);
}
/**
|
d3fde78394aa28a344bc40f7724fc794c5682898
|
elasticsearch
|
Fix test failure.--
|
c
|
https://github.com/elastic/elasticsearch
|
diff --git a/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java b/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java
index d1ac77f254060..5f683ae4a3dcc 100644
--- a/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java
+++ b/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java
@@ -71,7 +71,9 @@ public AtomicReaderContext readerContext() {
public IndexSearcher searcher() {
if (atomicIndexSearcher == null) {
- atomicIndexSearcher = new IndexSearcher(readerContext);
+ // Use the reader directly otherwise the IndexSearcher assertion will trip because it expects a top level
+ // reader context.
+ atomicIndexSearcher = new IndexSearcher(readerContext.reader());
}
return atomicIndexSearcher;
}
|
ebe8052d559ef5fac8a93820cf5847a8de5e9e43
|
spring-framework
|
fixed detection of element type in case of nested- collections (SPR-7569)--
|
c
|
https://github.com/spring-projects/spring-framework
|
diff --git a/org.springframework.core/src/main/java/org/springframework/core/convert/TypeDescriptor.java b/org.springframework.core/src/main/java/org/springframework/core/convert/TypeDescriptor.java
index 72d8d8454ded..59335f2b7a11 100644
--- a/org.springframework.core/src/main/java/org/springframework/core/convert/TypeDescriptor.java
+++ b/org.springframework.core/src/main/java/org/springframework/core/convert/TypeDescriptor.java
@@ -76,6 +76,8 @@ public class TypeDescriptor {
private Field field;
+ private int fieldNestingLevel = 1;
+
private Object value;
private TypeDescriptor elementType;
@@ -133,6 +135,19 @@ public TypeDescriptor(Field field, Class<?> type) {
this.type = type;
}
+ /**
+ * Create a new type descriptor for a field.
+ * Use this constructor when a target conversion point originates from a field.
+ * @param field the field to wrap
+ * @param type the specific type to expose (may be an array/collection element)
+ */
+ private TypeDescriptor(Field field, int nestingLevel, Class<?> type) {
+ Assert.notNull(field, "Field must not be null");
+ this.field = field;
+ this.fieldNestingLevel = nestingLevel;
+ this.type = type;
+ }
+
/**
* Internal constructor for a NULL descriptor.
*/
@@ -397,10 +412,12 @@ public TypeDescriptor forElementType(Class<?> elementType) {
return TypeDescriptor.UNKNOWN;
}
else if (this.methodParameter != null) {
- return new TypeDescriptor(this.methodParameter, elementType);
+ MethodParameter nested = new MethodParameter(this.methodParameter);
+ nested.increaseNestingLevel();
+ return new TypeDescriptor(nested, elementType);
}
else if (this.field != null) {
- return new TypeDescriptor(this.field, elementType);
+ return new TypeDescriptor(this.field, this.fieldNestingLevel + 1, elementType);
}
else {
return TypeDescriptor.valueOf(elementType);
@@ -434,7 +451,7 @@ public int hashCode() {
}
/**
- * A textual representation of the type descriptor (eg. Map<String,Foo>) for use in messages
+ * A textual representation of the type descriptor (eg. Map<String,Foo>) for use in messages.
*/
public String asString() {
return toString();
@@ -442,28 +459,22 @@ public String asString() {
public String toString() {
if (this == TypeDescriptor.NULL) {
- return "[TypeDescriptor.NULL]";
+ return "null";
}
else {
StringBuilder builder = new StringBuilder();
- builder.append("[TypeDescriptor ");
Annotation[] anns = getAnnotations();
for (Annotation ann : anns) {
builder.append("@").append(ann.annotationType().getName()).append(' ');
}
builder.append(ClassUtils.getQualifiedName(getType()));
if (isMap()) {
- Class<?> mapKeyType = getMapKeyType();
- Class<?> valueKeyType = getMapValueType();
- builder.append("<").append(mapKeyType != null ? ClassUtils.getQualifiedName(mapKeyType) : "?");
- builder.append(", ").append(valueKeyType != null ? ClassUtils.getQualifiedName(valueKeyType) : "?");
- builder.append(">");
+ builder.append("<").append(getMapKeyTypeDescriptor());
+ builder.append(", ").append(getMapValueTypeDescriptor()).append(">");
}
else if (isCollection()) {
- Class<?> elementType = getElementType();
- builder.append("<").append(elementType != null ? ClassUtils.getQualifiedName(elementType) : "?").append(">");
+ builder.append("<").append(getElementTypeDescriptor()).append(">");
}
- builder.append("]");
return builder.toString();
}
}
@@ -486,7 +497,7 @@ else if (isCollection()) {
@SuppressWarnings("unchecked")
private Class<?> resolveCollectionElementType() {
if (this.field != null) {
- return GenericCollectionTypeResolver.getCollectionFieldType(this.field);
+ return GenericCollectionTypeResolver.getCollectionFieldType(this.field, this.fieldNestingLevel);
}
else if (this.methodParameter != null) {
return GenericCollectionTypeResolver.getCollectionParameterType(this.methodParameter);
@@ -497,7 +508,10 @@ else if (this.value instanceof Collection) {
return elementType;
}
}
- return (this.type != null ? GenericCollectionTypeResolver.getCollectionType((Class<? extends Collection>) this.type) : null);
+ else if (this.type != null) {
+ return GenericCollectionTypeResolver.getCollectionType((Class<? extends Collection>) this.type);
+ }
+ return null;
}
@SuppressWarnings("unchecked")
@@ -514,7 +528,10 @@ else if (this.value instanceof Map<?, ?>) {
return keyType;
}
}
- return (this.type != null && isMap() ? GenericCollectionTypeResolver.getMapKeyType((Class<? extends Map>) this.type) : null);
+ else if (this.type != null && isMap()) {
+ return GenericCollectionTypeResolver.getMapKeyType((Class<? extends Map>) this.type);
+ }
+ return null;
}
@SuppressWarnings("unchecked")
@@ -531,7 +548,10 @@ else if (this.value instanceof Map<?, ?>) {
return valueType;
}
}
- return (isMap() && this.type != null ? GenericCollectionTypeResolver.getMapValueType((Class<? extends Map>) this.type) : null);
+ else if (this.type != null && isMap()) {
+ return GenericCollectionTypeResolver.getMapValueType((Class<? extends Map>) this.type);
+ }
+ return null;
}
private Annotation[] resolveAnnotations() {
diff --git a/org.springframework.core/src/test/java/org/springframework/core/convert/TypeDescriptorTests.java b/org.springframework.core/src/test/java/org/springframework/core/convert/TypeDescriptorTests.java
index 4d00866c0840..64f04e66df9c 100644
--- a/org.springframework.core/src/test/java/org/springframework/core/convert/TypeDescriptorTests.java
+++ b/org.springframework.core/src/test/java/org/springframework/core/convert/TypeDescriptorTests.java
@@ -16,16 +16,16 @@
package org.springframework.core.convert;
-import static junit.framework.Assert.assertEquals;
-import static junit.framework.Assert.assertTrue;
-import static org.junit.Assert.assertFalse;
-
import java.util.ArrayList;
+import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
import org.junit.Test;
/**
@@ -33,44 +33,75 @@
*/
public class TypeDescriptorTests {
- List<String> listOfString;
- int[] intArray;
- List<String>[] arrayOfListOfString;
+ public List<String> listOfString;
+
+ public List<List<String>> listOfListOfString = new ArrayList<List<String>>();
+
+ public List<List> listOfListOfUnknown = new ArrayList<List>();
+
+ public int[] intArray;
+
+ public List<String>[] arrayOfListOfString;
+
+ public List<Integer> listField = new ArrayList<Integer>();
+
+ public Map<String, Integer> mapField = new HashMap<String, Integer>();
+
@Test
- public void listDescriptors() throws Exception {
+ public void listDescriptor() throws Exception {
TypeDescriptor typeDescriptor = new TypeDescriptor(TypeDescriptorTests.class.getDeclaredField("listOfString"));
assertFalse(typeDescriptor.isArray());
- assertEquals(List.class,typeDescriptor.getType());
- assertEquals(String.class,typeDescriptor.getElementType());
+ assertEquals(List.class, typeDescriptor.getType());
+ assertEquals(String.class, typeDescriptor.getElementType());
// TODO caught shorten these names but it is OK that they are fully qualified for now
- assertEquals("[TypeDescriptor java.util.List<java.lang.String>]",typeDescriptor.asString());
+ assertEquals("java.util.List<java.lang.String>", typeDescriptor.asString());
+ }
+
+ @Test
+ public void listOfListOfStringDescriptor() throws Exception {
+ TypeDescriptor typeDescriptor = new TypeDescriptor(TypeDescriptorTests.class.getDeclaredField("listOfListOfString"));
+ assertFalse(typeDescriptor.isArray());
+ assertEquals(List.class, typeDescriptor.getType());
+ assertEquals(List.class, typeDescriptor.getElementType());
+ assertEquals(String.class, typeDescriptor.getElementTypeDescriptor().getElementType());
+ assertEquals("java.util.List<java.util.List<java.lang.String>>", typeDescriptor.asString());
}
-
+
@Test
- public void arrayTypeDescriptors() throws Exception {
+ public void listOfListOfUnknownDescriptor() throws Exception {
+ TypeDescriptor typeDescriptor = new TypeDescriptor(TypeDescriptorTests.class.getDeclaredField("listOfListOfUnknown"));
+ assertFalse(typeDescriptor.isArray());
+ assertEquals(List.class, typeDescriptor.getType());
+ assertEquals(List.class, typeDescriptor.getElementType());
+ assertEquals(Object.class, typeDescriptor.getElementTypeDescriptor().getElementType());
+ assertEquals("java.util.List<java.util.List<java.lang.Object>>", typeDescriptor.asString());
+ }
+
+ @Test
+ public void arrayTypeDescriptor() throws Exception {
TypeDescriptor typeDescriptor = new TypeDescriptor(TypeDescriptorTests.class.getDeclaredField("intArray"));
assertTrue(typeDescriptor.isArray());
assertEquals(Integer.TYPE,typeDescriptor.getElementType());
- assertEquals("[TypeDescriptor int[]]",typeDescriptor.asString());
+ assertEquals("int[]",typeDescriptor.asString());
}
@Test
- public void buildingArrayTypeDescriptors() throws Exception {
+ public void buildingArrayTypeDescriptor() throws Exception {
TypeDescriptor typeDescriptor = TypeDescriptor.valueOf(int[].class);
assertTrue(typeDescriptor.isArray());
- assertEquals(Integer.TYPE,typeDescriptor.getElementType());
+ assertEquals(Integer.TYPE ,typeDescriptor.getElementType());
}
-
+
@Test
- public void complexTypeDescriptors() throws Exception {
+ public void complexTypeDescriptor() throws Exception {
TypeDescriptor typeDescriptor = new TypeDescriptor(TypeDescriptorTests.class.getDeclaredField("arrayOfListOfString"));
assertTrue(typeDescriptor.isArray());
assertEquals(List.class,typeDescriptor.getElementType());
// TODO asc notice that the type of the list elements is lost: typeDescriptor.getElementType() should return a TypeDescriptor
- assertEquals("[TypeDescriptor java.util.List[]]",typeDescriptor.asString());
+ assertEquals("java.util.List[]",typeDescriptor.asString());
}
-
+
@Test
public void testEquals() throws Exception {
TypeDescriptor t1 = TypeDescriptor.valueOf(String.class);
@@ -94,9 +125,5 @@ public void testEquals() throws Exception {
TypeDescriptor t12 = new TypeDescriptor(getClass().getField("mapField"));
assertEquals(t11, t12);
}
-
- public List<Integer> listField = new ArrayList<Integer>();
-
- public Map<String, Integer> mapField = new HashMap<String, Integer>();
}
|
76f5cc4ae6906d0885a1626ad280f4402ce7ac69
|
ReactiveX-RxJava
|
Better naming, and new test--... using to compare 0.19 and 0.20 so want in both.-
|
p
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/perf/java/rx/operators/OperatorMergePerf.java b/rxjava-core/src/perf/java/rx/operators/OperatorMergePerf.java
index 17fafcc566..687d81e789 100644
--- a/rxjava-core/src/perf/java/rx/operators/OperatorMergePerf.java
+++ b/rxjava-core/src/perf/java/rx/operators/OperatorMergePerf.java
@@ -53,7 +53,22 @@ public int getSize() {
}
@Benchmark
- public void mergeSynchronous(final Input input) throws InterruptedException {
+ public void merge1SyncStreamOfN(final Input input) throws InterruptedException {
+ Observable<Observable<Integer>> os = Observable.just(1).map(new Func1<Integer, Observable<Integer>>() {
+
+ @Override
+ public Observable<Integer> call(Integer i) {
+ return Observable.range(0, input.size);
+ }
+
+ });
+ LatchedObserver<Integer> o = input.newLatchedObserver();
+ Observable.merge(os).subscribe(o);
+ o.latch.await();
+ }
+
+ @Benchmark
+ public void mergeNSyncStreamsOfN(final Input input) throws InterruptedException {
Observable<Observable<Integer>> os = input.observable.map(new Func1<Integer, Observable<Integer>>() {
@Override
@@ -68,7 +83,7 @@ public Observable<Integer> call(Integer i) {
}
@Benchmark
- public void mergeAsynchronous(final Input input) throws InterruptedException {
+ public void mergeNAsyncStreamsOfN(final Input input) throws InterruptedException {
Observable<Observable<Integer>> os = input.observable.map(new Func1<Integer, Observable<Integer>>() {
@Override
@@ -83,7 +98,7 @@ public Observable<Integer> call(Integer i) {
}
@Benchmark
- public void mergeTwoAsyncStreams(final Input input) throws InterruptedException {
+ public void mergeTwoAsyncStreamsOfN(final Input input) throws InterruptedException {
LatchedObserver<Integer> o = input.newLatchedObserver();
Observable<Integer> ob = Observable.range(0, input.size).subscribeOn(Schedulers.computation());
Observable.merge(ob, ob).subscribe(o);
@@ -91,7 +106,7 @@ public void mergeTwoAsyncStreams(final Input input) throws InterruptedException
}
@Benchmark
- public void mergeNStreams(final InputForMergeN input) throws InterruptedException {
+ public void mergeNSyncStreamsOf1(final InputForMergeN input) throws InterruptedException {
LatchedObserver<Integer> o = input.newLatchedObserver();
Observable.merge(input.observables).subscribe(o);
o.latch.await();
|
6f14330db96d599a0ec4a880786320d1b493c861
|
intellij-community
|
IDEA-35738 Cannot drag around label with empty- text--
|
c
|
https://github.com/JetBrains/intellij-community
|
diff --git a/plugins/ui-designer/src/com/intellij/uiDesigner/FormEditingUtil.java b/plugins/ui-designer/src/com/intellij/uiDesigner/FormEditingUtil.java
index 7a0803b70538a..69ef61ec61211 100644
--- a/plugins/ui-designer/src/com/intellij/uiDesigner/FormEditingUtil.java
+++ b/plugins/ui-designer/src/com/intellij/uiDesigner/FormEditingUtil.java
@@ -186,12 +186,56 @@ private static void deleteEmptyGridCells(final RadContainer parent, final GridCo
}
}
+ private static final int EMPTY_COMPONENT_SIZE = 5;
+
+ private static Component getDeepestEmptyComponentAt(JComponent parent, Point location) {
+ int size = parent.getComponentCount();
+
+ for (int i = 0; i < size; i++) {
+ Component child = parent.getComponent(i);
+
+ if (child.isShowing()) {
+ if (child.getWidth() < EMPTY_COMPONENT_SIZE || child.getHeight() < EMPTY_COMPONENT_SIZE) {
+ Point childLocation = child.getLocationOnScreen();
+ Rectangle bounds = new Rectangle();
+
+ bounds.x = childLocation.x;
+ bounds.y = childLocation.y;
+ bounds.width = child.getWidth();
+ bounds.height = child.getHeight();
+ bounds.grow(child.getWidth() < EMPTY_COMPONENT_SIZE ? EMPTY_COMPONENT_SIZE : 0,
+ child.getHeight() < EMPTY_COMPONENT_SIZE ? EMPTY_COMPONENT_SIZE : 0);
+
+ if (bounds.contains(location)) {
+ return child;
+ }
+ }
+
+ if (child instanceof JComponent) {
+ Component result = getDeepestEmptyComponentAt((JComponent)child, location);
+
+ if (result != null) {
+ return result;
+ }
+ }
+ }
+ }
+
+ return null;
+ }
+
/**
* @param x in editor pane coordinates
* @param y in editor pane coordinates
*/
public static RadComponent getRadComponentAt(final RadRootContainer rootContainer, final int x, final int y){
- Component c = SwingUtilities.getDeepestComponentAt(rootContainer.getDelegee(), x, y);
+ Point location = new Point(x, y);
+ SwingUtilities.convertPointToScreen(location, rootContainer.getDelegee());
+ Component c = getDeepestEmptyComponentAt(rootContainer.getDelegee(), location);
+
+ if (c == null) {
+ c = SwingUtilities.getDeepestComponentAt(rootContainer.getDelegee(), x, y);
+ }
RadComponent result = null;
@@ -310,9 +354,9 @@ public static ArrayList<RadComponent> getAllSelectedComponents(@NotNull final Gu
final ArrayList<RadComponent> result = new ArrayList<RadComponent>();
iterate(
editor.getRootContainer(),
- new ComponentVisitor<RadComponent>(){
+ new ComponentVisitor<RadComponent>() {
public boolean visit(final RadComponent component) {
- if(component.isSelected()){
+ if (component.isSelected()) {
result.add(component);
}
return true;
@@ -793,26 +837,28 @@ public static void iterateStringDescriptors(final IComponent component,
iterate(component, new ComponentVisitor<IComponent>() {
public boolean visit(final IComponent component) {
- for(IProperty prop: component.getModifiedProperties()) {
+ for (IProperty prop : component.getModifiedProperties()) {
Object value = prop.getPropertyValue(component);
if (value instanceof StringDescriptor) {
- if (!visitor.visit(component, (StringDescriptor) value)) {
+ if (!visitor.visit(component, (StringDescriptor)value)) {
return false;
}
}
}
if (component.getParentContainer() instanceof ITabbedPane) {
- StringDescriptor tabTitle = ((ITabbedPane) component.getParentContainer()).getTabProperty(component, ITabbedPane.TAB_TITLE_PROPERTY);
+ StringDescriptor tabTitle =
+ ((ITabbedPane)component.getParentContainer()).getTabProperty(component, ITabbedPane.TAB_TITLE_PROPERTY);
if (tabTitle != null && !visitor.visit(component, tabTitle)) {
return false;
}
- StringDescriptor tabToolTip = ((ITabbedPane) component.getParentContainer()).getTabProperty(component, ITabbedPane.TAB_TOOLTIP_PROPERTY);
+ StringDescriptor tabToolTip =
+ ((ITabbedPane)component.getParentContainer()).getTabProperty(component, ITabbedPane.TAB_TOOLTIP_PROPERTY);
if (tabToolTip != null && !visitor.visit(component, tabToolTip)) {
return false;
}
}
if (component instanceof IContainer) {
- final StringDescriptor borderTitle = ((IContainer) component).getBorderTitle();
+ final StringDescriptor borderTitle = ((IContainer)component).getBorderTitle();
if (borderTitle != null && !visitor.visit(component, borderTitle)) {
return false;
}
|
7819aeccba263171444ca56c1621aca3f7d649e8
|
intellij-community
|
NPE--
|
c
|
https://github.com/JetBrains/intellij-community
|
diff --git a/lang-impl/src/com/intellij/codeInspection/ex/InspectionRVContentProvider.java b/lang-impl/src/com/intellij/codeInspection/ex/InspectionRVContentProvider.java
index 8da0e755dfc7f..3ec11d5a283e0 100644
--- a/lang-impl/src/com/intellij/codeInspection/ex/InspectionRVContentProvider.java
+++ b/lang-impl/src/com/intellij/codeInspection/ex/InspectionRVContentProvider.java
@@ -99,13 +99,18 @@ protected <T> List<InspectionTreeNode> buildTree(final Map<String, Set<T>> packa
if (packageNode.getChildCount() > 0) {
InspectionModuleNode moduleNode = moduleNodes.get(moduleName);
if (moduleNode == null) {
- final Module module = ModuleManager.getInstance(myProject).findModuleByName(moduleName);
- if (module != null) {
- moduleNode = new InspectionModuleNode(module);
- moduleNodes.put(moduleName, moduleNode);
- }
- else { //module content was removed ?
- continue;
+ if (moduleName != null) {
+ final Module module = ModuleManager.getInstance(myProject).findModuleByName(moduleName);
+ if (module != null) {
+ moduleNode = new InspectionModuleNode(module);
+ moduleNodes.put(moduleName, moduleNode);
+ }
+ else { //module content was removed ?
+ continue;
+ }
+ } else {
+ content.addAll(packageNodes.values());
+ break;
}
}
if (packageNode.getPackageName() != null) {
|
c12028b5b932403ea2ce77a45ad699a013b8d488
|
hbase
|
HBASE-2057 Cluster won't stop--git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@894111 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hbase
|
diff --git a/src/java/org/apache/hadoop/hbase/master/ZKMasterAddressWatcher.java b/src/java/org/apache/hadoop/hbase/master/ZKMasterAddressWatcher.java
index 49f783dfc87b..fccf46d93896 100644
--- a/src/java/org/apache/hadoop/hbase/master/ZKMasterAddressWatcher.java
+++ b/src/java/org/apache/hadoop/hbase/master/ZKMasterAddressWatcher.java
@@ -110,6 +110,7 @@ boolean writeAddressToZooKeeper(
}
if(this.zookeeper.writeMasterAddress(address)) {
this.zookeeper.setClusterState(true);
+ this.zookeeper.setClusterStateWatch(this);
// Watch our own node
this.zookeeper.readMasterAddress(this);
return true;
|
747ce871172baf71ecc0eb8c86f5a0aa4f624b4f
|
intellij-community
|
StackOverflow fixed--
|
c
|
https://github.com/JetBrains/intellij-community
|
diff --git a/source/com/intellij/codeInsight/completion/Java15CompletionData.java b/source/com/intellij/codeInsight/completion/Java15CompletionData.java
index 59328a9f08295..87dc6862ace7f 100644
--- a/source/com/intellij/codeInsight/completion/Java15CompletionData.java
+++ b/source/com/intellij/codeInsight/completion/Java15CompletionData.java
@@ -77,7 +77,7 @@ public boolean isAcceptable(Object element, PsiElement context) {
}, 2)
)));
final CompletionVariant variant = new CompletionVariant(PsiReferenceExpression.class, position);
- variant.addCompletionFilterOnElement(new ClassFilter(PsiEnumConstant.class), TailType.COND_EXPR_COLON);
+ variant.addCompletionFilterOnElement(new ClassFilter(PsiEnumConstant.class), ':');
registerVariant(variant);
}
}
|
e15bf1a95abeec1a39f212d65887d88a19f9f68e
|
drools
|
-fixed MVEL parser context naming issue.--git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@23995 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-
|
c
|
https://github.com/kiegroup/drools
|
diff --git a/drools-compiler/src/main/java/org/drools/rule/builder/dialect/mvel/MVELDialect.java b/drools-compiler/src/main/java/org/drools/rule/builder/dialect/mvel/MVELDialect.java
index 8206e2099a2..4ddff78583d 100644
--- a/drools-compiler/src/main/java/org/drools/rule/builder/dialect/mvel/MVELDialect.java
+++ b/drools-compiler/src/main/java/org/drools/rule/builder/dialect/mvel/MVELDialect.java
@@ -134,9 +134,7 @@ public class MVELDialect
private boolean strictMode;
private int languageLevel;
- public static final Object COMPILER_LOCK = new Object();
-
- private static AtomicInteger nameCounter = new AtomicInteger();
+ public static final Object COMPILER_LOCK = new Object();
public MVELDialect(PackageBuilder builder,
PackageRegistry pkgRegistry,
@@ -666,13 +664,17 @@ public ParserContext getParserContext(final Dialect.AnalysisResult analysis,
// @todo proper source file name
String name;
if ( context != null && context.getPkg() != null & context.getPkg().getName() != null ) {
- name = context.getPkg().getName();
+ if ( context instanceof RuleBuildContext ) {
+ name = context.getPkg().getName() + "." + ((RuleBuildContext)context).getRuleDescr().getClassName();
+ } else {
+ name = context.getPkg().getName() + ".Unknown";
+ }
} else {
- name = "";
+ name = "Unknown";
}
final ParserContext parserContext = new ParserContext( this.imports,
null,
- name + "_" + nameCounter.getAndIncrement() );
+ name );
// getRuleDescr().getClassName() );
for ( Iterator it = this.packageImports.values().iterator(); it.hasNext(); ) {
|
63d60c6aff8362db9683b83d670556a1a828e8d1
|
drools
|
[DROOLS-198] avoid useless cleanup of staged- activations in phreak--
|
p
|
https://github.com/kiegroup/drools
|
diff --git a/drools-workbench-models/drools-workbench-models-test-scenarios/src/test/java/org/drools/workbench/models/testscenarios/backend/ScenarioRunnerTest.java b/drools-workbench-models/drools-workbench-models-test-scenarios/src/test/java/org/drools/workbench/models/testscenarios/backend/ScenarioRunnerTest.java
index 7b4376e75c3..29dd30023e0 100644
--- a/drools-workbench-models/drools-workbench-models-test-scenarios/src/test/java/org/drools/workbench/models/testscenarios/backend/ScenarioRunnerTest.java
+++ b/drools-workbench-models/drools-workbench-models-test-scenarios/src/test/java/org/drools/workbench/models/testscenarios/backend/ScenarioRunnerTest.java
@@ -627,7 +627,6 @@ public void testRuleFlowGroupActivation() throws Exception {
new FieldData("name",
"mic")),
false), new ActivateRuleFlowGroup("asdf")};
- ksession.getAgenda().clear();
scenario.getFixtures().addAll(Arrays.asList(given));
scenario.getFixtures().add(executionTrace);
((RuleFlowGroupImpl) ksession.getAgenda().getRuleFlowGroup("asdf")).setAutoDeactivate(false);
|
8ee632caa79b92b1af98684f83b01c3447a119ee
|
hadoop
|
YARN-2740. Fix NodeLabelsManager to properly handle- node label modifications when distributed node label configuration enabled.- (Naganarasimha G R via wangda)--(cherry picked from commit db1b674b50ddecf2774f4092d677c412722bdcb1)-
|
c
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 20de1edb7efbe..ca9247f13eb2d 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -217,6 +217,9 @@ Release 2.8.0 - UNRELEASED
YARN-3530. ATS throws exception on trying to filter results without otherinfo.
(zhijie shen via xgong)
+ YARN-2740. Fix NodeLabelsManager to properly handle node label modifications
+ when distributed node label configuration enabled. (Naganarasimha G R via wangda)
+
Release 2.7.1 - UNRELEASED
INCOMPATIBLE CHANGES
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index c8f9648147fb3..4dd01d24bb8d9 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -1779,6 +1779,12 @@ private static void addDeprecatedKeys() {
public static final String DEFAULT_NODELABEL_CONFIGURATION_TYPE =
CENTALIZED_NODELABEL_CONFIGURATION_TYPE;
+ @Private
+ public static boolean isDistributedNodeLabelConfiguration(Configuration conf) {
+ return DISTRIBUTED_NODELABEL_CONFIGURATION_TYPE.equals(conf.get(
+ NODELABEL_CONFIGURATION_TYPE, DEFAULT_NODELABEL_CONFIGURATION_TYPE));
+ }
+
public YarnConfiguration() {
super();
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/CommonNodeLabelsManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/CommonNodeLabelsManager.java
index 7493169201ebc..f2ff0f629971c 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/CommonNodeLabelsManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/CommonNodeLabelsManager.java
@@ -97,6 +97,8 @@ public class CommonNodeLabelsManager extends AbstractService {
protected NodeLabelsStore store;
private boolean nodeLabelsEnabled = false;
+ private boolean isDistributedNodeLabelConfiguration = false;
+
/**
* A <code>Host</code> can have multiple <code>Node</code>s
*/
@@ -213,6 +215,10 @@ protected void serviceInit(Configuration conf) throws Exception {
nodeLabelsEnabled =
conf.getBoolean(YarnConfiguration.NODE_LABELS_ENABLED,
YarnConfiguration.DEFAULT_NODE_LABELS_ENABLED);
+
+ isDistributedNodeLabelConfiguration =
+ YarnConfiguration.isDistributedNodeLabelConfiguration(conf);
+
if (nodeLabelsEnabled) {
initNodeLabelStore(conf);
}
@@ -223,7 +229,7 @@ protected void serviceInit(Configuration conf) throws Exception {
protected void initNodeLabelStore(Configuration conf) throws Exception {
this.store = new FileSystemNodeLabelsStore(this);
this.store.init(conf);
- this.store.recover();
+ this.store.recover(isDistributedNodeLabelConfiguration);
}
// for UT purpose
@@ -613,7 +619,10 @@ protected void internalUpdateLabelsOnNodes(
}
}
- if (null != dispatcher) {
+ if (null != dispatcher && !isDistributedNodeLabelConfiguration) {
+ // In case of DistributedNodeLabelConfiguration, no need to save the the
+ // NodeLabels Mapping to the back-end store, as on RM restart/failover
+ // NodeLabels are collected from NM through Register/Heartbeat again
dispatcher.getEventHandler().handle(
new UpdateNodeToLabelsMappingsEvent(newNMToLabels));
}
@@ -799,8 +808,10 @@ public List<NodeLabel> getClusterNodeLabels() {
readLock.lock();
List<NodeLabel> nodeLabels = new ArrayList<>();
for (RMNodeLabel label : labelCollections.values()) {
- nodeLabels.add(NodeLabel.newInstance(label.getLabelName(),
- label.getIsExclusive()));
+ if (!label.getLabelName().equals(NO_LABEL)) {
+ nodeLabels.add(NodeLabel.newInstance(label.getLabelName(),
+ label.getIsExclusive()));
+ }
}
return nodeLabels;
} finally {
@@ -824,7 +835,6 @@ public boolean isExclusiveNodeLabel(String nodeLabel) throws IOException {
readLock.unlock();
}
}
-
private void checkAndThrowLabelName(String label) throws IOException {
if (label == null || label.isEmpty() || label.length() > MAX_LABEL_LENGTH) {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/FileSystemNodeLabelsStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/FileSystemNodeLabelsStore.java
index ea185f2c0a248..f26e2048a02cd 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/FileSystemNodeLabelsStore.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/FileSystemNodeLabelsStore.java
@@ -154,8 +154,12 @@ public void removeClusterNodeLabels(Collection<String> labels)
ensureCloseEditlogFile();
}
+ /* (non-Javadoc)
+ * @see org.apache.hadoop.yarn.nodelabels.NodeLabelsStore#recover(boolean)
+ */
@Override
- public void recover() throws YarnException, IOException {
+ public void recover(boolean ignoreNodeToLabelsMappings) throws YarnException,
+ IOException {
/*
* Steps of recover
* 1) Read from last mirror (from mirror or mirror.old)
@@ -222,7 +226,15 @@ public void recover() throws YarnException, IOException {
new ReplaceLabelsOnNodeRequestPBImpl(
ReplaceLabelsOnNodeRequestProto.parseDelimitedFrom(is))
.getNodeToLabels();
- mgr.replaceLabelsOnNode(map);
+ if (!ignoreNodeToLabelsMappings) {
+ /*
+ * In case of Distributed NodeLabels setup,
+ * ignoreNodeToLabelsMappings will be set to true and recover will
+ * be invoked. As RM will collect the node labels from NM through
+ * registration/HB
+ */
+ mgr.replaceLabelsOnNode(map);
+ }
break;
}
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/NodeLabelsStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/NodeLabelsStore.java
index 47b7370dff843..46b94fd0d5c9a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/NodeLabelsStore.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/NodeLabelsStore.java
@@ -56,9 +56,18 @@ public abstract void removeClusterNodeLabels(Collection<String> labels)
throws IOException;
/**
- * Recover labels and node to labels mappings from store
+ * Recover labels and node to labels mappings from store, but if
+ * ignoreNodeToLabelsMappings is true then node to labels mappings should not
+ * be recovered. In case of Distributed NodeLabels setup
+ * ignoreNodeToLabelsMappings will be set to true and recover will be invoked
+ * as RM will collect the node labels from NM through registration/HB
+ *
+   * @param ignoreNodeToLabelsMappings if true, node-to-labels mappings are not recovered
+ * @throws IOException
+ * @throws YarnException
*/
- public abstract void recover() throws IOException, YarnException;
+ public abstract void recover(boolean ignoreNodeToLabelsMappings)
+ throws IOException, YarnException;
public void init(Configuration conf) throws Exception {}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/DummyCommonNodeLabelsManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/DummyCommonNodeLabelsManager.java
index 48d6dc877154b..fce663a1c952c 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/DummyCommonNodeLabelsManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/DummyCommonNodeLabelsManager.java
@@ -39,7 +39,8 @@ public void initNodeLabelStore(Configuration conf) {
this.store = new NodeLabelsStore(this) {
@Override
- public void recover() throws IOException {
+ public void recover(boolean ignoreNodeToLabelsMappings)
+ throws IOException {
}
@Override
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestCommonNodeLabelsManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestCommonNodeLabelsManager.java
index beb2cf8585851..09838b43ada1d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestCommonNodeLabelsManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestCommonNodeLabelsManager.java
@@ -554,4 +554,29 @@ private void verifyNodeLabelAdded(Set<String> expectedAddedLabelNames,
Assert.assertTrue(expectedAddedLabelNames.contains(label.getName()));
}
}
+
+ @Test(timeout = 5000)
+ public void testReplaceLabelsOnNodeInDistributedMode() throws Exception {
+    // create a new DummyCommonNodeLabelsManager distinct from the one built in @Before
+ mgr.stop();
+ mgr = new DummyCommonNodeLabelsManager();
+ Configuration conf = new YarnConfiguration();
+ conf.setBoolean(YarnConfiguration.NODE_LABELS_ENABLED, true);
+ conf.set(YarnConfiguration.NODELABEL_CONFIGURATION_TYPE,
+ YarnConfiguration.DISTRIBUTED_NODELABEL_CONFIGURATION_TYPE);
+
+ mgr.init(conf);
+ mgr.start();
+
+ mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3"));
+ mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1")));
+ Set<String> labelsByNode = mgr.getLabelsByNode(toNodeId("n1"));
+
+ Assert.assertNull(
+ "Labels are not expected to be written to the NodeLabelStore",
+ mgr.lastNodeToLabels);
+ Assert.assertNotNull("Updated labels should be available from the Mgr",
+ labelsByNode);
+ Assert.assertTrue(labelsByNode.contains("p1"));
+ }
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestFileSystemNodeLabelsStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestFileSystemNodeLabelsStore.java
index f070c205f5a1c..fb60cd6a6427c 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestFileSystemNodeLabelsStore.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestFileSystemNodeLabelsStore.java
@@ -144,6 +144,40 @@ public void testRecoverWithMirror() throws Exception {
mgr.stop();
}
+ @SuppressWarnings({ "unchecked", "rawtypes" })
+ @Test(timeout = 10000)
+ public void testRecoverWithDistributedNodeLabels() throws Exception {
+ mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3"));
+ mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p4"));
+ mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p5", "p6"));
+ mgr.replaceLabelsOnNode((Map) ImmutableMap.of(toNodeId("n1"), toSet("p1"),
+ toNodeId("n2"), toSet("p2")));
+ mgr.replaceLabelsOnNode((Map) ImmutableMap.of(toNodeId("n3"), toSet("p3"),
+ toNodeId("n4"), toSet("p4"), toNodeId("n5"), toSet("p5"),
+ toNodeId("n6"), toSet("p6"), toNodeId("n7"), toSet("p6")));
+
+ mgr.removeFromClusterNodeLabels(toSet("p1"));
+ mgr.removeFromClusterNodeLabels(Arrays.asList("p3", "p5"));
+ mgr.stop();
+
+ mgr = new MockNodeLabelManager();
+ Configuration cf = new Configuration(conf);
+ cf.set(YarnConfiguration.NODELABEL_CONFIGURATION_TYPE,
+ YarnConfiguration.DISTRIBUTED_NODELABEL_CONFIGURATION_TYPE);
+ mgr.init(cf);
+
+ // check variables
+ Assert.assertEquals(3, mgr.getClusterNodeLabels().size());
+ Assert.assertTrue(mgr.getClusterNodeLabelNames().containsAll(
+ Arrays.asList("p2", "p4", "p6")));
+
+ Assert.assertTrue("During recovery in distributed node-labels setup, "
+ + "node to labels mapping should not be recovered ", mgr
+ .getNodeLabels().size() == 0);
+
+ mgr.stop();
+ }
+
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test(timeout = 10000)
public void testEditlogRecover() throws Exception {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java
index c921326fbdce3..0ad90c0ed4c6b 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java
@@ -112,6 +112,9 @@ public class AdminService extends CompositeService implements
private final RecordFactory recordFactory =
RecordFactoryProvider.getRecordFactory(null);
+ @VisibleForTesting
+ boolean isDistributedNodeLabelConfiguration = false;
+
public AdminService(ResourceManager rm, RMContext rmContext) {
super(AdminService.class.getName());
this.rm = rm;
@@ -141,6 +144,10 @@ public void serviceInit(Configuration conf) throws Exception {
YarnConfiguration.DEFAULT_YARN_ADMIN_ACL)), UserGroupInformation
.getCurrentUser());
rmId = conf.get(YarnConfiguration.RM_HA_ID);
+
+ isDistributedNodeLabelConfiguration =
+ YarnConfiguration.isDistributedNodeLabelConfiguration(conf);
+
super.serviceInit(conf);
}
@@ -637,32 +644,35 @@ public AddToClusterNodeLabelsResponse addToClusterNodeLabels(AddToClusterNodeLab
@Override
public RemoveFromClusterNodeLabelsResponse removeFromClusterNodeLabels(
RemoveFromClusterNodeLabelsRequest request) throws YarnException, IOException {
- String argName = "removeFromClusterNodeLabels";
+ String operation = "removeFromClusterNodeLabels";
final String msg = "remove labels.";
- UserGroupInformation user = checkAcls(argName);
- checkRMStatus(user.getShortUserName(), argName, msg);
+ UserGroupInformation user = checkAcls(operation);
+
+ checkRMStatus(user.getShortUserName(), operation, msg);
RemoveFromClusterNodeLabelsResponse response =
recordFactory.newRecordInstance(RemoveFromClusterNodeLabelsResponse.class);
try {
rmContext.getNodeLabelManager().removeFromClusterNodeLabels(request.getNodeLabels());
RMAuditLogger
- .logSuccess(user.getShortUserName(), argName, "AdminService");
+ .logSuccess(user.getShortUserName(), operation, "AdminService");
return response;
} catch (IOException ioe) {
- throw logAndWrapException(ioe, user.getShortUserName(), argName, msg);
+ throw logAndWrapException(ioe, user.getShortUserName(), operation, msg);
}
}
@Override
public ReplaceLabelsOnNodeResponse replaceLabelsOnNode(
ReplaceLabelsOnNodeRequest request) throws YarnException, IOException {
- String argName = "replaceLabelsOnNode";
+ String operation = "replaceLabelsOnNode";
final String msg = "set node to labels.";
- UserGroupInformation user = checkAcls(argName);
- checkRMStatus(user.getShortUserName(), argName, msg);
+ checkAndThrowIfDistributedNodeLabelConfEnabled(operation);
+ UserGroupInformation user = checkAcls(operation);
+
+ checkRMStatus(user.getShortUserName(), operation, msg);
ReplaceLabelsOnNodeResponse response =
recordFactory.newRecordInstance(ReplaceLabelsOnNodeResponse.class);
@@ -670,30 +680,41 @@ public ReplaceLabelsOnNodeResponse replaceLabelsOnNode(
rmContext.getNodeLabelManager().replaceLabelsOnNode(
request.getNodeToLabels());
RMAuditLogger
- .logSuccess(user.getShortUserName(), argName, "AdminService");
+ .logSuccess(user.getShortUserName(), operation, "AdminService");
return response;
} catch (IOException ioe) {
- throw logAndWrapException(ioe, user.getShortUserName(), argName, msg);
+ throw logAndWrapException(ioe, user.getShortUserName(), operation, msg);
}
}
- private void checkRMStatus(String user, String argName, String msg)
+ private void checkRMStatus(String user, String operation, String msg)
throws StandbyException {
if (!isRMActive()) {
- RMAuditLogger.logFailure(user, argName, "",
+ RMAuditLogger.logFailure(user, operation, "",
"AdminService", "ResourceManager is not active. Can not " + msg);
throwStandbyException();
}
}
private YarnException logAndWrapException(Exception exception, String user,
- String argName, String msg) throws YarnException {
+ String operation, String msg) throws YarnException {
LOG.warn("Exception " + msg, exception);
- RMAuditLogger.logFailure(user, argName, "",
+ RMAuditLogger.logFailure(user, operation, "",
"AdminService", "Exception " + msg);
return RPCUtil.getRemoteException(exception);
}
+ private void checkAndThrowIfDistributedNodeLabelConfEnabled(String operation)
+ throws YarnException {
+ if (isDistributedNodeLabelConfiguration) {
+ String msg =
+ String.format("Error when invoke method=%s because of "
+ + "distributed node label configuration enabled.", operation);
+ LOG.error(msg);
+ throw RPCUtil.getRemoteException(new IOException(msg));
+ }
+ }
+
@Override
public CheckForDecommissioningNodesResponse checkForDecommissioningNodes(
CheckForDecommissioningNodesRequest checkForDecommissioningNodesRequest)
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java
index 5e2dc7e4f2543..16b6a890ac923 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java
@@ -104,7 +104,7 @@ public class ResourceTrackerService extends AbstractService implements
private int minAllocMb;
private int minAllocVcores;
- private boolean isDistributesNodeLabelsConf;
+ private boolean isDistributedNodeLabelsConf;
static {
resync.setNodeAction(NodeAction.RESYNC);
@@ -155,13 +155,8 @@ protected void serviceInit(Configuration conf) throws Exception {
YarnConfiguration.RM_NODEMANAGER_MINIMUM_VERSION,
YarnConfiguration.DEFAULT_RM_NODEMANAGER_MINIMUM_VERSION);
- String nodeLabelConfigurationType =
- conf.get(YarnConfiguration.NODELABEL_CONFIGURATION_TYPE,
- YarnConfiguration.DEFAULT_NODELABEL_CONFIGURATION_TYPE);
-
- isDistributesNodeLabelsConf =
- YarnConfiguration.DISTRIBUTED_NODELABEL_CONFIGURATION_TYPE
- .equals(nodeLabelConfigurationType);
+ isDistributedNodeLabelsConf =
+ YarnConfiguration.isDistributedNodeLabelConfiguration(conf);
super.serviceInit(conf);
}
@@ -352,7 +347,7 @@ public RegisterNodeManagerResponse registerNodeManager(
// Update node's labels to RM's NodeLabelManager.
Set<String> nodeLabels = request.getNodeLabels();
- if (isDistributesNodeLabelsConf && nodeLabels != null) {
+ if (isDistributedNodeLabelsConf && nodeLabels != null) {
try {
updateNodeLabelsFromNMReport(nodeLabels, nodeId);
response.setAreNodeLabelsAcceptedByRM(true);
@@ -470,7 +465,7 @@ public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request)
this.rmContext.getDispatcher().getEventHandler().handle(nodeStatusEvent);
// 5. Update node's labels to RM's NodeLabelManager.
- if (isDistributesNodeLabelsConf && request.getNodeLabels() != null) {
+ if (isDistributedNodeLabelsConf && request.getNodeLabels() != null) {
try {
updateNodeLabelsFromNMReport(request.getNodeLabels(), nodeId);
nodeHeartBeatResponse.setAreNodeLabelsAcceptedByRM(true);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java
index 6cd6d56281f66..9aea62d1c8408 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java
@@ -149,6 +149,7 @@
import org.apache.hadoop.yarn.webapp.NotFoundException;
import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
+import com.google.common.annotations.VisibleForTesting;
import com.google.inject.Inject;
import com.google.inject.Singleton;
@@ -165,6 +166,9 @@ public class RMWebServices {
private final Configuration conf;
private @Context HttpServletResponse response;
+ @VisibleForTesting
+ boolean isDistributedNodeLabelConfiguration = false;
+
public final static String DELEGATION_TOKEN_HEADER =
"Hadoop-YARN-RM-Delegation-Token";
@@ -172,6 +176,19 @@ public class RMWebServices {
public RMWebServices(final ResourceManager rm, Configuration conf) {
this.rm = rm;
this.conf = conf;
+ isDistributedNodeLabelConfiguration =
+ YarnConfiguration.isDistributedNodeLabelConfiguration(conf);
+ }
+
+ private void checkAndThrowIfDistributedNodeLabelConfEnabled(String operation)
+ throws IOException {
+ if (isDistributedNodeLabelConfiguration) {
+ String msg =
+ String.format("Error when invoke method=%s because of "
+ + "distributed node label configuration enabled.", operation);
+ LOG.error(msg);
+ throw new IOException(msg);
+ }
}
RMWebServices(ResourceManager rm, Configuration conf,
@@ -816,38 +833,64 @@ public LabelsToNodesInfo getLabelsToNodes(
@POST
@Path("/replace-node-to-labels")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
- public Response replaceLabelsOnNodes(
- final NodeToLabelsInfo newNodeToLabels,
- @Context HttpServletRequest hsr)
- throws IOException {
+ public Response replaceLabelsOnNodes(final NodeToLabelsInfo newNodeToLabels,
+ @Context HttpServletRequest hsr) throws IOException {
+ Map<NodeId, Set<String>> nodeIdToLabels =
+ new HashMap<NodeId, Set<String>>();
+
+ for (Map.Entry<String, NodeLabelsInfo> nitle : newNodeToLabels
+ .getNodeToLabels().entrySet()) {
+ nodeIdToLabels.put(
+ ConverterUtils.toNodeIdWithDefaultPort(nitle.getKey()),
+ new HashSet<String>(nitle.getValue().getNodeLabels()));
+ }
+
+ return replaceLabelsOnNode(nodeIdToLabels, hsr, "/replace-node-to-labels");
+ }
+
+ @POST
+ @Path("/nodes/{nodeId}/replace-labels")
+ @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+ public Response replaceLabelsOnNode(NodeLabelsInfo newNodeLabelsInfo,
+ @Context HttpServletRequest hsr, @PathParam("nodeId") String nodeId)
+ throws Exception {
+ NodeId nid = ConverterUtils.toNodeIdWithDefaultPort(nodeId);
+ Map<NodeId, Set<String>> newLabelsForNode =
+ new HashMap<NodeId, Set<String>>();
+ newLabelsForNode.put(nid,
+ new HashSet<String>(newNodeLabelsInfo.getNodeLabels()));
+
+ return replaceLabelsOnNode(newLabelsForNode, hsr, "/nodes/nodeid/replace-labels");
+ }
+
+ private Response replaceLabelsOnNode(
+ Map<NodeId, Set<String>> newLabelsForNode, HttpServletRequest hsr,
+ String operation) throws IOException {
init();
-
+
+ checkAndThrowIfDistributedNodeLabelConfEnabled("replaceLabelsOnNode");
+
UserGroupInformation callerUGI = getCallerUserGroupInformation(hsr, true);
if (callerUGI == null) {
- String msg = "Unable to obtain user name, user not authenticated for"
- + " post to .../replace-node-to-labels";
+ String msg =
+ "Unable to obtain user name, user not authenticated for"
+ + " post to ..." + operation;
throw new AuthorizationException(msg);
}
+
if (!rm.getRMContext().getNodeLabelManager().checkAccess(callerUGI)) {
- String msg = "User " + callerUGI.getShortUserName() + " not authorized"
- + " for post to .../replace-node-to-labels ";
+ String msg =
+ "User " + callerUGI.getShortUserName() + " not authorized"
+ + " for post to ..." + operation;
throw new AuthorizationException(msg);
}
-
- Map<NodeId, Set<String>> nodeIdToLabels =
- new HashMap<NodeId, Set<String>>();
- for (Map.Entry<String, NodeLabelsInfo> nitle :
- newNodeToLabels.getNodeToLabels().entrySet()) {
- nodeIdToLabels.put(ConverterUtils.toNodeIdWithDefaultPort(nitle.getKey()),
- new HashSet<String>(nitle.getValue().getNodeLabels()));
- }
-
- rm.getRMContext().getNodeLabelManager().replaceLabelsOnNode(nodeIdToLabels);
+ rm.getRMContext().getNodeLabelManager()
+ .replaceLabelsOnNode(newLabelsForNode);
return Response.status(Status.OK).build();
}
-
+
@GET
@Path("/get-node-labels")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
@@ -897,7 +940,7 @@ public Response removeFromCluserNodeLabels(final NodeLabelsInfo oldNodeLabels,
@Context HttpServletRequest hsr)
throws Exception {
init();
-
+
UserGroupInformation callerUGI = getCallerUserGroupInformation(hsr, true);
if (callerUGI == null) {
String msg = "Unable to obtain user name, user not authenticated for"
@@ -931,40 +974,6 @@ public NodeLabelsInfo getLabelsOnNode(@Context HttpServletRequest hsr,
rm.getRMContext().getNodeLabelManager().getLabelsOnNode(nid));
}
-
- @POST
- @Path("/nodes/{nodeId}/replace-labels")
- @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
- public Response replaceLabelsOnNode(NodeLabelsInfo newNodeLabelsInfo,
- @Context HttpServletRequest hsr, @PathParam("nodeId") String nodeId)
- throws Exception {
- init();
-
- UserGroupInformation callerUGI = getCallerUserGroupInformation(hsr, true);
- if (callerUGI == null) {
- String msg = "Unable to obtain user name, user not authenticated for"
- + " post to .../nodes/nodeid/replace-labels";
- throw new AuthorizationException(msg);
- }
-
- if (!rm.getRMContext().getNodeLabelManager().checkAccess(callerUGI)) {
- String msg = "User " + callerUGI.getShortUserName() + " not authorized"
- + " for post to .../nodes/nodeid/replace-labels";
- throw new AuthorizationException(msg);
- }
-
- NodeId nid = ConverterUtils.toNodeIdWithDefaultPort(nodeId);
-
- Map<NodeId, Set<String>> newLabelsForNode = new HashMap<NodeId,
- Set<String>>();
-
- newLabelsForNode.put(nid, new HashSet<String>(newNodeLabelsInfo.getNodeLabels()));
-
- rm.getRMContext().getNodeLabelManager().replaceLabelsOnNode(newLabelsForNode);
-
- return Response.status(Status.OK).build();
-
- }
protected Response killApp(RMApp app, UserGroupInformation callerUGI,
HttpServletRequest hsr) throws IOException, InterruptedException {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java
index da04c9ec32b11..fe0b8a8d1ff35 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.yarn.server.resourcemanager;
+import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import java.io.DataOutputStream;
@@ -44,6 +45,7 @@
import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
import org.apache.hadoop.yarn.api.records.DecommissionType;
+import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.conf.HAUtil;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
@@ -53,6 +55,9 @@
import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshServiceAclsRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshSuperUserGroupsConfigurationRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshUserToGroupsMappingsRequest;
+import org.apache.hadoop.yarn.server.api.protocolrecords.RemoveFromClusterNodeLabelsRequest;
+import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeRequest;
+import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfiguration;
import org.junit.After;
@@ -60,6 +65,8 @@
import org.junit.Before;
import org.junit.Test;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
public class TestRMAdminService {
@@ -754,6 +761,67 @@ public void testRMInitialsWithFileSystemBasedConfigurationProvider()
}
}
+ @Test
+ public void testModifyLabelsOnNodesWithDistributedConfigurationDisabled()
+ throws IOException, YarnException {
+ // create RM and set it's ACTIVE
+ MockRM rm = new MockRM();
+ ((RMContextImpl) rm.getRMContext())
+ .setHAServiceState(HAServiceState.ACTIVE);
+ RMNodeLabelsManager labelMgr = rm.rmContext.getNodeLabelManager();
+
+ // by default, distributed configuration for node label is disabled, this
+ // should pass
+ labelMgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x", "y"));
+ rm.adminService.replaceLabelsOnNode(ReplaceLabelsOnNodeRequest
+ .newInstance(ImmutableMap.of(NodeId.newInstance("host", 0),
+ (Set<String>) ImmutableSet.of("x"))));
+ rm.close();
+ }
+
+ @Test(expected = YarnException.class)
+ public void testModifyLabelsOnNodesWithDistributedConfigurationEnabled()
+ throws IOException, YarnException {
+ // create RM and set it's ACTIVE, and set distributed node label
+ // configuration to true
+ MockRM rm = new MockRM();
+ rm.adminService.isDistributedNodeLabelConfiguration = true;
+
+ ((RMContextImpl) rm.getRMContext())
+ .setHAServiceState(HAServiceState.ACTIVE);
+ RMNodeLabelsManager labelMgr = rm.rmContext.getNodeLabelManager();
+
+ // by default, distributed configuration for node label is disabled, this
+ // should pass
+ labelMgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x", "y"));
+ rm.adminService.replaceLabelsOnNode(ReplaceLabelsOnNodeRequest
+ .newInstance(ImmutableMap.of(NodeId.newInstance("host", 0),
+ (Set<String>) ImmutableSet.of("x"))));
+ rm.close();
+ }
+
+ @Test
+ public void testRemoveClusterNodeLabelsWithDistributedConfigurationEnabled()
+ throws IOException, YarnException {
+ // create RM and set it's ACTIVE
+ MockRM rm = new MockRM();
+ ((RMContextImpl) rm.getRMContext())
+ .setHAServiceState(HAServiceState.ACTIVE);
+ RMNodeLabelsManager labelMgr = rm.rmContext.getNodeLabelManager();
+ rm.adminService.isDistributedNodeLabelConfiguration = true;
+
+ // by default, distributed configuration for node label is disabled, this
+ // should pass
+ labelMgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x", "y"));
+ rm.adminService
+ .removeFromClusterNodeLabels(RemoveFromClusterNodeLabelsRequest
+ .newInstance((Set<String>) ImmutableSet.of("x")));
+
+ Set<String> clusterNodeLabels = labelMgr.getClusterNodeLabelNames();
+ assertEquals(1,clusterNodeLabels.size());
+ rm.close();
+ }
+
private String writeConfigurationXML(Configuration conf, String confXMLName)
throws IOException {
DataOutputStream output = null;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/nodelabels/NullRMNodeLabelsManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/nodelabels/NullRMNodeLabelsManager.java
index 9548029d08769..2e21d261f615f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/nodelabels/NullRMNodeLabelsManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/nodelabels/NullRMNodeLabelsManager.java
@@ -40,7 +40,8 @@ public void initNodeLabelStore(Configuration conf) {
this.store = new NodeLabelsStore(this) {
@Override
- public void recover() throws IOException {
+ public void recover(boolean ignoreNodeToLabelsMappings)
+ throws IOException {
// do nothing
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServices.java
index 298246ca301e2..e4614f8c9ec7e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServices.java
@@ -51,6 +51,7 @@
import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
import org.apache.hadoop.yarn.server.resourcemanager.RMContextImpl;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
+import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
@@ -623,6 +624,7 @@ public void testAppsRace() throws Exception {
null, null, null, null, null);
when(mockRM.getRMContext()).thenReturn(rmContext);
when(mockRM.getClientRMService()).thenReturn(mockClientSvc);
+ rmContext.setNodeLabelManager(mock(RMNodeLabelsManager.class));
RMWebServices webSvc = new RMWebServices(mockRM, new Configuration(),
mock(HttpServletResponse.class));
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesNodeLabels.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesNodeLabels.java
index 40c54a30a6a8d..2d5518dc03cf8 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesNodeLabels.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesNodeLabels.java
@@ -19,10 +19,10 @@
package org.apache.hadoop.yarn.server.resourcemanager.webapp;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
-import java.io.StringReader;
import java.io.StringWriter;
import javax.ws.rs.core.MediaType;
@@ -51,7 +51,6 @@
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.api.json.JSONJAXBContext;
import com.sun.jersey.api.json.JSONMarshaller;
-import com.sun.jersey.api.json.JSONUnmarshaller;
import com.sun.jersey.core.util.MultivaluedMapImpl;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import com.sun.jersey.test.framework.WebAppDescriptor;
@@ -66,13 +65,13 @@ public class TestRMWebServicesNodeLabels extends JerseyTestBase {
private String userName;
private String notUserName;
+ private RMWebServices rmWebService;
private Injector injector = Guice.createInjector(new ServletModule() {
+
@Override
protected void configureServlets() {
bind(JAXBContextResolver.class);
- bind(RMWebServices.class);
- bind(GenericExceptionHandler.class);
try {
userName = UserGroupInformation.getCurrentUser().getShortUserName();
} catch (IOException ioe) {
@@ -83,6 +82,9 @@ protected void configureServlets() {
conf = new YarnConfiguration();
conf.set(YarnConfiguration.YARN_ADMIN_ACL, userName);
rm = new MockRM(conf);
+ rmWebService = new RMWebServices(rm,conf);
+ bind(RMWebServices.class).toInstance(rmWebService);
+ bind(GenericExceptionHandler.class);
bind(ResourceManager.class).toInstance(rm);
filter("/*").through(
TestRMWebServicesAppsModification.TestRMCustomAuthFilter.class);
@@ -113,7 +115,6 @@ public void testNodeLabels() throws JSONException, Exception {
ClientResponse response;
JSONObject json;
JSONArray jarr;
- String responseString;
// Add a label
response =
@@ -386,6 +387,93 @@ public void testNodeLabels() throws JSONException, Exception {
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
String res = response.getEntity(String.class);
assertTrue(res.equals("null"));
+
+ // Following test cases are to test replace when distributed node label
+ // configuration is on
+ // Reset for testing : add cluster labels
+ response =
+ r.path("ws")
+ .path("v1")
+ .path("cluster")
+ .path("add-node-labels")
+ .queryParam("user.name", userName)
+ .accept(MediaType.APPLICATION_JSON)
+ .entity("{\"nodeLabels\":[\"x\",\"y\"]}",
+ MediaType.APPLICATION_JSON).post(ClientResponse.class);
+ // Reset for testing : Add labels to a node
+ response =
+ r.path("ws").path("v1").path("cluster").path("nodes").path("nid:0")
+ .path("replace-labels").queryParam("user.name", userName)
+ .accept(MediaType.APPLICATION_JSON)
+ .entity("{\"nodeLabels\": [\"y\"]}", MediaType.APPLICATION_JSON)
+ .post(ClientResponse.class);
+ LOG.info("posted node nodelabel");
+
+ //setting rmWebService for Distributed NodeLabel Configuration
+ rmWebService.isDistributedNodeLabelConfiguration = true;
+
+ // Case1 : Replace labels using node-to-labels
+ ntli = new NodeToLabelsInfo();
+ nli = new NodeLabelsInfo();
+ nli.getNodeLabels().add("x");
+ ntli.getNodeToLabels().put("nid:0", nli);
+ response =
+ r.path("ws")
+ .path("v1")
+ .path("cluster")
+ .path("replace-node-to-labels")
+ .queryParam("user.name", userName)
+ .accept(MediaType.APPLICATION_JSON)
+ .entity(toJson(ntli, NodeToLabelsInfo.class),
+ MediaType.APPLICATION_JSON).post(ClientResponse.class);
+
+ // Verify, using node-to-labels that previous operation has failed
+ response =
+ r.path("ws").path("v1").path("cluster").path("get-node-to-labels")
+ .queryParam("user.name", userName)
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ ntli = response.getEntity(NodeToLabelsInfo.class);
+ nli = ntli.getNodeToLabels().get("nid:0");
+ assertEquals(1, nli.getNodeLabels().size());
+ assertFalse(nli.getNodeLabels().contains("x"));
+
+ // Case2 : failure to Replace labels using replace-labels
+ response =
+ r.path("ws").path("v1").path("cluster").path("nodes").path("nid:0")
+ .path("replace-labels").queryParam("user.name", userName)
+ .accept(MediaType.APPLICATION_JSON)
+ .entity("{\"nodeLabels\": [\"x\"]}", MediaType.APPLICATION_JSON)
+ .post(ClientResponse.class);
+ LOG.info("posted node nodelabel");
+
+ // Verify, using node-to-labels that previous operation has failed
+ response =
+ r.path("ws").path("v1").path("cluster").path("get-node-to-labels")
+ .queryParam("user.name", userName)
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ ntli = response.getEntity(NodeToLabelsInfo.class);
+ nli = ntli.getNodeToLabels().get("nid:0");
+ assertEquals(1, nli.getNodeLabels().size());
+ assertFalse(nli.getNodeLabels().contains("x"));
+
+ // Case3 : Remove cluster label should be successfull
+ response =
+ r.path("ws").path("v1").path("cluster")
+ .path("remove-node-labels")
+ .queryParam("user.name", userName)
+ .accept(MediaType.APPLICATION_JSON)
+ .entity("{\"nodeLabels\":\"x\"}", MediaType.APPLICATION_JSON)
+ .post(ClientResponse.class);
+ // Verify
+ response =
+ r.path("ws").path("v1").path("cluster")
+ .path("get-node-labels").queryParam("user.name", userName)
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ json = response.getEntity(JSONObject.class);
+ assertEquals("y", json.getString("nodeLabels"));
}
@SuppressWarnings("rawtypes")
@@ -396,13 +484,4 @@ private String toJson(Object nsli, Class klass) throws Exception {
jm.marshallToJSON(nsli, sw);
return sw.toString();
}
-
- @SuppressWarnings({ "rawtypes", "unchecked" })
- private Object fromJson(String json, Class klass) throws Exception {
- StringReader sr = new StringReader(json);
- JSONJAXBContext ctx = new JSONJAXBContext(klass);
- JSONUnmarshaller jm = ctx.createJSONUnmarshaller();
- return jm.unmarshalFromJSON(sr, klass);
- }
-
}
|
a8014dec501f50c4a7bb91778ed832676aa6835c
|
kotlin
|
[KT-4124] Add support for simple nested classes--
|
a
|
https://github.com/JetBrains/kotlin
|
diff --git a/js/js.frontend/src/org/jetbrains/kotlin/js/resolve/JsPlatformConfigurator.kt b/js/js.frontend/src/org/jetbrains/kotlin/js/resolve/JsPlatformConfigurator.kt
index a5a04e94ee6ac..f0244a8377330 100644
--- a/js/js.frontend/src/org/jetbrains/kotlin/js/resolve/JsPlatformConfigurator.kt
+++ b/js/js.frontend/src/org/jetbrains/kotlin/js/resolve/JsPlatformConfigurator.kt
@@ -28,7 +28,7 @@ import org.jetbrains.kotlin.types.DynamicTypesAllowed
object JsPlatformConfigurator : PlatformConfigurator(
DynamicTypesAllowed(),
- additionalDeclarationCheckers = listOf(NativeInvokeChecker(), NativeGetterChecker(), NativeSetterChecker(), ClassDeclarationChecker()),
+ additionalDeclarationCheckers = listOf(NativeInvokeChecker(), NativeGetterChecker(), NativeSetterChecker()),
additionalCallCheckers = listOf(),
additionalTypeCheckers = listOf(),
additionalSymbolUsageValidators = listOf(),
diff --git a/js/js.frontend/src/org/jetbrains/kotlin/js/resolve/unsupportedFeatureCheckers.kt b/js/js.frontend/src/org/jetbrains/kotlin/js/resolve/unsupportedFeatureCheckers.kt
deleted file mode 100644
index 0185d5b6b6f78..0000000000000
--- a/js/js.frontend/src/org/jetbrains/kotlin/js/resolve/unsupportedFeatureCheckers.kt
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright 2010-2015 JetBrains s.r.o.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.jetbrains.kotlin.js.resolve
-
-import org.jetbrains.kotlin.descriptors.ClassDescriptor
-import org.jetbrains.kotlin.descriptors.DeclarationDescriptor
-import org.jetbrains.kotlin.descriptors.DeclarationDescriptorWithVisibility
-import org.jetbrains.kotlin.descriptors.Visibilities
-import org.jetbrains.kotlin.diagnostics.DiagnosticSink
-import org.jetbrains.kotlin.diagnostics.rendering.renderKind
-import org.jetbrains.kotlin.diagnostics.rendering.renderKindWithName
-import org.jetbrains.kotlin.js.resolve.diagnostics.ErrorsJs
-import org.jetbrains.kotlin.js.translate.utils.AnnotationsUtils
-import org.jetbrains.kotlin.psi.*
-import org.jetbrains.kotlin.resolve.BindingContext
-import org.jetbrains.kotlin.resolve.DeclarationChecker
-import org.jetbrains.kotlin.resolve.DescriptorUtils
-
-class ClassDeclarationChecker : DeclarationChecker {
- override fun check(
- declaration: KtDeclaration,
- descriptor: DeclarationDescriptor,
- diagnosticHolder: DiagnosticSink,
- bindingContext: BindingContext
- ) {
- if (declaration !is KtClassOrObject || declaration is KtObjectDeclaration || declaration is KtEnumEntry) return
-
- // hack to avoid to get diagnostics when compile kotlin builtins
- val fqNameUnsafe = DescriptorUtils.getFqName(descriptor)
- if (fqNameUnsafe.asString().startsWith("kotlin.")) return
-
- if (!DescriptorUtils.isTopLevelDeclaration(descriptor) && !AnnotationsUtils.isNativeObject(descriptor)) {
- diagnosticHolder.report(ErrorsJs.NON_TOPLEVEL_CLASS_DECLARATION.on(declaration, (descriptor as ClassDescriptor).renderKind()))
- }
- }
-}
diff --git a/js/js.tests/test/org/jetbrains/kotlin/js/test/semantics/NestedTypesTest.java b/js/js.tests/test/org/jetbrains/kotlin/js/test/semantics/NestedTypesTest.java
new file mode 100644
index 0000000000000..278f65a6adf79
--- /dev/null
+++ b/js/js.tests/test/org/jetbrains/kotlin/js/test/semantics/NestedTypesTest.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2010-2016 JetBrains s.r.o.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.jetbrains.kotlin.js.test.semantics;
+
+import org.jetbrains.kotlin.js.test.SingleFileTranslationTest;
+
+public class NestedTypesTest extends SingleFileTranslationTest {
+ public NestedTypesTest() {
+ super("nestedTypes/");
+ }
+
+ public void testNested() throws Exception {
+ checkFooBoxIsOk();
+ }
+
+ public void testInner() throws Exception {
+ checkFooBoxIsOk();
+ }
+}
diff --git a/js/js.translator/src/org/jetbrains/kotlin/js/translate/callTranslator/FunctionCallCases.kt b/js/js.translator/src/org/jetbrains/kotlin/js/translate/callTranslator/FunctionCallCases.kt
index ba4b41a2cf71b..209eefc5a6a13 100644
--- a/js/js.translator/src/org/jetbrains/kotlin/js/translate/callTranslator/FunctionCallCases.kt
+++ b/js/js.translator/src/org/jetbrains/kotlin/js/translate/callTranslator/FunctionCallCases.kt
@@ -23,6 +23,7 @@ import org.jetbrains.kotlin.descriptors.Visibilities
import org.jetbrains.kotlin.js.PredefinedAnnotation
import org.jetbrains.kotlin.js.translate.context.Namer
import org.jetbrains.kotlin.js.translate.context.TranslationContext
+import org.jetbrains.kotlin.js.translate.general.Translation
import org.jetbrains.kotlin.js.translate.operation.OperatorTable
import org.jetbrains.kotlin.js.translate.reference.CallArgumentTranslator
import org.jetbrains.kotlin.js.translate.utils.AnnotationsUtils
@@ -32,6 +33,7 @@ import org.jetbrains.kotlin.lexer.KtTokens
import org.jetbrains.kotlin.psi.*
import org.jetbrains.kotlin.resolve.calls.tasks.isDynamic
import org.jetbrains.kotlin.resolve.descriptorUtil.builtIns
+import org.jetbrains.kotlin.resolve.scopes.receivers.ExpressionReceiver
import org.jetbrains.kotlin.util.OperatorNameConventions
import java.util.ArrayList
@@ -209,13 +211,35 @@ object ConstructorCallCase : FunctionCallCase() {
val functionRef = if (isNative()) fqName else context.aliasOrValue(callableDescriptor) { fqName }
val constructorDescriptor = callableDescriptor as ConstructorDescriptor
- if(constructorDescriptor.isPrimary || AnnotationsUtils.isNativeObject(constructorDescriptor)) {
+ if (constructorDescriptor.isPrimary || AnnotationsUtils.isNativeObject(constructorDescriptor)) {
return JsNew(functionRef, argumentsInfo.translateArguments)
}
else {
return JsInvocation(functionRef, argumentsInfo.translateArguments)
}
}
+
+ override fun FunctionCallInfo.dispatchReceiver(): JsExpression {
+ val fqName = context.getQualifiedReference(callableDescriptor)
+ val functionRef = context.aliasOrValue(callableDescriptor) { fqName }
+
+ val constructorDescriptor = callableDescriptor as ConstructorDescriptor
+ val receiver = this.resolvedCall.dispatchReceiver
+ var allArguments = when (receiver) {
+ is ExpressionReceiver -> {
+ val jsReceiver = Translation.translateAsExpression(receiver.expression, context)
+ (sequenceOf(jsReceiver) + argumentsInfo.translateArguments).toList()
+ }
+ else -> argumentsInfo.translateArguments
+ }
+
+ if (constructorDescriptor.isPrimary || AnnotationsUtils.isNativeObject(constructorDescriptor)) {
+ return JsNew(functionRef, allArguments)
+ }
+ else {
+ return JsInvocation(functionRef, allArguments)
+ }
+ }
}
object SuperCallCase : FunctionCallCase() {
diff --git a/js/js.translator/src/org/jetbrains/kotlin/js/translate/context/StaticContext.java b/js/js.translator/src/org/jetbrains/kotlin/js/translate/context/StaticContext.java
index 0190dea9e8fdd..71a22e1a1ae86 100644
--- a/js/js.translator/src/org/jetbrains/kotlin/js/translate/context/StaticContext.java
+++ b/js/js.translator/src/org/jetbrains/kotlin/js/translate/context/StaticContext.java
@@ -541,6 +541,27 @@ public JsExpression apply(@NotNull DeclarationDescriptor descriptor) {
return null;
}
};
+ Rule<JsExpression> nestedClassesHaveContainerQualifier = new Rule<JsExpression>() {
+ @Nullable
+ @Override
+ public JsExpression apply(@NotNull DeclarationDescriptor descriptor) {
+ if (isNativeObject(descriptor) || isBuiltin(descriptor)) {
+ return null;
+ }
+ if (!(descriptor instanceof ClassDescriptor)) {
+ return null;
+ }
+ ClassDescriptor cls = (ClassDescriptor) descriptor;
+ if (cls.getKind() == ClassKind.ENUM_ENTRY || cls.getKind() == ClassKind.OBJECT) {
+ return null;
+ }
+ DeclarationDescriptor container = descriptor.getContainingDeclaration();
+ if (container == null) {
+ return null;
+ }
+ return getQualifiedReference(container);
+ }
+ };
addRule(libraryObjectsHaveKotlinQualifier);
addRule(constructorOrCompanionObjectHasTheSameQualifierAsTheClass);
@@ -548,6 +569,7 @@ public JsExpression apply(@NotNull DeclarationDescriptor descriptor) {
addRule(packageLevelDeclarationsHaveEnclosingPackagesNamesAsQualifier);
addRule(nativeObjectsHaveNativePartOfFullQualifier);
addRule(staticMembersHaveContainerQualifier);
+ addRule(nestedClassesHaveContainerQualifier);
}
}
diff --git a/js/js.translator/src/org/jetbrains/kotlin/js/translate/context/TranslationContext.java b/js/js.translator/src/org/jetbrains/kotlin/js/translate/context/TranslationContext.java
index 1c461208e665a..3b9dc1a9c9d20 100644
--- a/js/js.translator/src/org/jetbrains/kotlin/js/translate/context/TranslationContext.java
+++ b/js/js.translator/src/org/jetbrains/kotlin/js/translate/context/TranslationContext.java
@@ -21,10 +21,7 @@
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.kotlin.builtins.ReflectionTypes;
-import org.jetbrains.kotlin.descriptors.CallableDescriptor;
-import org.jetbrains.kotlin.descriptors.DeclarationDescriptor;
-import org.jetbrains.kotlin.descriptors.MemberDescriptor;
-import org.jetbrains.kotlin.descriptors.ReceiverParameterDescriptor;
+import org.jetbrains.kotlin.descriptors.*;
import org.jetbrains.kotlin.js.config.Config;
import org.jetbrains.kotlin.js.translate.intrinsic.Intrinsics;
import org.jetbrains.kotlin.js.translate.utils.TranslationUtils;
@@ -32,6 +29,7 @@
import org.jetbrains.kotlin.psi.KtExpression;
import org.jetbrains.kotlin.resolve.BindingContext;
import org.jetbrains.kotlin.resolve.BindingTrace;
+import org.jetbrains.kotlin.resolve.DescriptorUtils;
import java.util.HashMap;
import java.util.Map;
@@ -55,12 +53,16 @@ public class TranslationContext {
private final TranslationContext parent;
@Nullable
private final DefinitionPlace definitionPlace;
+ @Nullable
+ private final DeclarationDescriptor declarationDescriptor;
+ @Nullable
+ private final ClassDescriptor classDescriptor;
@NotNull
public static TranslationContext rootContext(@NotNull StaticContext staticContext, JsFunction rootFunction) {
DynamicContext rootDynamicContext = DynamicContext.rootContext(rootFunction.getScope(), rootFunction.getBody());
AliasingContext rootAliasingContext = AliasingContext.getCleanContext();
- return new TranslationContext(null, staticContext, rootDynamicContext, rootAliasingContext, null, null);
+ return new TranslationContext(null, staticContext, rootDynamicContext, rootAliasingContext, null, null, null);
}
private final Map<JsExpression, TemporaryConstVariable> expressionToTempConstVariableCache = new HashMap<JsExpression, TemporaryConstVariable>();
@@ -71,7 +73,8 @@ private TranslationContext(
@NotNull DynamicContext dynamicContext,
@NotNull AliasingContext aliasingContext,
@Nullable UsageTracker usageTracker,
- @Nullable DefinitionPlace definitionPlace
+ @Nullable DefinitionPlace definitionPlace,
+ @Nullable DeclarationDescriptor declarationDescriptor
) {
this.parent = parent;
this.dynamicContext = dynamicContext;
@@ -79,6 +82,14 @@ private TranslationContext(
this.aliasingContext = aliasingContext;
this.usageTracker = usageTracker;
this.definitionPlace = definitionPlace;
+ this.declarationDescriptor = declarationDescriptor;
+ if (declarationDescriptor instanceof ClassDescriptor
+ && !DescriptorUtils.isAnonymousObject(declarationDescriptor)
+ && !DescriptorUtils.isObject(declarationDescriptor)) {
+ this.classDescriptor = (ClassDescriptor) declarationDescriptor;
+ } else {
+ this.classDescriptor = parent != null ? parent.classDescriptor : null;
+ }
}
@Nullable
@@ -103,19 +114,22 @@ public TranslationContext newFunctionBody(@NotNull JsFunction fun, @Nullable Ali
aliasingContext = this.aliasingContext.inner();
}
- return new TranslationContext(this, this.staticContext, dynamicContext, aliasingContext, this.usageTracker, null);
+ return new TranslationContext(this, this.staticContext, dynamicContext, aliasingContext, this.usageTracker, null,
+ this.declarationDescriptor);
}
@NotNull
public TranslationContext newFunctionBodyWithUsageTracker(@NotNull JsFunction fun, @NotNull MemberDescriptor descriptor) {
DynamicContext dynamicContext = DynamicContext.newContext(fun.getScope(), fun.getBody());
UsageTracker usageTracker = new UsageTracker(this.usageTracker, descriptor, fun.getScope());
- return new TranslationContext(this, this.staticContext, dynamicContext, this.aliasingContext.inner(), usageTracker, this.definitionPlace);
+ return new TranslationContext(this, this.staticContext, dynamicContext, this.aliasingContext.inner(), usageTracker, this.definitionPlace,
+ this.declarationDescriptor);
}
@NotNull
public TranslationContext innerBlock(@NotNull JsBlock block) {
- return new TranslationContext(this, staticContext, dynamicContext.innerBlock(block), aliasingContext, usageTracker, null);
+ return new TranslationContext(this, staticContext, dynamicContext.innerBlock(block), aliasingContext, usageTracker, null,
+ this.declarationDescriptor);
}
@NotNull
@@ -126,12 +140,14 @@ public TranslationContext innerBlock() {
@NotNull
public TranslationContext newDeclaration(@NotNull DeclarationDescriptor descriptor, @Nullable DefinitionPlace place) {
DynamicContext dynamicContext = DynamicContext.newContext(getScopeForDescriptor(descriptor), getBlockForDescriptor(descriptor));
- return new TranslationContext(this, staticContext, dynamicContext, aliasingContext, usageTracker, place);
+ return new TranslationContext(this, staticContext, dynamicContext, aliasingContext, usageTracker, place,
+ descriptor);
}
@NotNull
private TranslationContext innerWithAliasingContext(AliasingContext aliasingContext) {
- return new TranslationContext(this, this.staticContext, this.dynamicContext, aliasingContext, this.usageTracker, null);
+ return new TranslationContext(this, this.staticContext, this.dynamicContext, aliasingContext, this.usageTracker, null,
+ this.declarationDescriptor);
}
@NotNull
@@ -319,7 +335,29 @@ public JsExpression getAliasForDescriptor(@NotNull DeclarationDescriptor descrip
@NotNull
public JsExpression getDispatchReceiver(@NotNull ReceiverParameterDescriptor descriptor) {
JsExpression alias = getAliasForDescriptor(descriptor);
- return alias == null ? JsLiteral.THIS : alias;
+ if (alias != null) {
+ return alias;
+ }
+ return getDispatchReceiverPath(getNearestClass(descriptor));
+ }
+
+ @NotNull
+ private JsExpression getDispatchReceiverPath(@Nullable ClassDescriptor cls) {
+ if (cls != null) {
+ JsExpression alias = getAliasForDescriptor(cls);
+ if (alias != null) {
+ return alias;
+ }
+ }
+ if (cls == classDescriptor || parent == null) {
+ return JsLiteral.THIS;
+ }
+ ClassDescriptor parentDescriptor = parent.classDescriptor;
+ if (classDescriptor != parentDescriptor) {
+ return new JsNameRef("$outer", parent.getDispatchReceiverPath(cls));
+ } else {
+ return parent.getDispatchReceiverPath(cls);
+ }
}
@NotNull
@@ -347,4 +385,17 @@ private JsNameRef captureIfNeedAndGetCapturedName(DeclarationDescriptor descript
return null;
}
+
+ private static ClassDescriptor getNearestClass(DeclarationDescriptor declaration) {
+ while (declaration != null) {
+ if (declaration instanceof ClassDescriptor) {
+ if (!DescriptorUtils.isAnonymousObject(declaration)
+ && !DescriptorUtils.isObject(declaration)) {
+ return (ClassDescriptor) declaration;
+ }
+ }
+ declaration = declaration.getContainingDeclaration();
+ }
+ return null;
+ }
}
diff --git a/js/js.translator/src/org/jetbrains/kotlin/js/translate/declaration/DeclarationBodyVisitor.java b/js/js.translator/src/org/jetbrains/kotlin/js/translate/declaration/DeclarationBodyVisitor.java
index 2280cde4ac63f..02cff3705cf7d 100644
--- a/js/js.translator/src/org/jetbrains/kotlin/js/translate/declaration/DeclarationBodyVisitor.java
+++ b/js/js.translator/src/org/jetbrains/kotlin/js/translate/declaration/DeclarationBodyVisitor.java
@@ -67,7 +67,8 @@ protected Void emptyResult(@NotNull TranslationContext context) {
}
@Override
- public Void visitClass(@NotNull KtClass expression, TranslationContext context) {
+ public Void visitClass(@NotNull KtClass declaration, TranslationContext context) {
+ staticResult.addAll(ClassTranslator.Companion.translate(declaration, context));
return null;
}
diff --git a/js/js.translator/src/org/jetbrains/kotlin/js/translate/declaration/FileDeclaration.kt b/js/js.translator/src/org/jetbrains/kotlin/js/translate/declaration/FileDeclaration.kt
index 67fb980081210..9d313936e61cd 100644
--- a/js/js.translator/src/org/jetbrains/kotlin/js/translate/declaration/FileDeclaration.kt
+++ b/js/js.translator/src/org/jetbrains/kotlin/js/translate/declaration/FileDeclaration.kt
@@ -50,8 +50,8 @@ class FileDeclarationVisitor(
}
}
- override fun visitClass(expression: KtClass, context: TranslationContext?): Void? {
- result.addAll(ClassTranslator.translate(expression, context!!))
+ override fun visitClass(declaration: KtClass, context: TranslationContext?): Void? {
+ result.addAll(ClassTranslator.translate(declaration, context!!))
return null
}
diff --git a/js/js.translator/src/org/jetbrains/kotlin/js/translate/initializer/ClassInitializerTranslator.java b/js/js.translator/src/org/jetbrains/kotlin/js/translate/initializer/ClassInitializerTranslator.java
index df9c13ef6a2bb..580e38a836d98 100644
--- a/js/js.translator/src/org/jetbrains/kotlin/js/translate/initializer/ClassInitializerTranslator.java
+++ b/js/js.translator/src/org/jetbrains/kotlin/js/translate/initializer/ClassInitializerTranslator.java
@@ -84,6 +84,7 @@ private static JsFunction createInitFunction(KtClassOrObject declaration, Transl
@NotNull
public JsFunction generateInitializeMethod(DelegationTranslator delegationTranslator) {
ClassDescriptor classDescriptor = getClassDescriptor(bindingContext(), classDeclaration);
+ addOuterClassReference(classDescriptor);
ConstructorDescriptor primaryConstructor = classDescriptor.getUnsubstitutedPrimaryConstructor();
if (primaryConstructor != null) {
@@ -113,6 +114,22 @@ public JsFunction generateInitializeMethod(DelegationTranslator delegationTransl
return initFunction;
}
+ private void addOuterClassReference(ClassDescriptor classDescriptor) {
+ DeclarationDescriptor container = classDescriptor.getContainingDeclaration();
+ if (!(container instanceof ClassDescriptor) || !classDescriptor.isInner()) {
+ return;
+ }
+
+ // TODO: avoid name clashing
+ JsName outerName = initFunction.getScope().declareName("$outer");
+ initFunction.getParameters().add(0, new JsParameter(outerName));
+
+ JsExpression target = new JsNameRef(outerName, JsLiteral.THIS);
+ JsExpression paramRef = new JsNameRef(outerName);
+ JsExpression assignment = new JsBinaryOperation(JsBinaryOperator.ASG, target, paramRef);
+ initFunction.getBody().getStatements().add(new JsExpressionStatement(assignment));
+ }
+
@NotNull
public JsExpression generateEnumEntryInstanceCreation(@NotNull KotlinType enumClassType) {
ResolvedCall<FunctionDescriptor> superCall = getSuperCall();
diff --git a/js/js.translator/testData/kotlin_lib_ecma5.js b/js/js.translator/testData/kotlin_lib_ecma5.js
index fd799ac5cd851..ce75f44f05197 100644
--- a/js/js.translator/testData/kotlin_lib_ecma5.js
+++ b/js/js.translator/testData/kotlin_lib_ecma5.js
@@ -91,19 +91,22 @@ var Kotlin = {};
}
}
- function computeMetadata(bases, properties) {
+ function computeMetadata(bases, properties, staticProperties) {
var metadata = {};
+ var p, property;
metadata.baseClasses = toArray(bases);
metadata.baseClass = getClass(metadata.baseClasses);
metadata.classIndex = Kotlin.newClassIndex();
metadata.functions = {};
metadata.properties = {};
+ metadata.types = {};
+ metadata.staticMembers = {};
if (!(properties == null)) {
- for (var p in properties) {
+ for (p in properties) {
if (properties.hasOwnProperty(p)) {
- var property = properties[p];
+ property = properties[p];
property.$classIndex$ = metadata.classIndex;
if (typeof property === "function") {
metadata.functions[p] = property;
@@ -114,6 +117,19 @@ var Kotlin = {};
}
}
}
+ if (typeof staticProperties !== 'undefined') {
+ for (p in staticProperties) {
+ if (!staticProperties.hasOwnProperty(p)) {
+ continue;
+ }
+ property = staticProperties[p];
+ if (typeof property === "function" && typeof property.type !== "undefined" && property.type === Kotlin.TYPE.INIT_FUN) {
+ metadata.types[p] = property;
+ } else {
+ metadata.staticMembers[p] = property;
+ }
+ }
+ }
applyExtension(metadata.functions, metadata.baseClasses, function (it) {
return it.$metadata$.functions
});
@@ -146,10 +162,10 @@ var Kotlin = {};
if (constructor == null) {
constructor = emptyFunction();
}
- copyProperties(constructor, staticProperties);
- var metadata = computeMetadata(bases, properties);
+ var metadata = computeMetadata(bases, properties, staticProperties);
metadata.type = Kotlin.TYPE.CLASS;
+ copyProperties(constructor, metadata.staticMembers);
var prototypeObj;
if (metadata.baseClass !== null) {
@@ -161,6 +177,14 @@ var Kotlin = {};
Object.defineProperties(prototypeObj, metadata.properties);
copyProperties(prototypeObj, metadata.functions);
prototypeObj.constructor = constructor;
+ for (var innerType in metadata.types) {
+ if (metadata.types.hasOwnProperty(innerType)) {
+ Object.defineProperty(constructor, innerType, {
+ get: metadata.types[innerType],
+ configurable: true
+ });
+ }
+ }
if (metadata.baseClass != null) {
constructor.baseInitializer = metadata.baseClass;
@@ -181,15 +205,25 @@ var Kotlin = {};
Kotlin.createTraitNow = function (bases, properties, staticProperties) {
var obj = function () {};
- copyProperties(obj, staticProperties);
- obj.$metadata$ = computeMetadata(bases, properties);
+ obj.$metadata$ = computeMetadata(bases, properties, staticProperties);
obj.$metadata$.type = Kotlin.TYPE.TRAIT;
+ copyProperties(obj, obj.$metadata$.staticMembers);
obj.prototype = {};
Object.defineProperties(obj.prototype, obj.$metadata$.properties);
copyProperties(obj.prototype, obj.$metadata$.functions);
Object.defineProperty(obj, "object", {get: class_object, configurable: true});
+
+ for (var innerType in obj.$metadata$.types) {
+ if (obj.$metadata$.types.hasOwnProperty(innerType)) {
+ Object.defineProperty(constructor, innerType, {
+ get: obj.$metadata$.types[innerType],
+ configurable: true
+ });
+ }
+ }
+
return obj;
};
diff --git a/js/js.translator/testData/nestedTypes/cases/inner.kt b/js/js.translator/testData/nestedTypes/cases/inner.kt
new file mode 100644
index 0000000000000..f6b40471caf0c
--- /dev/null
+++ b/js/js.translator/testData/nestedTypes/cases/inner.kt
@@ -0,0 +1,14 @@
+package foo
+
+open class A(val x: Int, val y: Int) {
+ inner class B(val z: Int) {
+ fun foo() = x + y + z
+ }
+}
+
+fun box(): String {
+ val a = A(2, 3)
+ val b = a.B(4)
+ return if (b.foo() == 9) "OK" else "failure"
+}
+
diff --git a/js/js.translator/testData/nestedTypes/cases/nested.kt b/js/js.translator/testData/nestedTypes/cases/nested.kt
new file mode 100644
index 0000000000000..62c75ccbb1364
--- /dev/null
+++ b/js/js.translator/testData/nestedTypes/cases/nested.kt
@@ -0,0 +1,10 @@
+package foo
+
+open class A(val x: Int) {
+ class B : A(5)
+}
+
+fun box(): String {
+ return if (A(7).x + A.B().x == 12) "OK" else "failed"
+}
+
|
062042ba83651b8495bc0330023ae7c7c47a38d4
|
hbase
|
HBASE-1722 Add support for exporting HBase- metrics via JMX--git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@813229 13f79535-47bb-0310-9956-ffa450edef68-
|
a
|
https://github.com/apache/hbase
|
diff --git a/CHANGES.txt b/CHANGES.txt
index 05466fa181fb..d81a17cbca78 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -34,6 +34,7 @@ Release 0.21.0 - Unreleased
HBASE-1800 Too many ZK connections
HBASE-1819 Update to 0.20.1 hadoop and zk 3.2.1
HBASE-1820 Update jruby from 1.2 to 1.3.1
+ HBASE-1722 Add support for exporting HBase metrics via JMX
OPTIMIZATIONS
HBASE-1765 Delay Result deserialization until asked for and permit
diff --git a/src/docs/src/documentation/content/xdocs/metrics.xml b/src/docs/src/documentation/content/xdocs/metrics.xml
index c8744f438de5..b01d7bd06cc7 100644
--- a/src/docs/src/documentation/content/xdocs/metrics.xml
+++ b/src/docs/src/documentation/content/xdocs/metrics.xml
@@ -63,5 +63,118 @@
in ganglia, the stats are aggregated rather than reported per instance.
</p>
</section>
+
+ <section>
+ <title> Using with JMX </title>
+ <p>
+ In addition to the standard output contexts supported by the Hadoop
+ metrics package, you can also export HBase metrics via Java Management
+ Extensions (JMX). This will allow viewing HBase stats in JConsole or
+ any other JMX client.
+ </p>
+ <section>
+ <title>Enable HBase stats collection</title>
+ <p>
+ To enable JMX support in HBase, first edit
+ <code>$HBASE_HOME/conf/hadoop-metrics.properties</code> to support
+ metrics refreshing. (If you've already configured
+ <code>hadoop-metrics.properties</code> for another output context,
+ you can skip this step).
+ </p>
+ <source>
+# Configuration of the "hbase" context for null
+hbase.class=org.apache.hadoop.metrics.spi.NullContextWithUpdateThread
+hbase.period=60
+
+# Configuration of the "jvm" context for null
+jvm.class=org.apache.hadoop.metrics.spi.NullContextWithUpdateThread
+jvm.period=60
+
+# Configuration of the "rpc" context for null
+rpc.class=org.apache.hadoop.metrics.spi.NullContextWithUpdateThread
+rpc.period=60
+ </source>
+ </section>
+ <section>
+ <title>Setup JMX remote access</title>
+ <p>
+ For remote access, you will need to configure JMX remote passwords
+ and access profiles. Create the files:
+ </p>
+ <dl>
+ <dt><code>$HBASE_HOME/conf/jmxremote.passwd</code> (set permissions
+ to 600)</dt>
+ <dd>
+ <source>
+monitorRole monitorpass
+controlRole controlpass
+ </source>
+ </dd>
+
+ <dt><code>$HBASE_HOME/conf/jmxremote.access</code></dt>
+ <dd>
+ <source>
+monitorRole readonly
+controlRole readwrite
+ </source>
+ </dd>
+ </dl>
+ </section>
+ <section>
+ <title>Configure JMX in HBase startup</title>
+ <p>
+ Finally, edit the <code>$HBASE_HOME/conf/hbase-env.sh</code> and
+ <code>$HBASE_HOME/bin/hbase</code> scripts for JMX support:
+ </p>
+ <dl>
+ <dt><code>$HBASE_HOME/conf/hbase-env.sh</code></dt>
+ <dd>
+ <p>Add the lines:</p>
+ <source>
+JMX_OPTS="-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.ssl=false"
+JMX_OPTS="$JMX_OPTS -Dcom.sun.management.jmxremote.password.file=$HBASE_HOME/conf/jmxremote.passwd"
+JMX_OPTS="$JMX_OPTS -Dcom.sun.management.jmxremote.access.file=$HBASE_HOME/conf/jmxremote.access"
+
+export HBASE_MASTER_OPTS="$JMX_OPTS -Dcom.sun.management.jmxremote.port=10101"
+export HBASE_REGIONSERVER_OPTS="$JMX_OPTS -Dcom.sun.management.jmxremote.port=10102"
+ </source>
+ </dd>
+ <dt><code>$HBASE_HOME/bin/hbase</code></dt>
+ <dd>
+ <p>Towards the end of the script, replace the lines:</p>
+ <source>
+ # figure out which class to run
+if [ "$COMMAND" = "shell" ] ; then
+ CLASS="org.jruby.Main ${HBASE_HOME}/bin/hirb.rb"
+elif [ "$COMMAND" = "master" ] ; then
+ CLASS='org.apache.hadoop.hbase.master.HMaster'
+elif [ "$COMMAND" = "regionserver" ] ; then
+ CLASS='org.apache.hadoop.hbase.regionserver.HRegionServer'
+ </source>
+ <p>
+ with the lines: (adding the "HBASE_OPTS=..." lines for "master" and
+ "regionserver" commands)
+ </p>
+ <source>
+ # figure out which class to run
+if [ "$COMMAND" = "shell" ] ; then
+ CLASS="org.jruby.Main ${HBASE_HOME}/bin/hirb.rb"
+elif [ "$COMMAND" = "master" ] ; then
+ CLASS='org.apache.hadoop.hbase.master.HMaster'
+ HBASE_OPTS="$HBASE_OPTS $HBASE_MASTER_OPTS"
+elif [ "$COMMAND" = "regionserver" ] ; then
+ CLASS='org.apache.hadoop.hbase.regionserver.HRegionServer'
+ HBASE_OPTS="$HBASE_OPTS $HBASE_REGIONSERVER_OPTS"
+ </source>
+ </dd>
+ </dl>
+ <p>
+ After restarting the processes you want to monitor, you should now be
+ able to run JConsole (included with the JDK since JDK 5.0) to view
+ the statistics via JMX. HBase MBeans are exported under the
+ <strong><code>hadoop</code></strong> domain in JMX.
+ </p>
+ </section>
+ </section>
</body>
</document>
diff --git a/src/java/org/apache/hadoop/hbase/ipc/HBaseRpcMetrics.java b/src/java/org/apache/hadoop/hbase/ipc/HBaseRpcMetrics.java
index fcfd13943815..950d02a43900 100644
--- a/src/java/org/apache/hadoop/hbase/ipc/HBaseRpcMetrics.java
+++ b/src/java/org/apache/hadoop/hbase/ipc/HBaseRpcMetrics.java
@@ -47,6 +47,7 @@
public class HBaseRpcMetrics implements Updater {
private MetricsRecord metricsRecord;
private static Log LOG = LogFactory.getLog(HBaseRpcMetrics.class);
+ private final HBaseRPCStatistics rpcStatistics;
public HBaseRpcMetrics(String hostName, String port) {
MetricsContext context = MetricsUtil.getContext("rpc");
@@ -58,6 +59,8 @@ public HBaseRpcMetrics(String hostName, String port) {
+ hostName + ", port=" + port);
context.registerUpdater(this);
+
+ rpcStatistics = new HBaseRPCStatistics(this.registry, hostName, port);
}
@@ -110,6 +113,7 @@ public void doUpdates(MetricsContext context) {
}
public void shutdown() {
- // Nothing to do
+ if (rpcStatistics != null)
+ rpcStatistics.shutdown();
}
}
\ No newline at end of file
diff --git a/src/java/org/apache/hadoop/hbase/master/metrics/MasterMetrics.java b/src/java/org/apache/hadoop/hbase/master/metrics/MasterMetrics.java
index 4d527b0a5b15..62d7cf3888c4 100644
--- a/src/java/org/apache/hadoop/hbase/master/metrics/MasterMetrics.java
+++ b/src/java/org/apache/hadoop/hbase/master/metrics/MasterMetrics.java
@@ -39,6 +39,7 @@ public class MasterMetrics implements Updater {
private final Log LOG = LogFactory.getLog(this.getClass());
private final MetricsRecord metricsRecord;
private final MetricsRegistry registry = new MetricsRegistry();
+ private final MasterStatistics masterStatistics;
/*
* Count of requests to the cluster since last call to metrics update
*/
@@ -52,11 +53,16 @@ public MasterMetrics() {
metricsRecord.setTag("Master", name);
context.registerUpdater(this);
JvmMetrics.init("Master", name);
+
+ // expose the MBean for metrics
+ masterStatistics = new MasterStatistics(this.registry);
+
LOG.info("Initialized");
}
public void shutdown() {
- // nought to do.
+ if (masterStatistics != null)
+ masterStatistics.shutdown();
}
/**
diff --git a/src/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java b/src/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java
index 52ab21ffffef..49e819724960 100644
--- a/src/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java
+++ b/src/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java
@@ -47,6 +47,7 @@ public class RegionServerMetrics implements Updater {
private long lastUpdate = System.currentTimeMillis();
private static final int MB = 1024*1024;
private MetricsRegistry registry = new MetricsRegistry();
+ private final RegionServerStatistics statistics;
public final MetricsTimeVaryingRate atomicIncrementTime =
new MetricsTimeVaryingRate("atomicIncrementTime", registry);
@@ -112,13 +113,18 @@ public RegionServerMetrics() {
context.registerUpdater(this);
// Add jvmmetrics.
JvmMetrics.init("RegionServer", name);
+
+ // export for JMX
+ statistics = new RegionServerStatistics(this.registry, name);
+
LOG.info("Initialized");
}
-
+
public void shutdown() {
- // nought to do.
+ if (statistics != null)
+ statistics.shutdown();
}
-
+
/**
* Since this object is a registered updater, this method will be called
* periodically, e.g. every 5 seconds.
@@ -141,7 +147,7 @@ public void doUpdates(MetricsContext unused) {
this.metricsRecord.update();
this.lastUpdate = System.currentTimeMillis();
}
-
+
public void resetAllMinMax() {
// Nothing to do
}
|
382bd64cdd015fd035182785291db8122791695e
|
camel
|
CAMEL-1369: Removed @MessageDriven as its- replaced with @Consume.--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@749562 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/camel
|
diff --git a/camel-core/src/main/java/org/apache/camel/MessageDriven.java b/camel-core/src/main/java/org/apache/camel/MessageDriven.java
deleted file mode 100644
index 270d03a9d9d34..0000000000000
--- a/camel-core/src/main/java/org/apache/camel/MessageDriven.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.camel;
-
-import java.lang.annotation.Documented;
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-/**
- * Used to indicate a method on a POJO which is used as a {@link Consumer} of
- * {@link Exchange} instances to process {@link Message} instances.
- *
- * Either a <a href="http://camel.apache.org/uris.html">URI</a> for an
- * endpoint should be configured, or a name of an endpoint which refers to a
- * Spring bean name in your Spring ApplicationContext.
- *
- * @version $Revision$
- */
-@Retention(RetentionPolicy.RUNTIME)
-@Documented
-@Target({ElementType.FIELD, ElementType.METHOD, ElementType.CONSTRUCTOR })
-public @interface MessageDriven {
- String uri() default "";
-
- String name() default "";
-}
diff --git a/camel-core/src/main/java/org/apache/camel/impl/CamelPostProcessorHelper.java b/camel-core/src/main/java/org/apache/camel/impl/CamelPostProcessorHelper.java
index 28e21a383a575..aa18b9cc14db3 100644
--- a/camel-core/src/main/java/org/apache/camel/impl/CamelPostProcessorHelper.java
+++ b/camel-core/src/main/java/org/apache/camel/impl/CamelPostProcessorHelper.java
@@ -25,7 +25,6 @@
import org.apache.camel.Consume;
import org.apache.camel.Consumer;
import org.apache.camel.Endpoint;
-import org.apache.camel.MessageDriven;
import org.apache.camel.PollingConsumer;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
@@ -67,12 +66,6 @@ public void setCamelContext(CamelContext camelContext) {
}
public void consumerInjection(Method method, Object bean) {
- MessageDriven annotation = method.getAnnotation(MessageDriven.class);
- if (annotation != null) {
- LOG.info("Creating a consumer for: " + annotation);
- subscribeMethod(method, bean, annotation.uri(), annotation.name());
- }
-
Consume consume = method.getAnnotation(Consume.class);
if (consume != null) {
LOG.info("Creating a consumer for: " + consume);
@@ -87,8 +80,10 @@ public void subscribeMethod(Method method, Object bean, String endpointUri, Stri
if (endpoint != null) {
try {
Processor processor = createConsumerProcessor(bean, method, endpoint);
- LOG.info("Created processor: " + processor);
Consumer consumer = endpoint.createConsumer(processor);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Created processor: " + processor + " for consumer: " + consumer);
+ }
startService(consumer);
} catch (Exception e) {
throw ObjectHelper.wrapRuntimeCamelException(e);
@@ -145,7 +140,8 @@ public Object getInjectionValue(Class<?> type, String endpointUri, String endpoi
throw createProxyInstantiationRuntimeException(type, endpoint, e);
}
} else {
- throw new IllegalArgumentException("Invalid type: " + type.getName() + " which cannot be injected via @EndpointInject for " + endpoint);
+ throw new IllegalArgumentException("Invalid type: " + type.getName()
+ + " which cannot be injected via @EndpointInject/@Produce for: " + endpoint);
}
}
return null;
@@ -157,8 +153,7 @@ protected RuntimeException createProxyInstantiationRuntimeException(Class<?> typ
}
/**
- * Factory method to create a started {@link org.apache.camel.PollingConsumer} to be injected
- * into a POJO
+ * Factory method to create a started {@link org.apache.camel.PollingConsumer} to be injected into a POJO
*/
protected PollingConsumer createInjectionPollingConsumer(Endpoint endpoint) {
try {
@@ -171,8 +166,7 @@ protected PollingConsumer createInjectionPollingConsumer(Endpoint endpoint) {
}
/**
- * A Factory method to create a started {@link org.apache.camel.Producer} to be injected into
- * a POJO
+ * A Factory method to create a started {@link org.apache.camel.Producer} to be injected into a POJO
*/
protected Producer createInjectionProducer(Endpoint endpoint) {
try {
diff --git a/components/camel-guice/src/main/java/org/apache/camel/guice/CamelModule.java b/components/camel-guice/src/main/java/org/apache/camel/guice/CamelModule.java
index 193a40126e18c..63ffa9dc058e7 100644
--- a/components/camel-guice/src/main/java/org/apache/camel/guice/CamelModule.java
+++ b/components/camel-guice/src/main/java/org/apache/camel/guice/CamelModule.java
@@ -20,7 +20,6 @@
import com.google.inject.matcher.Matchers;
import org.apache.camel.CamelContext;
import org.apache.camel.Consume;
-import org.apache.camel.MessageDriven;
import org.apache.camel.Routes;
import org.apache.camel.guice.impl.ConsumerInjection;
import org.apache.camel.guice.impl.EndpointInjector;
@@ -58,8 +57,6 @@ protected void configure() {
ConsumerInjection consumerInjection = new ConsumerInjection();
requestInjection(consumerInjection);
-
- bindConstructorInterceptor(Matchers.methodAnnotatedWith(MessageDriven.class), consumerInjection);
bindConstructorInterceptor(Matchers.methodAnnotatedWith(Consume.class), consumerInjection);
}
diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/bind/MyBean.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/bind/MyBean.java
index 4fb9058c4784b..4008039b44618 100644
--- a/components/camel-jms/src/test/java/org/apache/camel/component/jms/bind/MyBean.java
+++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/bind/MyBean.java
@@ -18,9 +18,9 @@
import java.util.Map;
+import org.apache.camel.Consume;
import org.apache.camel.EndpointInject;
import org.apache.camel.Headers;
-import org.apache.camel.MessageDriven;
import org.apache.camel.ProducerTemplate;
/**
@@ -32,7 +32,7 @@ public class MyBean {
@EndpointInject(uri = "mock:result")
private ProducerTemplate producer;
- @MessageDriven(uri = "activemq:Test.BindingQueue")
+ @Consume(uri = "activemq:Test.BindingQueue")
public void myMethod(@Headers Map headers, String body) {
this.headers = headers;
this.body = body;
diff --git a/components/camel-spring/src/main/java/org/apache/camel/spring/CamelBeanPostProcessor.java b/components/camel-spring/src/main/java/org/apache/camel/spring/CamelBeanPostProcessor.java
index 241e261139578..59f421a419efa 100644
--- a/components/camel-spring/src/main/java/org/apache/camel/spring/CamelBeanPostProcessor.java
+++ b/components/camel-spring/src/main/java/org/apache/camel/spring/CamelBeanPostProcessor.java
@@ -43,11 +43,11 @@
/**
* A bean post processor which implements the <a href="http://camel.apache.org/bean-integration.html">Bean Integration</a>
- * features in Camel such as the <a href="http://camel.apache.org/bean-injection.html">Bean Injection</a> of objects like
+ * features in Camel. Features such as the <a href="http://camel.apache.org/bean-injection.html">Bean Injection</a> of objects like
* {@link Endpoint} and
* {@link org.apache.camel.ProducerTemplate} together with support for
* <a href="http://camel.apache.org/pojo-consuming.html">POJO Consuming</a> via the
- * {@link org.apache.camel.Consume} and {@link org.apache.camel.MessageDriven} annotations along with
+ * {@link org.apache.camel.Consume} annotation along with
* <a href="http://camel.apache.org/pojo-producing.html">POJO Producing</a> via the
* {@link org.apache.camel.Produce} annotation along with other annotations such as
* {@link org.apache.camel.RecipientList} for creating <a href="http://camel.apache.org/recipientlist-annotation.html">a Recipient List router via annotations</a>.
@@ -177,32 +177,6 @@ protected void setterInjection(Method method, Object bean, String endpointUri, S
}
}
-
- protected void consumerInjection(final Object bean) {
- org.springframework.util.ReflectionUtils.doWithMethods(bean.getClass(), new org.springframework.util.ReflectionUtils.MethodCallback() {
- @SuppressWarnings("unchecked")
- public void doWith(Method method) throws IllegalArgumentException, IllegalAccessException {
- /*
- * TODO support callbacks? if
- * (method.getAnnotation(Callback.class) != null) { try {
- * Expression e = ExpressionFactory.createExpression(
- * method.getAnnotation(Callback.class).condition());
- * JexlContext jc = JexlHelper.createContext();
- * jc.getVars().put("this", obj); Object r = e.evaluate(jc); if
- * (!(r instanceof Boolean)) { throw new
- * RuntimeException("Expression did not returned a boolean value
- * but: " + r); } Boolean oldVal =
- * req.getCallbacks().get(method); Boolean newVal = (Boolean) r;
- * if ((oldVal == null || !oldVal) && newVal) {
- * req.getCallbacks().put(method, newVal); method.invoke(obj,
- * new Object[0]); // TODO: handle return value and sent it as
- * the answer } } catch (Exception e) { throw new
- * RuntimeException("Unable to invoke callback", e); } }
- */
- }
- });
- }
-
public CamelPostProcessorHelper getPostProcessor() {
ObjectHelper.notNull(postProcessor, "postProcessor");
return postProcessor;
diff --git a/components/camel-spring/src/test/java/org/apache/camel/component/bean/MyBeanBindingConsumer.java b/components/camel-spring/src/test/java/org/apache/camel/component/bean/MyBeanBindingConsumer.java
index e9739e5e66061..a93911f7ffe98 100644
--- a/components/camel-spring/src/test/java/org/apache/camel/component/bean/MyBeanBindingConsumer.java
+++ b/components/camel-spring/src/test/java/org/apache/camel/component/bean/MyBeanBindingConsumer.java
@@ -16,8 +16,8 @@
*/
package org.apache.camel.component.bean;
+import org.apache.camel.Consume;
import org.apache.camel.Header;
-import org.apache.camel.MessageDriven;
import org.apache.camel.ProducerTemplate;
import org.apache.camel.language.Bean;
import org.apache.camel.language.Constant;
@@ -29,22 +29,22 @@ public class MyBeanBindingConsumer {
private ProducerTemplate template;
- @MessageDriven(uri = "direct:startBeanExpression")
+ @Consume(uri = "direct:startBeanExpression")
public void doSomethingBeanExpression(String payload, @Bean("myCounter") int count) {
template.sendBodyAndHeader("mock:result", "Bye " + payload, "count", count);
}
- @MessageDriven(uri = "direct:startConstantExpression")
+ @Consume(uri = "direct:startConstantExpression")
public void doSomethingConstantExpression(String payload, @Constant("5") int count) {
template.sendBodyAndHeader("mock:result", "Bye " + payload, "count", count);
}
- @MessageDriven(uri = "direct:startHeaderExpression")
+ @Consume(uri = "direct:startHeaderExpression")
public void doSomethingHeaderExpression(String payload, @Header("number") int count) {
template.sendBodyAndHeader("mock:result", "Bye " + payload, "count", count);
}
- @MessageDriven(uri = "direct:startMany")
+ @Consume(uri = "direct:startMany")
public void doSomethingManyExpression(String payload, @Constant("5") int count, @Header("number") int number) {
template.sendBodyAndHeader("mock:result", "Bye " + payload, "count", count * number);
}
diff --git a/components/camel-spring/src/test/java/org/apache/camel/component/bean/RouterBean.java b/components/camel-spring/src/test/java/org/apache/camel/component/bean/RouterBean.java
index 0272105a6c13e..1f41b930206c8 100644
--- a/components/camel-spring/src/test/java/org/apache/camel/component/bean/RouterBean.java
+++ b/components/camel-spring/src/test/java/org/apache/camel/component/bean/RouterBean.java
@@ -16,7 +16,7 @@
*/
package org.apache.camel.component.bean;
-import org.apache.camel.MessageDriven;
+import org.apache.camel.Consume;
import org.apache.camel.RecipientList;
/**
@@ -27,7 +27,7 @@
*/
public class RouterBean {
- @MessageDriven(uri = "direct:start")
+ @Consume(uri = "direct:start")
@RecipientList
public String[] route(String body) {
System.out.println("RouteBean called with body: " + body);
diff --git a/components/camel-spring/src/test/java/org/apache/camel/spring/example/MyConsumer.java b/components/camel-spring/src/test/java/org/apache/camel/spring/example/MyConsumer.java
index b6e101a92d8bf..103043609a220 100644
--- a/components/camel-spring/src/test/java/org/apache/camel/spring/example/MyConsumer.java
+++ b/components/camel-spring/src/test/java/org/apache/camel/spring/example/MyConsumer.java
@@ -16,8 +16,8 @@
*/
package org.apache.camel.spring.example;
+import org.apache.camel.Consume;
import org.apache.camel.EndpointInject;
-import org.apache.camel.MessageDriven;
import org.apache.camel.ProducerTemplate;
import org.apache.camel.util.ObjectHelper;
import org.apache.commons.logging.Log;
@@ -33,7 +33,7 @@ public class MyConsumer {
@EndpointInject(uri = "mock:result")
private ProducerTemplate destination;
- @MessageDriven(uri = "direct:start")
+ @Consume(uri = "direct:start")
public void doSomething(String body) {
ObjectHelper.notNull(destination, "destination");
|
64a2fde80a9f3aa71ac5c0e0b479c242bcecb561
|
kotlin
|
Extract Function: Fix signature update on dialog- opening--
|
c
|
https://github.com/JetBrains/kotlin
|
diff --git a/idea/src/org/jetbrains/jet/plugin/refactoring/extractFunction/ui/KotlinExtractFunctionDialog.java b/idea/src/org/jetbrains/jet/plugin/refactoring/extractFunction/ui/KotlinExtractFunctionDialog.java
index e5df89c5e737c..26b599ac49db5 100644
--- a/idea/src/org/jetbrains/jet/plugin/refactoring/extractFunction/ui/KotlinExtractFunctionDialog.java
+++ b/idea/src/org/jetbrains/jet/plugin/refactoring/extractFunction/ui/KotlinExtractFunctionDialog.java
@@ -63,7 +63,7 @@ public KotlinExtractFunctionDialog(Project project, ExtractionDescriptorWithConf
setModal(true);
setTitle(JetRefactoringBundle.message("extract.function"));
init();
- update(false);
+ update();
}
private void createUIComponents() {
@@ -94,10 +94,8 @@ private boolean checkNames() {
return true;
}
- private void update(boolean recreateDescriptor) {
- if (recreateDescriptor) {
- this.currentDescriptor = createDescriptor();
- }
+ private void update() {
+ this.currentDescriptor = createDescriptor();
setOKActionEnabled(checkNames());
signaturePreviewField.setText(
@@ -116,7 +114,7 @@ protected void init() {
new DocumentAdapter() {
@Override
public void documentChanged(DocumentEvent event) {
- update(true);
+ update();
}
}
);
@@ -130,7 +128,7 @@ public void documentChanged(DocumentEvent event) {
new ItemListener() {
@Override
public void itemStateChanged(@NotNull ItemEvent e) {
- update(true);
+ update();
}
}
);
@@ -138,7 +136,7 @@ public void itemStateChanged(@NotNull ItemEvent e) {
parameterTablePanel = new KotlinParameterTablePanel() {
@Override
protected void updateSignature() {
- KotlinExtractFunctionDialog.this.update(true);
+ KotlinExtractFunctionDialog.this.update();
}
@Override
|
69a1dbd753d1f060df38e47e6179cf05b6b62d63
|
ReactiveX-RxJava
|
add synchronous test of resubscribe after error--
|
p
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/test/java/rx/operators/OperatorRetryTest.java b/rxjava-core/src/test/java/rx/operators/OperatorRetryTest.java
index 645895817b..a564e71a10 100644
--- a/rxjava-core/src/test/java/rx/operators/OperatorRetryTest.java
+++ b/rxjava-core/src/test/java/rx/operators/OperatorRetryTest.java
@@ -18,6 +18,7 @@
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
@@ -30,6 +31,7 @@
import org.junit.Test;
import org.mockito.InOrder;
+import org.mockito.Mockito;
import rx.Observable;
import rx.Observable.OnSubscribe;
@@ -118,6 +120,50 @@ public void testInfiniteRetry() {
inOrder.verify(observer, times(1)).onCompleted();
inOrder.verifyNoMoreInteractions();
}
+
+ /**
+ * Checks in a simple and synchronous way that retry resubscribes
+ * after error. This test fails against 0.16.1-0.17.4, hangs on 0.17.5 and
+ * passes in 0.17.6 thanks to fix for issue #1027.
+ */
+ @SuppressWarnings("unchecked")
+ @Test
+ public void testRetrySubscribesAgainAfterError() {
+
+ // record emitted values with this action
+ Action1<Integer> record = mock(Action1.class);
+ InOrder inOrder = inOrder(record);
+
+ // always throw an exception with this action
+ Action1<Integer> throwException = mock(Action1.class);
+ doThrow(new RuntimeException()).when(throwException).call(Mockito.anyInt());
+
+ // create a retrying observable based on a PublishSubject
+ PublishSubject<Integer> subject = PublishSubject.create();
+ subject
+ // record item
+ .doOnNext(record)
+ // throw a RuntimeException
+ .doOnNext(throwException)
+ // retry on error
+ .retry()
+ // subscribe and ignore
+ .subscribe();
+
+ inOrder.verifyNoMoreInteractions();
+
+ subject.onNext(1);
+ inOrder.verify(record).call(1);
+
+ subject.onNext(2);
+ inOrder.verify(record).call(2);
+
+ subject.onNext(3);
+ inOrder.verify(record).call(3);
+
+ inOrder.verifyNoMoreInteractions();
+ }
+
public static class FuncWithErrors implements Observable.OnSubscribe<String> {
@@ -356,4 +402,5 @@ public void testTimeoutWithRetry() {
assertEquals("Start 6 threads, retry 5 then fail on 6", 6, so.efforts.get());
}
+
}
|
e33e1ef8d743671b8f43a6434a420253fc3d6fa6
|
kotlin
|
KT-8968 Special completion after "super."-- -KT-8968 Fixed-
|
c
|
https://github.com/JetBrains/kotlin
|
diff --git a/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/CompletionSession.kt b/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/CompletionSession.kt
index 94de219ce86ed..d0ebb525a1e8d 100644
--- a/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/CompletionSession.kt
+++ b/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/CompletionSession.kt
@@ -420,7 +420,7 @@ abstract class CompletionSession(protected val configuration: CompletionSessionC
protected fun createLookupElementFactory(contextVariablesProvider: ContextVariablesProvider): LookupElementFactory {
return LookupElementFactory(basicLookupElementFactory, resolutionFacade, receiverTypes,
- callTypeAndReceiver.callType, contextVariablesProvider)
+ callTypeAndReceiver.callType, inDescriptor, contextVariablesProvider)
}
private fun detectCallTypeAndReceiverTypes(): Pair<CallTypeAndReceiver<*, *>, Collection<KotlinType>?> {
diff --git a/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/CompletionUtils.kt b/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/CompletionUtils.kt
index 35a06ab506ad4..e973faf9ec186 100644
--- a/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/CompletionUtils.kt
+++ b/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/CompletionUtils.kt
@@ -16,7 +16,10 @@
package org.jetbrains.kotlin.idea.completion
-import com.intellij.codeInsight.completion.*
+import com.intellij.codeInsight.completion.CompletionProgressIndicator
+import com.intellij.codeInsight.completion.CompletionService
+import com.intellij.codeInsight.completion.InsertionContext
+import com.intellij.codeInsight.completion.PrefixMatcher
import com.intellij.codeInsight.lookup.*
import com.intellij.openapi.progress.ProcessCanceledException
import com.intellij.openapi.util.Key
@@ -62,6 +65,7 @@ tailrec fun <T : Any> LookupElement.getUserDataDeep(key: Key<T>): T? {
}
enum class ItemPriority {
+ SUPER_METHOD_WITH_ARGUMENTS,
DEFAULT,
IMPLEMENT,
OVERRIDE,
diff --git a/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/LookupElementFactory.kt b/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/LookupElementFactory.kt
index eefacdfa4d4d2..74cd26da33d9e 100644
--- a/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/LookupElementFactory.kt
+++ b/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/LookupElementFactory.kt
@@ -34,11 +34,13 @@ import org.jetbrains.kotlin.renderer.DescriptorRenderer
import org.jetbrains.kotlin.renderer.render
import org.jetbrains.kotlin.resolve.descriptorUtil.hasDefaultValue
import org.jetbrains.kotlin.resolve.descriptorUtil.isExtension
+import org.jetbrains.kotlin.resolve.descriptorUtil.parentsWithSelf
import org.jetbrains.kotlin.synthetic.SamAdapterExtensionFunctionDescriptor
import org.jetbrains.kotlin.synthetic.SyntheticJavaPropertyDescriptor
import org.jetbrains.kotlin.types.KotlinType
import org.jetbrains.kotlin.types.TypeUtils
import org.jetbrains.kotlin.types.typeUtil.isSubtypeOf
+import org.jetbrains.kotlin.utils.addIfNotNull
data /* we need copy() */
class LookupElementFactory(
@@ -46,6 +48,7 @@ class LookupElementFactory(
private val resolutionFacade: ResolutionFacade,
private val receiverTypes: Collection<KotlinType>?,
private val callType: CallType<*>?,
+ private val inDescriptor: DeclarationDescriptor,
private val contextVariablesProvider: ContextVariablesProvider
) {
companion object {
@@ -57,6 +60,14 @@ class LookupElementFactory(
val insertHandlerProvider = basicFactory.insertHandlerProvider
+ private val superFunctions: Set<FunctionDescriptor> by lazy {
+ inDescriptor.parentsWithSelf
+ .filterIsInstance<FunctionDescriptor>()
+ .toList()
+ .flatMap { it.overriddenDescriptors }
+ .toSet()
+ }
+
public fun createStandardLookupElementsForDescriptor(descriptor: DeclarationDescriptor, useReceiverTypes: Boolean): Collection<LookupElement> {
val result = SmartList<LookupElement>()
@@ -66,8 +77,13 @@ class LookupElementFactory(
result.add(lookupElement)
// add special item for function with one argument of function type with more than one parameter
- if (descriptor is FunctionDescriptor && isNormalCall && callType != CallType.SUPER_MEMBERS) {
- result.addSpecialFunctionCallElements(descriptor, useReceiverTypes)
+ if (descriptor is FunctionDescriptor && isNormalCall) {
+ if (callType != CallType.SUPER_MEMBERS) {
+ result.addSpecialFunctionCallElements(descriptor, useReceiverTypes)
+ }
+ else if (useReceiverTypes) {
+ result.addIfNotNull(createSuperFunctionCallWithArguments(descriptor))
+ }
}
return result
@@ -94,7 +110,7 @@ class LookupElementFactory(
val fuzzyParameterType = FuzzyType(parameterType, descriptor.typeParameters)
for ((variable, substitutor) in contextVariablesProvider.functionTypeVariables(fuzzyParameterType)) {
val substitutedDescriptor = descriptor.substitute(substitutor)
- add(createFunctionCallElementWithArgument(substitutedDescriptor, variable.name.render(), useReceiverTypes))
+ add(createFunctionCallElementWithArguments(substitutedDescriptor, variable.name.render(), useReceiverTypes))
}
}
}
@@ -137,21 +153,35 @@ class LookupElementFactory(
return lookupElement
}
- private fun createFunctionCallElementWithArgument(descriptor: FunctionDescriptor, argumentText: String, useReceiverTypes: Boolean): LookupElement {
+ private fun createSuperFunctionCallWithArguments(descriptor: FunctionDescriptor): LookupElement? {
+ if (descriptor.valueParameters.isEmpty()) return null
+ if (descriptor !in superFunctions) return null
+
+ val argumentText = descriptor.valueParameters.map {
+ (if (it.varargElementType != null) "*" else "") + it.name.render()
+ }.joinToString(", ")
+
+ val lookupElement = createFunctionCallElementWithArguments(descriptor, argumentText, true)
+ lookupElement.assignPriority(ItemPriority.SUPER_METHOD_WITH_ARGUMENTS)
+ lookupElement.putUserData(KotlinCompletionCharFilter.SUPPRESS_ITEM_SELECTION_BY_CHARS_ON_TYPING, Unit)
+ return lookupElement
+ }
+
+ private fun createFunctionCallElementWithArguments(descriptor: FunctionDescriptor, argumentText: String, useReceiverTypes: Boolean): LookupElement {
var lookupElement = createLookupElement(descriptor, useReceiverTypes)
val needTypeArguments = (insertHandlerProvider.insertHandler(descriptor) as KotlinFunctionInsertHandler.Normal).inputTypeArguments
- return FunctionCallWithArgumentLookupElement(lookupElement, descriptor, argumentText, needTypeArguments)
+ return FunctionCallWithArgumentsLookupElement(lookupElement, descriptor, argumentText, needTypeArguments)
}
- private inner class FunctionCallWithArgumentLookupElement(
+ private inner class FunctionCallWithArgumentsLookupElement(
originalLookupElement: LookupElement,
private val descriptor: FunctionDescriptor,
private val argumentText: String,
private val needTypeArguments: Boolean
) : LookupElementDecorator<LookupElement>(originalLookupElement) {
- override fun equals(other: Any?) = other is FunctionCallWithArgumentLookupElement && delegate == other.delegate && argumentText == other.argumentText
+ override fun equals(other: Any?) = other is FunctionCallWithArgumentsLookupElement && delegate == other.delegate && argumentText == other.argumentText
override fun hashCode() = delegate.hashCode() * 17 + argumentText.hashCode()
override fun renderElement(presentation: LookupElementPresentation) {
diff --git a/idea/idea-completion/testData/basic/common/SuperMembers3.kt b/idea/idea-completion/testData/basic/common/SuperMembers3.kt
new file mode 100644
index 0000000000000..3674108e631c0
--- /dev/null
+++ b/idea/idea-completion/testData/basic/common/SuperMembers3.kt
@@ -0,0 +1,18 @@
+open class B<T> {
+ open fun xxx_foo(p1: T, vararg p2: String) {}
+ open fun xxx_bar(p1: Int, p2: String) {}
+ open val xxx_val: Int = 0
+}
+
+class C : B<String>() {
+ override fun xxx_foo(p1: String, vararg p2: String) {
+ super.xxx_<caret>
+ }
+}
+
+// WITH_ORDER
+// EXIST: { lookupString: "xxx_foo", itemText: "xxx_foo", tailText: "(p1, *p2)", typeText: "Unit", attributes: "bold" }
+// EXIST: { lookupString: "xxx_foo", itemText: "xxx_foo", tailText: "(p1: String, vararg p2: String)", typeText: "Unit", attributes: "bold" }
+// EXIST: { lookupString: "xxx_val", itemText: "xxx_val", tailText: null, typeText: "Int", attributes: "bold" }
+// EXIST: { lookupString: "xxx_bar", itemText: "xxx_bar", tailText: "(p1: Int, p2: String)", typeText: "Unit", attributes: "bold" }
+// NOTHING_ELSE
diff --git a/idea/idea-completion/testData/basic/common/SuperMembers4.kt b/idea/idea-completion/testData/basic/common/SuperMembers4.kt
new file mode 100644
index 0000000000000..e408e9715d11e
--- /dev/null
+++ b/idea/idea-completion/testData/basic/common/SuperMembers4.kt
@@ -0,0 +1,17 @@
+open class B {
+ open fun foo() {}
+ open fun bar() {}
+}
+
+class C : B() {
+ override fun foo() {
+ super.<caret>
+ }
+}
+
+// EXIST: { lookupString: "foo", itemText: "foo", tailText: "()", typeText: "Unit", attributes: "bold" }
+// EXIST: { lookupString: "bar", itemText: "bar", tailText: "()", typeText: "Unit", attributes: "bold" }
+// EXIST: equals
+// EXIST: hashCode
+// EXIST: toString
+// NOTHING_ELSE
diff --git a/idea/idea-completion/testData/handlers/basic/SuperMethod.kt b/idea/idea-completion/testData/handlers/basic/SuperMethod.kt
index 90da7db5ec4f5..8b889210fe66f 100644
--- a/idea/idea-completion/testData/handlers/basic/SuperMethod.kt
+++ b/idea/idea-completion/testData/handlers/basic/SuperMethod.kt
@@ -8,4 +8,5 @@ class Derived : Base() {
}
}
-// ELEMENT: foo
\ No newline at end of file
+// ELEMENT: foo
+// TAIL_TEXT: "(p: Int)"
\ No newline at end of file
diff --git a/idea/idea-completion/testData/handlers/basic/SuperMethod.kt.after b/idea/idea-completion/testData/handlers/basic/SuperMethod.kt.after
index 33f8f33224cca..956b401a4044a 100644
--- a/idea/idea-completion/testData/handlers/basic/SuperMethod.kt.after
+++ b/idea/idea-completion/testData/handlers/basic/SuperMethod.kt.after
@@ -8,4 +8,5 @@ class Derived : Base() {
}
}
-// ELEMENT: foo
\ No newline at end of file
+// ELEMENT: foo
+// TAIL_TEXT: "(p: Int)"
\ No newline at end of file
diff --git a/idea/idea-completion/testData/handlers/basic/SuperMethod2.kt b/idea/idea-completion/testData/handlers/basic/SuperMethod2.kt
new file mode 100644
index 0000000000000..fb5dc4fc32086
--- /dev/null
+++ b/idea/idea-completion/testData/handlers/basic/SuperMethod2.kt
@@ -0,0 +1,12 @@
+open class Base {
+ open fun foo(p1: Int, vararg p2: Int){}
+}
+
+class Derived : Base() {
+ override fun foo(p1: Int, vararg p2: Int) {
+ super.<caret>
+ }
+}
+
+// ELEMENT: foo
+// TAIL_TEXT: "(p1, *p2)"
\ No newline at end of file
diff --git a/idea/idea-completion/testData/handlers/basic/SuperMethod2.kt.after b/idea/idea-completion/testData/handlers/basic/SuperMethod2.kt.after
new file mode 100644
index 0000000000000..88834bd676b4e
--- /dev/null
+++ b/idea/idea-completion/testData/handlers/basic/SuperMethod2.kt.after
@@ -0,0 +1,12 @@
+open class Base {
+ open fun foo(p1: Int, vararg p2: Int){}
+}
+
+class Derived : Base() {
+ override fun foo(p1: Int, vararg p2: Int) {
+ super.foo(p1, *p2)<caret>
+ }
+}
+
+// ELEMENT: foo
+// TAIL_TEXT: "(p1, *p2)"
\ No newline at end of file
diff --git a/idea/idea-completion/testData/smart/SuperMembers2.kt b/idea/idea-completion/testData/smart/SuperMembers2.kt
new file mode 100644
index 0000000000000..c4a1511e2ccbe
--- /dev/null
+++ b/idea/idea-completion/testData/smart/SuperMembers2.kt
@@ -0,0 +1,16 @@
+open class B {
+ open fun foo(p1: Int, p2: String): Int = 0
+ open fun bar(p1: Int, p2: String): Int = 0
+}
+
+class C : B() {
+ override fun foo(p1: Int, p2: String): Int {
+ return super.<caret>
+ }
+}
+
+// EXIST: { lookupString: "foo", itemText: "foo", tailText: "(p1, p2)", typeText: "Int", attributes: "bold" }
+// EXIST: { lookupString: "foo", itemText: "foo", tailText: "(p1: Int, p2: String)", typeText: "Int", attributes: "bold" }
+// EXIST: { lookupString: "bar", itemText: "bar", tailText: "(p1: Int, p2: String)", typeText: "Int", attributes: "bold" }
+// EXIST: hashCode
+// NOTHING_ELSE
diff --git a/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/JSBasicCompletionTestGenerated.java b/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/JSBasicCompletionTestGenerated.java
index 924fae8d84c36..eab90b8dcaa88 100644
--- a/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/JSBasicCompletionTestGenerated.java
+++ b/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/JSBasicCompletionTestGenerated.java
@@ -703,6 +703,18 @@ public void testSuperMembers2() throws Exception {
doTest(fileName);
}
+ @TestMetadata("SuperMembers3.kt")
+ public void testSuperMembers3() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("idea/idea-completion/testData/basic/common/SuperMembers3.kt");
+ doTest(fileName);
+ }
+
+ @TestMetadata("SuperMembers4.kt")
+ public void testSuperMembers4() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("idea/idea-completion/testData/basic/common/SuperMembers4.kt");
+ doTest(fileName);
+ }
+
@TestMetadata("TopLevelClassCompletionInQualifiedCall.kt")
public void testTopLevelClassCompletionInQualifiedCall() throws Exception {
String fileName = KotlinTestUtils.navigationMetadata("idea/idea-completion/testData/basic/common/TopLevelClassCompletionInQualifiedCall.kt");
diff --git a/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/JvmBasicCompletionTestGenerated.java b/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/JvmBasicCompletionTestGenerated.java
index a292a6d9cc414..d417874819849 100644
--- a/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/JvmBasicCompletionTestGenerated.java
+++ b/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/JvmBasicCompletionTestGenerated.java
@@ -703,6 +703,18 @@ public void testSuperMembers2() throws Exception {
doTest(fileName);
}
+ @TestMetadata("SuperMembers3.kt")
+ public void testSuperMembers3() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("idea/idea-completion/testData/basic/common/SuperMembers3.kt");
+ doTest(fileName);
+ }
+
+ @TestMetadata("SuperMembers4.kt")
+ public void testSuperMembers4() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("idea/idea-completion/testData/basic/common/SuperMembers4.kt");
+ doTest(fileName);
+ }
+
@TestMetadata("TopLevelClassCompletionInQualifiedCall.kt")
public void testTopLevelClassCompletionInQualifiedCall() throws Exception {
String fileName = KotlinTestUtils.navigationMetadata("idea/idea-completion/testData/basic/common/TopLevelClassCompletionInQualifiedCall.kt");
diff --git a/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/JvmSmartCompletionTestGenerated.java b/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/JvmSmartCompletionTestGenerated.java
index a7013b9bdecc6..10e38ae578edc 100644
--- a/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/JvmSmartCompletionTestGenerated.java
+++ b/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/JvmSmartCompletionTestGenerated.java
@@ -485,6 +485,12 @@ public void testSuperMembers() throws Exception {
doTest(fileName);
}
+ @TestMetadata("SuperMembers2.kt")
+ public void testSuperMembers2() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("idea/idea-completion/testData/smart/SuperMembers2.kt");
+ doTest(fileName);
+ }
+
@TestMetadata("ThisConstructorArgument.kt")
public void testThisConstructorArgument() throws Exception {
String fileName = KotlinTestUtils.navigationMetadata("idea/idea-completion/testData/smart/ThisConstructorArgument.kt");
diff --git a/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/handlers/BasicCompletionHandlerTestGenerated.java b/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/handlers/BasicCompletionHandlerTestGenerated.java
index f2620a7cb7df3..28e65ea373a19 100644
--- a/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/handlers/BasicCompletionHandlerTestGenerated.java
+++ b/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/handlers/BasicCompletionHandlerTestGenerated.java
@@ -149,6 +149,12 @@ public void testSuperMethod() throws Exception {
doTest(fileName);
}
+ @TestMetadata("SuperMethod2.kt")
+ public void testSuperMethod2() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("idea/idea-completion/testData/handlers/basic/SuperMethod2.kt");
+ doTest(fileName);
+ }
+
@TestMetadata("SuperTypeArg.kt")
public void testSuperTypeArg() throws Exception {
String fileName = KotlinTestUtils.navigationMetadata("idea/idea-completion/testData/handlers/basic/SuperTypeArg.kt");
|
3f0c9e5fe36d201de021d989b23ebaeb2d9a027b
|
hadoop
|
YARN-3379. Fixed missing data in localityTable and- ResourceRequests table in RM WebUI. Contributed by Xuan Gong (cherry picked- from commit 4e886eb9cbd2dcb128bbfd17309c734083093a4c)--
|
c
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 66079e22c496d..5f9e8dd7ec3bc 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -751,6 +751,9 @@ Release 2.7.0 - UNRELEASED
YARN-3349. Treat all exceptions as failure in
TestFSRMStateStore#testFSRMStateStoreClientRetry. (Zhihai Xu via ozawa)
+ YARN-3379. Fixed missing data in localityTable and ResourceRequests table
+ in RM WebUI. (Xuan Gong via jianhe)
+
Release 2.6.0 - 2014-11-18
INCOMPATIBLE CHANGES
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/AppAttemptBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/AppAttemptBlock.java
index eeccf0fc0e30e..dca39d6d3c02c 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/AppAttemptBlock.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/AppAttemptBlock.java
@@ -19,12 +19,6 @@
import static org.apache.hadoop.yarn.util.StringHelper.join;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.APPLICATION_ATTEMPT_ID;
-import static org.apache.hadoop.yarn.webapp.YarnWebParams.WEB_UI_TYPE;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI._EVEN;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI._INFO_WRAP;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI._ODD;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI._TH;
-
import java.security.PrivilegedExceptionAction;
import java.util.Collection;
@@ -43,20 +37,18 @@
import org.apache.hadoop.yarn.server.webapp.dao.AppAttemptInfo;
import org.apache.hadoop.yarn.server.webapp.dao.ContainerInfo;
import org.apache.hadoop.yarn.util.ConverterUtils;
-import org.apache.hadoop.yarn.webapp.YarnWebParams;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TBODY;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
import org.apache.hadoop.yarn.webapp.view.InfoBlock;
-
import com.google.inject.Inject;
public class AppAttemptBlock extends HtmlBlock {
private static final Log LOG = LogFactory.getLog(AppAttemptBlock.class);
protected ApplicationBaseProtocol appBaseProt;
+ protected ApplicationAttemptId appAttemptId = null;
@Inject
public AppAttemptBlock(ApplicationBaseProtocol appBaseProt, ViewContext ctx) {
@@ -66,14 +58,12 @@ public AppAttemptBlock(ApplicationBaseProtocol appBaseProt, ViewContext ctx) {
@Override
protected void render(Block html) {
- String webUiType = $(WEB_UI_TYPE);
String attemptid = $(APPLICATION_ATTEMPT_ID);
if (attemptid.isEmpty()) {
puts("Bad request: requires application attempt ID");
return;
}
- ApplicationAttemptId appAttemptId = null;
try {
appAttemptId = ConverterUtils.toApplicationAttemptId(attemptid);
} catch (IllegalArgumentException e) {
@@ -183,17 +173,7 @@ public Collection<ContainerReport> run() throws Exception {
return;
}
- // TODO need to render applicationHeadRoom value from
- // ApplicationAttemptMetrics after YARN-3284
- if (webUiType.equals(YarnWebParams.RM_WEB_UI)) {
- if (!isApplicationInFinalState(appAttempt.getAppAttemptState())) {
- DIV<Hamlet> pdiv = html._(InfoBlock.class).div(_INFO_WRAP);
- info("Application Attempt Overview").clear();
- info("Application Attempt Metrics")._(
- "Application Attempt Headroom : ", 0);
- pdiv._();
- }
- }
+ createAttemptHeadRoomTable(html);
html._(InfoBlock.class);
// Container Table
@@ -236,45 +216,6 @@ public Collection<ContainerReport> run() throws Exception {
._("var containersTableData=" + containersTableData)._();
tbody._()._();
-
- if (webUiType.equals(YarnWebParams.RM_WEB_UI)) {
- createContainerLocalityTable(html); // TODO:YARN-3284
- }
- }
-
- //TODO: YARN-3284
- //The containerLocality metrics will be exposed from AttemptReport
- private void createContainerLocalityTable(Block html) {
- int totalAllocatedContainers = 0; //TODO: YARN-3284
- int[][] localityStatistics = new int[0][0];//TODO:YARN-3284
- DIV<Hamlet> div = html.div(_INFO_WRAP);
- TABLE<DIV<Hamlet>> table =
- div.h3(
- "Total Allocated Containers: "
- + totalAllocatedContainers).h3("Each table cell"
- + " represents the number of NodeLocal/RackLocal/OffSwitch containers"
- + " satisfied by NodeLocal/RackLocal/OffSwitch resource requests.").table(
- "#containerLocality");
- table.
- tr().
- th(_TH, "").
- th(_TH, "Node Local Request").
- th(_TH, "Rack Local Request").
- th(_TH, "Off Switch Request").
- _();
-
- String[] containersType =
- { "Num Node Local Containers (satisfied by)", "Num Rack Local Containers (satisfied by)",
- "Num Off Switch Containers (satisfied by)" };
- boolean odd = false;
- for (int i = 0; i < localityStatistics.length; i++) {
- table.tr((odd = !odd) ? _ODD : _EVEN).td(containersType[i])
- .td(String.valueOf(localityStatistics[i][0]))
- .td(i == 0 ? "" : String.valueOf(localityStatistics[i][1]))
- .td(i <= 1 ? "" : String.valueOf(localityStatistics[i][2]))._();
- }
- table._();
- div._();
}
private boolean hasAMContainer(ContainerId containerId,
@@ -286,10 +227,8 @@ private boolean hasAMContainer(ContainerId containerId,
}
return false;
}
-
- private boolean isApplicationInFinalState(YarnApplicationAttemptState state) {
- return state == YarnApplicationAttemptState.FINISHED
- || state == YarnApplicationAttemptState.FAILED
- || state == YarnApplicationAttemptState.KILLED;
+
+ protected void createAttemptHeadRoomTable(Block html) {
+
}
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/AppBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/AppBlock.java
index 5fc5fa06ac0fc..abb6b9cebf381 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/AppBlock.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/AppBlock.java
@@ -21,11 +21,8 @@
import static org.apache.hadoop.yarn.util.StringHelper.join;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.APPLICATION_ID;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.WEB_UI_TYPE;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI._INFO_WRAP;
import java.security.PrivilegedExceptionAction;
import java.util.Collection;
-import java.util.List;
-
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -39,12 +36,9 @@
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerReport;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
-import org.apache.hadoop.yarn.api.records.Resource;
-import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.ContainerNotFoundException;
@@ -53,10 +47,8 @@
import org.apache.hadoop.yarn.server.webapp.dao.ContainerInfo;
import org.apache.hadoop.yarn.util.Apps;
import org.apache.hadoop.yarn.util.Times;
-import org.apache.hadoop.yarn.util.resource.Resources;
import org.apache.hadoop.yarn.webapp.YarnWebParams;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TBODY;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
@@ -69,9 +61,11 @@ public class AppBlock extends HtmlBlock {
private static final Log LOG = LogFactory.getLog(AppBlock.class);
protected ApplicationBaseProtocol appBaseProt;
protected Configuration conf;
+ protected ApplicationId appID = null;
@Inject
- AppBlock(ApplicationBaseProtocol appBaseProt, ViewContext ctx, Configuration conf) {
+ protected AppBlock(ApplicationBaseProtocol appBaseProt, ViewContext ctx,
+ Configuration conf) {
super(ctx);
this.appBaseProt = appBaseProt;
this.conf = conf;
@@ -86,7 +80,6 @@ protected void render(Block html) {
return;
}
- ApplicationId appID = null;
try {
appID = Apps.toAppID(aid);
} catch (Exception e) {
@@ -213,31 +206,7 @@ public Collection<ApplicationAttemptReport> run() throws Exception {
return;
}
- //TODO:YARN-3284
- //The preemption metrics will be exposed from ApplicationReport
- // and ApplicationAttemptReport
- ApplicationResourceUsageReport usageReport =
- appReport.getApplicationResourceUsageReport();
- DIV<Hamlet> pdiv = html.
- _(InfoBlock.class).
- div(_INFO_WRAP);
- info("Application Overview").clear();
- info("Application Metrics")
- ._("Total Resource Preempted:",
- Resources.none()) // TODO: YARN-3284
- ._("Total Number of Non-AM Containers Preempted:",
- String.valueOf(0)) // TODO: YARN-3284
- ._("Total Number of AM Containers Preempted:",
- String.valueOf(0)) // TODO: YARN-3284
- ._("Resource Preempted from Current Attempt:",
- Resources.none()) // TODO: YARN-3284
- ._("Number of Non-AM Containers Preempted from Current Attempt:",
- 0) // TODO: YARN-3284
- ._("Aggregate Resource Allocation:",
- String.format("%d MB-seconds, %d vcore-seconds", usageReport == null
- ? 0 : usageReport.getMemorySeconds(), usageReport == null ? 0
- : usageReport.getVcoreSeconds()));
- pdiv._();
+ createApplicationMetricsTable(html);
html._(InfoBlock.class);
@@ -319,49 +288,6 @@ public ContainerReport run() throws Exception {
._("var attemptsTableData=" + attemptsTableData)._();
tbody._()._();
-
- if (webUiType != null && webUiType.equals(YarnWebParams.RM_WEB_UI)) {
- createResourceRequestsTable(html, null); // TODO:YARN-3284
- }
- }
-
- //TODO:YARN-3284
- //The resource requests metrics will be exposed from attemptReport
- private void createResourceRequestsTable(Block html, List<ResourceRequest> resouceRequests) {
- TBODY<TABLE<Hamlet>> tbody =
- html.table("#ResourceRequests").thead().tr()
- .th(".priority", "Priority")
- .th(".resourceName", "ResourceName")
- .th(".totalResource", "Capability")
- .th(".numContainers", "NumContainers")
- .th(".relaxLocality", "RelaxLocality")
- .th(".nodeLabelExpression", "NodeLabelExpression")._()._().tbody();
-
- Resource totalResource = Resource.newInstance(0, 0);
- if (resouceRequests != null) {
- for (ResourceRequest request : resouceRequests) {
- if (request.getNumContainers() == 0) {
- continue;
- }
-
- tbody.tr()
- .td(String.valueOf(request.getPriority()))
- .td(request.getResourceName())
- .td(String.valueOf(request.getCapability()))
- .td(String.valueOf(request.getNumContainers()))
- .td(String.valueOf(request.getRelaxLocality()))
- .td(request.getNodeLabelExpression() == null ? "N/A" : request
- .getNodeLabelExpression())._();
- if (request.getResourceName().equals(ResourceRequest.ANY)) {
- Resources.addTo(totalResource,
- Resources.multiply(request.getCapability(),
- request.getNumContainers()));
- }
- }
- }
- html.div().$class("totalResourceRequests")
- .h3("Total Outstanding Resource Requests: " + totalResource)._();
- tbody._()._();
}
private String clarifyAppState(YarnApplicationState state) {
@@ -389,4 +315,9 @@ private String clairfyAppFinalStatus(FinalApplicationStatus status) {
}
return status.toString();
}
+
+ // The preemption metrics only need to be shown in RM WebUI
+ protected void createApplicationMetricsTable(Block html) {
+
+ }
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppAttemptPage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppAttemptPage.java
index 6e4cfade9ff05..df5fb9e8a5984 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppAttemptPage.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppAttemptPage.java
@@ -23,7 +23,6 @@
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES_ID;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
-import org.apache.hadoop.yarn.server.webapp.AppAttemptBlock;
import org.apache.hadoop.yarn.server.webapp.WebPageUtils;
import org.apache.hadoop.yarn.webapp.SubView;
import org.apache.hadoop.yarn.webapp.YarnWebParams;
@@ -51,7 +50,7 @@ protected void preHead(Page.HTML<_> html) {
@Override
protected Class<? extends SubView> content() {
- return AppAttemptBlock.class;
+ return RMAppAttemptBlock.class;
}
}
\ No newline at end of file
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppPage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppPage.java
index 9f9b7c969d2b9..0c5516a304ade 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppPage.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppPage.java
@@ -23,7 +23,6 @@
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES_ID;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
-import org.apache.hadoop.yarn.server.webapp.AppBlock;
import org.apache.hadoop.yarn.server.webapp.WebPageUtils;
import org.apache.hadoop.yarn.webapp.SubView;
import org.apache.hadoop.yarn.webapp.YarnWebParams;
@@ -50,6 +49,6 @@ protected void preHead(Page.HTML<_> html) {
@Override
protected Class<? extends SubView> content() {
- return AppBlock.class;
+ return RMAppBlock.class;
}
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppAttemptBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppAttemptBlock.java
new file mode 100644
index 0000000000000..419c0ce67539e
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppAttemptBlock.java
@@ -0,0 +1,177 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.resourcemanager.webapp;
+
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI._EVEN;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI._INFO_WRAP;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI._ODD;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI._TH;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hadoop.yarn.api.records.ResourceRequest;
+import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;
+import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
+import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
+import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
+import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptMetrics;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppInfo;
+import org.apache.hadoop.yarn.server.webapp.AppAttemptBlock;
+import org.apache.hadoop.yarn.util.resource.Resources;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TBODY;
+import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
+import org.apache.hadoop.yarn.webapp.view.InfoBlock;
+import com.google.inject.Inject;
+
+public class RMAppAttemptBlock extends AppAttemptBlock{
+
+ private final ResourceManager rm;
+ protected Configuration conf;
+
+ @Inject
+ RMAppAttemptBlock(ViewContext ctx, ResourceManager rm, Configuration conf) {
+ super(rm.getClientRMService(), ctx);
+ this.rm = rm;
+ this.conf = conf;
+ }
+
+ @Override
+ protected void render(Block html) {
+ super.render(html);
+ createContainerLocalityTable(html);
+ createResourceRequestsTable(html);
+ }
+
+ private void createResourceRequestsTable(Block html) {
+ AppInfo app =
+ new AppInfo(rm, rm.getRMContext().getRMApps()
+ .get(this.appAttemptId.getApplicationId()), true,
+ WebAppUtils.getHttpSchemePrefix(conf));
+ TBODY<TABLE<Hamlet>> tbody =
+ html.table("#ResourceRequests").thead().tr()
+ .th(".priority", "Priority")
+ .th(".resourceName", "ResourceName")
+ .th(".totalResource", "Capability")
+ .th(".numContainers", "NumContainers")
+ .th(".relaxLocality", "RelaxLocality")
+ .th(".nodeLabelExpression", "NodeLabelExpression")._()._().tbody();
+
+ Resource totalResource = Resource.newInstance(0, 0);
+ if (app.getResourceRequests() != null) {
+ for (ResourceRequest request : app.getResourceRequests()) {
+ if (request.getNumContainers() == 0) {
+ continue;
+ }
+
+ tbody.tr()
+ .td(String.valueOf(request.getPriority()))
+ .td(request.getResourceName())
+ .td(String.valueOf(request.getCapability()))
+ .td(String.valueOf(request.getNumContainers()))
+ .td(String.valueOf(request.getRelaxLocality()))
+ .td(request.getNodeLabelExpression() == null ? "N/A" : request
+ .getNodeLabelExpression())._();
+ if (request.getResourceName().equals(ResourceRequest.ANY)) {
+ Resources.addTo(totalResource,
+ Resources.multiply(request.getCapability(),
+ request.getNumContainers()));
+ }
+ }
+ }
+ html.div().$class("totalResourceRequests")
+ .h3("Total Outstanding Resource Requests: " + totalResource)._();
+ tbody._()._();
+ }
+
+ private void createContainerLocalityTable(Block html) {
+ RMAppAttemptMetrics attemptMetrics = null;
+ RMAppAttempt attempt = getRMAppAttempt();
+ if (attempt != null) {
+ attemptMetrics = attempt.getRMAppAttemptMetrics();
+ }
+
+ if (attemptMetrics == null) {
+ return;
+ }
+
+ DIV<Hamlet> div = html.div(_INFO_WRAP);
+ TABLE<DIV<Hamlet>> table =
+ div.h3(
+ "Total Allocated Containers: "
+ + attemptMetrics.getTotalAllocatedContainers()).h3("Each table cell"
+ + " represents the number of NodeLocal/RackLocal/OffSwitch containers"
+ + " satisfied by NodeLocal/RackLocal/OffSwitch resource requests.").table(
+ "#containerLocality");
+ table.
+ tr().
+ th(_TH, "").
+ th(_TH, "Node Local Request").
+ th(_TH, "Rack Local Request").
+ th(_TH, "Off Switch Request").
+ _();
+
+ String[] containersType =
+ { "Num Node Local Containers (satisfied by)", "Num Rack Local Containers (satisfied by)",
+ "Num Off Switch Containers (satisfied by)" };
+ boolean odd = false;
+ for (int i = 0; i < attemptMetrics.getLocalityStatistics().length; i++) {
+ table.tr((odd = !odd) ? _ODD : _EVEN).td(containersType[i])
+ .td(String.valueOf(attemptMetrics.getLocalityStatistics()[i][0]))
+ .td(i == 0 ? "" : String.valueOf(attemptMetrics.getLocalityStatistics()[i][1]))
+ .td(i <= 1 ? "" : String.valueOf(attemptMetrics.getLocalityStatistics()[i][2]))._();
+ }
+ table._();
+ div._();
+ }
+
+ private boolean isApplicationInFinalState(YarnApplicationAttemptState state) {
+ return state == YarnApplicationAttemptState.FINISHED
+ || state == YarnApplicationAttemptState.FAILED
+ || state == YarnApplicationAttemptState.KILLED;
+ }
+
+ @Override
+ protected void createAttemptHeadRoomTable(Block html) {
+ RMAppAttempt attempt = getRMAppAttempt();
+ if (attempt != null) {
+ if (!isApplicationInFinalState(YarnApplicationAttemptState
+ .valueOf(attempt.getAppAttemptState().toString()))) {
+ DIV<Hamlet> pdiv = html._(InfoBlock.class).div(_INFO_WRAP);
+ info("Application Attempt Overview").clear();
+ info("Application Attempt Metrics")._(
+ "Application Attempt Headroom : ", 0);
+ pdiv._();
+ }
+ }
+ }
+
+ private RMAppAttempt getRMAppAttempt() {
+ ApplicationId appId = this.appAttemptId.getApplicationId();
+ RMAppAttempt attempt = null;
+ RMApp rmApp = rm.getRMContext().getRMApps().get(appId);
+ if (rmApp != null) {
+ attempt = rmApp.getAppAttempts().get(appAttemptId);
+ }
+ return attempt;
+ }
+}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppBlock.java
new file mode 100644
index 0000000000000..64c57476f7951
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppBlock.java
@@ -0,0 +1,94 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.resourcemanager.webapp;
+
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI._INFO_WRAP;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
+import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
+import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppMetrics;
+import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptMetrics;
+import org.apache.hadoop.yarn.server.webapp.AppBlock;
+import org.apache.hadoop.yarn.util.resource.Resources;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV;
+import org.apache.hadoop.yarn.webapp.view.InfoBlock;
+
+import com.google.inject.Inject;
+
+public class RMAppBlock extends AppBlock{
+
+ private final ResourceManager rm;
+
+ @Inject
+ RMAppBlock(ViewContext ctx, Configuration conf, ResourceManager rm) {
+ super(rm.getClientRMService(), ctx, conf);
+ this.rm = rm;
+ }
+
+ @Override
+ protected void render(Block html) {
+ super.render(html);
+ }
+
+ @Override
+ protected void createApplicationMetricsTable(Block html){
+ RMApp rmApp = this.rm.getRMContext().getRMApps().get(appID);
+ RMAppMetrics appMetrics = rmApp == null ? null : rmApp.getRMAppMetrics();
+ // Get attempt metrics and fields, it is possible currentAttempt of RMApp is
+ // null. In that case, we will assume resource preempted and number of Non
+ // AM container preempted on that attempt is 0
+ RMAppAttemptMetrics attemptMetrics;
+ if (rmApp == null || null == rmApp.getCurrentAppAttempt()) {
+ attemptMetrics = null;
+ } else {
+ attemptMetrics = rmApp.getCurrentAppAttempt().getRMAppAttemptMetrics();
+ }
+ Resource attemptResourcePreempted =
+ attemptMetrics == null ? Resources.none() : attemptMetrics
+ .getResourcePreempted();
+ int attemptNumNonAMContainerPreempted =
+ attemptMetrics == null ? 0 : attemptMetrics
+ .getNumNonAMContainersPreempted();
+ DIV<Hamlet> pdiv = html.
+ _(InfoBlock.class).
+ div(_INFO_WRAP);
+ info("Application Overview").clear();
+ info("Application Metrics")
+ ._("Total Resource Preempted:",
+ appMetrics == null ? "N/A" : appMetrics.getResourcePreempted())
+ ._("Total Number of Non-AM Containers Preempted:",
+ appMetrics == null ? "N/A"
+ : appMetrics.getNumNonAMContainersPreempted())
+ ._("Total Number of AM Containers Preempted:",
+ appMetrics == null ? "N/A"
+ : appMetrics.getNumAMContainersPreempted())
+ ._("Resource Preempted from Current Attempt:",
+ attemptResourcePreempted)
+ ._("Number of Non-AM Containers Preempted from Current Attempt:",
+ attemptNumNonAMContainerPreempted)
+ ._("Aggregate Resource Allocation:",
+ String.format("%d MB-seconds, %d vcore-seconds",
+ appMetrics == null ? "N/A" : appMetrics.getMemorySeconds(),
+ appMetrics == null ? "N/A" : appMetrics.getVcoreSeconds()));
+ pdiv._();
+ }
+}
|
f9ce11eef8b05e7e31b45a428d63ae35eed8ed42
|
spring-framework
|
Provide controller level Cache-Control support--Prior to this commit, Cache-Control HTTP headers could be set using-a WebContentInterceptor and configured cache mappings.--This commit adds support for cache-related HTTP headers at the controller-method level, by returning a ResponseEntity instance:--ResponseEntity.status(HttpStatus.OK)- .cacheControl(CacheControl.maxAge(1, TimeUnit.HOURS).cachePublic())- .eTag("deadb33f8badf00d")- .body(entity);--Also, this change now automatically checks the "ETag" and-"Last-Modified" headers in ResponseEntity, in order to respond HTTP-"304 - Not Modified" if necessary.--Issue: SPR-8550-
|
p
|
https://github.com/spring-projects/spring-framework
|
diff --git a/spring-web/src/main/java/org/springframework/http/ResponseEntity.java b/spring-web/src/main/java/org/springframework/http/ResponseEntity.java
index 840f696bd735..5d00716d5f94 100644
--- a/spring-web/src/main/java/org/springframework/http/ResponseEntity.java
+++ b/spring-web/src/main/java/org/springframework/http/ResponseEntity.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2014 the original author or authors.
+ * Copyright 2002-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -59,6 +59,7 @@
* </pre>
*
* @author Arjen Poutsma
+ * @author Brian Clozel
* @since 3.0.2
* @see #getStatusCode()
*/
@@ -318,6 +319,20 @@ public interface HeadersBuilder<B extends HeadersBuilder<B>> {
*/
B location(URI location);
+ /**
+ * Set the caching directives for the resource, as specified by the
+ * {@code Cache-Control} header.
+ *
+ * <p>A {@code CacheControl} instance can be built like
+ * {@code CacheControl.maxAge(3600).cachePublic().noTransform()}.
+ *
+ * @param cacheControl the instance that builds cache related HTTP response headers
+ * @return this builder
+ * @see <a href="https://tools.ietf.org/html/rfc7234#section-5.2">RFC-7234 Section 5.2</a>
+ * @since 4.2
+ */
+ B cacheControl(CacheControl cacheControl);
+
/**
* Build the response entity with no body.
* @return the response entity
@@ -423,6 +438,15 @@ public BodyBuilder location(URI location) {
return this;
}
+ @Override
+ public BodyBuilder cacheControl(CacheControl cacheControl) {
+ String ccValue = cacheControl.getHeaderValue();
+ if(ccValue != null) {
+ this.headers.setCacheControl(cacheControl.getHeaderValue());
+ }
+ return this;
+ }
+
@Override
public ResponseEntity<Void> build() {
return new ResponseEntity<Void>(null, this.headers, this.status);
diff --git a/spring-web/src/test/java/org/springframework/http/ResponseEntityTests.java b/spring-web/src/test/java/org/springframework/http/ResponseEntityTests.java
index bb54104652ab..5ce43e8dad0d 100644
--- a/spring-web/src/test/java/org/springframework/http/ResponseEntityTests.java
+++ b/spring-web/src/test/java/org/springframework/http/ResponseEntityTests.java
@@ -19,11 +19,14 @@
import java.net.URI;
import java.net.URISyntaxException;
import java.util.List;
+import java.util.concurrent.TimeUnit;
+import org.hamcrest.Matchers;
import org.junit.Test;
import static org.junit.Assert.*;
+
/**
* @author Arjen Poutsma
* @author Marcel Overdijk
@@ -163,7 +166,7 @@ public void headers() throws URISyntaxException {
}
@Test
- public void headersCopy(){
+ public void headersCopy() {
HttpHeaders customHeaders = new HttpHeaders();
customHeaders.set("X-CustomHeader", "vale");
@@ -178,7 +181,7 @@ public void headersCopy(){
}
@Test // SPR-12792
- public void headersCopyWithEmptyAndNull(){
+ public void headersCopyWithEmptyAndNull() {
ResponseEntity<Void> responseEntityWithEmptyHeaders =
ResponseEntity.ok().headers(new HttpHeaders()).build();
ResponseEntity<Void> responseEntityWithNullHeaders =
@@ -189,4 +192,58 @@ public void headersCopyWithEmptyAndNull(){
assertEquals(responseEntityWithEmptyHeaders.toString(), responseEntityWithNullHeaders.toString());
}
+ @Test
+ public void emptyCacheControl() {
+
+ Integer entity = new Integer(42);
+
+ ResponseEntity<Integer> responseEntity =
+ ResponseEntity.status(HttpStatus.OK)
+ .cacheControl(CacheControl.empty())
+ .body(entity);
+
+ assertNotNull(responseEntity);
+ assertEquals(HttpStatus.OK, responseEntity.getStatusCode());
+ assertFalse(responseEntity.getHeaders().containsKey(HttpHeaders.CACHE_CONTROL));
+ assertEquals(entity, responseEntity.getBody());
+ }
+
+ @Test
+ public void cacheControl() {
+
+ Integer entity = new Integer(42);
+
+ ResponseEntity<Integer> responseEntity =
+ ResponseEntity.status(HttpStatus.OK)
+ .cacheControl(CacheControl.maxAge(1, TimeUnit.HOURS).cachePrivate().
+ mustRevalidate().proxyRevalidate().sMaxAge(30, TimeUnit.MINUTES))
+ .body(entity);
+
+ assertNotNull(responseEntity);
+ assertEquals(HttpStatus.OK, responseEntity.getStatusCode());
+ assertTrue(responseEntity.getHeaders().containsKey(HttpHeaders.CACHE_CONTROL));
+ assertEquals(entity, responseEntity.getBody());
+ String cacheControlHeader = responseEntity.getHeaders().getCacheControl();
+ assertThat(cacheControlHeader, Matchers.equalTo("max-age=3600, must-revalidate, private, proxy-revalidate, s-maxage=1800"));
+ }
+
+ @Test
+ public void cacheControlNoCache() {
+
+ Integer entity = new Integer(42);
+
+ ResponseEntity<Integer> responseEntity =
+ ResponseEntity.status(HttpStatus.OK)
+ .cacheControl(CacheControl.noStore())
+ .body(entity);
+
+ assertNotNull(responseEntity);
+ assertEquals(HttpStatus.OK, responseEntity.getStatusCode());
+ assertTrue(responseEntity.getHeaders().containsKey(HttpHeaders.CACHE_CONTROL));
+ assertEquals(entity, responseEntity.getBody());
+
+ String cacheControlHeader = responseEntity.getHeaders().getCacheControl();
+ assertThat(cacheControlHeader, Matchers.equalTo("no-store"));
+ }
+
}
diff --git a/spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/method/annotation/HttpEntityMethodProcessor.java b/spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/method/annotation/HttpEntityMethodProcessor.java
index 27989f45a36b..90b7b059fc10 100644
--- a/spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/method/annotation/HttpEntityMethodProcessor.java
+++ b/spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/method/annotation/HttpEntityMethodProcessor.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2014 the original author or authors.
+ * Copyright 2002-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -31,6 +31,7 @@
import org.springframework.http.server.ServletServerHttpRequest;
import org.springframework.http.server.ServletServerHttpResponse;
import org.springframework.util.Assert;
+import org.springframework.util.StringUtils;
import org.springframework.web.HttpMediaTypeNotSupportedException;
import org.springframework.web.accept.ContentNegotiationManager;
import org.springframework.web.bind.support.WebDataBinderFactory;
@@ -139,12 +140,36 @@ public void handleReturnValue(Object returnValue, MethodParameter returnType,
}
Object body = responseEntity.getBody();
+ if (responseEntity instanceof ResponseEntity) {
+ if (isResourceNotModified(webRequest, (ResponseEntity<?>) responseEntity)) {
+ // Ensure headers are flushed, no body should be written
+ outputMessage.flush();
+ // skip call to converters, as they may update the body
+ return;
+ }
+ }
// Try even with null body. ResponseBodyAdvice could get involved.
writeWithMessageConverters(body, returnType, inputMessage, outputMessage);
// Ensure headers are flushed even if no body was written
- outputMessage.getBody();
+ outputMessage.flush();
+ }
+
+ private boolean isResourceNotModified(NativeWebRequest webRequest, ResponseEntity<?> responseEntity) {
+ String eTag = responseEntity.getHeaders().getETag();
+ long lastModified = responseEntity.getHeaders().getLastModified();
+ boolean notModified = false;
+ if (lastModified != -1 && StringUtils.hasLength(eTag)) {
+ notModified = webRequest.checkNotModified(eTag, lastModified);
+ }
+ else if (lastModified != -1) {
+ notModified = webRequest.checkNotModified(lastModified);
+ }
+ else if (StringUtils.hasLength(eTag)) {
+ notModified = webRequest.checkNotModified(eTag);
+ }
+ return notModified;
}
@Override
diff --git a/spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/HttpEntityMethodProcessorMockTests.java b/spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/HttpEntityMethodProcessorMockTests.java
index 273283fdbca0..dc3e70b3c2a0 100644
--- a/spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/HttpEntityMethodProcessorMockTests.java
+++ b/spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/HttpEntityMethodProcessorMockTests.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2014 the original author or authors.
+ * Copyright 2002-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -18,8 +18,12 @@
import java.lang.reflect.Method;
import java.net.URI;
+import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Collections;
+import java.util.Date;
+import java.util.Locale;
+import java.util.TimeZone;
import org.junit.Before;
import org.junit.Test;
@@ -106,7 +110,7 @@ public void setUp() throws Exception {
returnTypeInt = new MethodParameter(getClass().getMethod("handle3"), -1);
mavContainer = new ModelAndViewContainer();
- servletRequest = new MockHttpServletRequest();
+ servletRequest = new MockHttpServletRequest("GET", "/foo");
servletResponse = new MockHttpServletResponse();
webRequest = new ServletWebRequest(servletRequest, servletResponse);
}
@@ -320,6 +324,98 @@ public void responseHeaderAndBody() throws Exception {
assertEquals("headerValue", outputMessage.getValue().getHeaders().get("header").get(0));
}
+ @Test
+ public void handleReturnTypeLastModified() throws Exception {
+ long currentTime = new Date().getTime();
+ long oneMinuteAgo = currentTime - (1000 * 60);
+ servletRequest.addHeader(HttpHeaders.IF_MODIFIED_SINCE, currentTime);
+ HttpHeaders responseHeaders = new HttpHeaders();
+ responseHeaders.setDate(HttpHeaders.LAST_MODIFIED, oneMinuteAgo);
+ ResponseEntity<String> returnValue = new ResponseEntity<String>("body", responseHeaders, HttpStatus.OK);
+
+ given(messageConverter.canWrite(String.class, null)).willReturn(true);
+ given(messageConverter.getSupportedMediaTypes()).willReturn(Collections.singletonList(MediaType.TEXT_PLAIN));
+ given(messageConverter.canWrite(String.class, MediaType.TEXT_PLAIN)).willReturn(true);
+
+ processor.handleReturnValue(returnValue, returnTypeResponseEntity, mavContainer, webRequest);
+
+ assertTrue(mavContainer.isRequestHandled());
+ assertEquals(HttpStatus.NOT_MODIFIED.value(), servletResponse.getStatus());
+ assertEquals(oneMinuteAgo/1000 * 1000, Long.parseLong(servletResponse.getHeader(HttpHeaders.LAST_MODIFIED)));
+ assertEquals(0, servletResponse.getContentAsByteArray().length);
+ }
+
+ @Test
+ public void handleReturnTypeEtag() throws Exception {
+ String etagValue = "\"deadb33f8badf00d\"";
+ servletRequest.addHeader(HttpHeaders.IF_NONE_MATCH, etagValue);
+ HttpHeaders responseHeaders = new HttpHeaders();
+ responseHeaders.set(HttpHeaders.ETAG, etagValue);
+ ResponseEntity<String> returnValue = new ResponseEntity<String>("body", responseHeaders, HttpStatus.OK);
+
+ given(messageConverter.canWrite(String.class, null)).willReturn(true);
+ given(messageConverter.getSupportedMediaTypes()).willReturn(Collections.singletonList(MediaType.TEXT_PLAIN));
+ given(messageConverter.canWrite(String.class, MediaType.TEXT_PLAIN)).willReturn(true);
+
+ processor.handleReturnValue(returnValue, returnTypeResponseEntity, mavContainer, webRequest);
+
+ assertTrue(mavContainer.isRequestHandled());
+ assertEquals(HttpStatus.NOT_MODIFIED.value(), servletResponse.getStatus());
+ assertEquals(etagValue, servletResponse.getHeader(HttpHeaders.ETAG));
+ assertEquals(0, servletResponse.getContentAsByteArray().length);
+ }
+
+ @Test
+ public void handleReturnTypeETagAndLastModified() throws Exception {
+ long currentTime = new Date().getTime();
+ long oneMinuteAgo = currentTime - (1000 * 60);
+ String etagValue = "\"deadb33f8badf00d\"";
+ servletRequest.addHeader(HttpHeaders.IF_MODIFIED_SINCE, currentTime);
+ servletRequest.addHeader(HttpHeaders.IF_NONE_MATCH, etagValue);
+ HttpHeaders responseHeaders = new HttpHeaders();
+ responseHeaders.setDate(HttpHeaders.LAST_MODIFIED, oneMinuteAgo);
+ responseHeaders.set(HttpHeaders.ETAG, etagValue);
+ ResponseEntity<String> returnValue = new ResponseEntity<String>("body", responseHeaders, HttpStatus.OK);
+
+ given(messageConverter.canWrite(String.class, null)).willReturn(true);
+ given(messageConverter.getSupportedMediaTypes()).willReturn(Collections.singletonList(MediaType.TEXT_PLAIN));
+ given(messageConverter.canWrite(String.class, MediaType.TEXT_PLAIN)).willReturn(true);
+
+ processor.handleReturnValue(returnValue, returnTypeResponseEntity, mavContainer, webRequest);
+
+ assertTrue(mavContainer.isRequestHandled());
+ assertEquals(HttpStatus.NOT_MODIFIED.value(), servletResponse.getStatus());
+ assertEquals(oneMinuteAgo/1000 * 1000, Long.parseLong(servletResponse.getHeader(HttpHeaders.LAST_MODIFIED)));
+ assertEquals(etagValue, servletResponse.getHeader(HttpHeaders.ETAG));
+ assertEquals(0, servletResponse.getContentAsByteArray().length);
+ }
+
+ @Test
+ public void handleReturnTypeChangedETagAndLastModified() throws Exception {
+ long currentTime = new Date().getTime();
+ long oneMinuteAgo = currentTime - (1000 * 60);
+ String etagValue = "\"deadb33f8badf00d\"";
+ String changedEtagValue = "\"changed-etag-value\"";
+ servletRequest.addHeader(HttpHeaders.IF_MODIFIED_SINCE, currentTime);
+ servletRequest.addHeader(HttpHeaders.IF_NONE_MATCH, etagValue);
+ HttpHeaders responseHeaders = new HttpHeaders();
+ responseHeaders.setDate(HttpHeaders.LAST_MODIFIED, oneMinuteAgo);
+ responseHeaders.set(HttpHeaders.ETAG, changedEtagValue);
+ ResponseEntity<String> returnValue = new ResponseEntity<String>("body", responseHeaders, HttpStatus.OK);
+
+ given(messageConverter.canWrite(String.class, null)).willReturn(true);
+ given(messageConverter.getSupportedMediaTypes()).willReturn(Collections.singletonList(MediaType.TEXT_PLAIN));
+ given(messageConverter.canWrite(String.class, MediaType.TEXT_PLAIN)).willReturn(true);
+
+ processor.handleReturnValue(returnValue, returnTypeResponseEntity, mavContainer, webRequest);
+
+ assertTrue(mavContainer.isRequestHandled());
+ assertEquals(HttpStatus.OK.value(), servletResponse.getStatus());
+ assertEquals(oneMinuteAgo/1000 * 1000, Long.parseLong(servletResponse.getHeader(HttpHeaders.LAST_MODIFIED)));
+ assertEquals(changedEtagValue, servletResponse.getHeader(HttpHeaders.ETAG));
+ assertEquals(0, servletResponse.getContentAsByteArray().length);
+ }
+
public ResponseEntity<String> handle1(HttpEntity<String> httpEntity, ResponseEntity<String> responseEntity, int i, RequestEntity<String> requestEntity) {
return responseEntity;
}
|
83d5b1e6a0280cc78625bacc2d3f7d1676c7385e
|
kotlin
|
Supported propagation for subclass of- j.u.Collection and similar classes.--
|
a
|
https://github.com/JetBrains/kotlin
|
diff --git a/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/JavaToKotlinMethodMap.java b/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/JavaToKotlinMethodMap.java
new file mode 100644
index 0000000000000..cea6587792692
--- /dev/null
+++ b/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/JavaToKotlinMethodMap.java
@@ -0,0 +1,149 @@
+/*
+ * Copyright 2010-2012 JetBrains s.r.o.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.jetbrains.jet.lang.resolve.java;
+
+import com.google.common.collect.*;
+import com.intellij.openapi.util.Pair;
+import com.intellij.psi.PsiMethod;
+import com.intellij.psi.util.PsiFormatUtil;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.jet.lang.descriptors.ClassDescriptor;
+import org.jetbrains.jet.lang.descriptors.FunctionDescriptor;
+import org.jetbrains.jet.lang.resolve.DescriptorUtils;
+import org.jetbrains.jet.lang.resolve.name.Name;
+import org.jetbrains.jet.lang.types.JetType;
+import org.jetbrains.jet.lang.types.TypeUtils;
+import org.jetbrains.jet.lang.types.lang.KotlinBuiltIns;
+import org.jetbrains.jet.resolve.DescriptorRenderer;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+public class JavaToKotlinMethodMap {
+ public static final JavaToKotlinMethodMap INSTANCE = new JavaToKotlinMethodMap();
+
+ private final JavaToKotlinMethodMapGenerated mapContainer = new JavaToKotlinMethodMapGenerated();
+
+ private JavaToKotlinMethodMap() {
+ }
+
+ @NotNull
+ private static Set<ClassDescriptor> getAllSuperClasses(@NotNull ClassDescriptor klass) {
+ Set<JetType> allSupertypes = TypeUtils.getAllSupertypes(klass.getDefaultType());
+ Set<ClassDescriptor> allSuperclasses = Sets.newHashSet();
+ for (JetType supertype : allSupertypes) {
+ ClassDescriptor superclass = TypeUtils.getClassDescriptor(supertype);
+ assert superclass != null;
+ allSuperclasses.add(superclass);
+ }
+ return allSuperclasses;
+ }
+
+ @NotNull
+ public List<FunctionDescriptor> getFunctions(@NotNull PsiMethod psiMethod, @NotNull ClassDescriptor containingClass) {
+ ImmutableCollection<ClassData> classDatas = mapContainer.map.get(psiMethod.getContainingClass().getQualifiedName());
+
+ List<FunctionDescriptor> result = Lists.newArrayList();
+
+ Set<ClassDescriptor> allSuperClasses = getAllSuperClasses(containingClass);
+
+ String serializedPsiMethod = serializePsiMethod(psiMethod);
+ for (ClassData classData : classDatas) {
+ String expectedSerializedFunction = classData.method2Function.get(serializedPsiMethod);
+ if (expectedSerializedFunction == null) continue;
+
+ ClassDescriptor kotlinClass = classData.kotlinClass;
+ if (!allSuperClasses.contains(kotlinClass)) continue;
+
+
+ Collection<FunctionDescriptor> functions =
+ kotlinClass.getDefaultType().getMemberScope().getFunctions(Name.identifier(psiMethod.getName()));
+
+ for (FunctionDescriptor function : functions) {
+ if (expectedSerializedFunction.equals(serializeFunction(function))) {
+ result.add(function);
+ }
+ }
+ }
+
+ return result;
+ }
+
+ @NotNull
+ public static String serializePsiMethod(@NotNull PsiMethod psiMethod) {
+ String externalName = PsiFormatUtil.getExternalName(psiMethod);
+ assert externalName != null : "couldn't find external name for " + psiMethod.getText();
+ return externalName;
+ }
+
+ @NotNull
+ public static String serializeFunction(@NotNull FunctionDescriptor fun) {
+ return DescriptorRenderer.TEXT.render(fun);
+ }
+
+ // used in generated code
+ static Pair<String, String> pair(String a, String b) {
+ return Pair.create(a, b);
+ }
+
+ // used in generated code
+ static void put(
+ ImmutableMultimap.Builder<String, ClassData> builder,
+ String javaFqName,
+ String kotlinQualifiedName,
+ Pair<String, String>... methods2Functions
+ ) {
+ ImmutableMap<String, String> methods2FunctionsMap = pairs2Map(methods2Functions);
+
+ ClassDescriptor kotlinClass;
+ if (kotlinQualifiedName.contains(".")) { // Map.Entry and MutableMap.MutableEntry
+ String[] kotlinNames = kotlinQualifiedName.split("\\.");
+ assert kotlinNames.length == 2 : "unexpected qualified name " + kotlinQualifiedName;
+
+ ClassDescriptor outerClass = KotlinBuiltIns.getInstance().getBuiltInClassByName(Name.identifier(kotlinNames[0]));
+ kotlinClass = DescriptorUtils.getInnerClassByName(outerClass, kotlinNames[1]);
+ assert kotlinClass != null : "Class not found: " + kotlinQualifiedName;
+ }
+ else {
+ kotlinClass = KotlinBuiltIns.getInstance().getBuiltInClassByName(Name.identifier(kotlinQualifiedName));
+ }
+
+ builder.put(javaFqName, new ClassData(kotlinClass, methods2FunctionsMap));
+ }
+
+ private static ImmutableMap<String, String> pairs2Map(Pair<String, String>[] pairs) {
+ ImmutableMap.Builder<String, String> builder = ImmutableMap.builder();
+ for (Pair<String, String> pair : pairs) {
+ builder.put(pair.first, pair.second);
+ }
+ return builder.build();
+ }
+
+ static class ClassData {
+ @NotNull
+ public final ClassDescriptor kotlinClass;
+ @NotNull
+ public Map<String, String> method2Function;
+
+ public ClassData(@NotNull ClassDescriptor kotlinClass, @NotNull Map<String, String> method2Function) {
+ this.kotlinClass = kotlinClass;
+ this.method2Function = method2Function;
+ }
+ }
+}
diff --git a/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/JavaToKotlinMethodMapGenerated.java b/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/JavaToKotlinMethodMapGenerated.java
new file mode 100644
index 0000000000000..70de02d0f8a10
--- /dev/null
+++ b/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/JavaToKotlinMethodMapGenerated.java
@@ -0,0 +1,245 @@
+/*
+ * Copyright 2010-2012 JetBrains s.r.o.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.jetbrains.jet.lang.resolve.java;
+
+import com.google.common.collect.ImmutableMultimap;
+
+import static org.jetbrains.jet.lang.resolve.java.JavaToKotlinMethodMap.*;
+
+/* This file is generated by org.jetbrains.jet.generators.GenerateJavaToKotlinMethodMap. DO NOT EDIT! */
+@SuppressWarnings("unchecked")
+class JavaToKotlinMethodMapGenerated {
+ final ImmutableMultimap<String, JavaToKotlinMethodMap.ClassData> map;
+
+ JavaToKotlinMethodMapGenerated() {
+ ImmutableMultimap.Builder<String, JavaToKotlinMethodMap.ClassData> b = ImmutableMultimap.builder();
+
+ put(b, "java.lang.String", "String",
+ pair("java.lang.String int compareTo(java.lang.String)", "public open fun compareTo(that : jet.String) : jet.Int defined in jet.String"),
+ pair("java.lang.String boolean equals(java.lang.Object)", "public final fun equals(other : jet.Any?) : jet.Boolean defined in jet.String"),
+ pair("java.lang.String java.lang.String toString()", "public open fun toString() : jet.String defined in jet.String")
+ );
+
+ put(b, "java.lang.CharSequence", "CharSequence",
+ pair("java.lang.CharSequence java.lang.String toString()", "public abstract fun toString() : jet.String defined in jet.CharSequence")
+ );
+
+ put(b, "java.lang.Throwable", "Throwable",
+ pair("java.lang.Throwable java.lang.Throwable getCause()", "public final fun getCause() : jet.Throwable? defined in jet.Throwable"),
+ pair("java.lang.Throwable java.lang.String getMessage()", "public final fun getMessage() : jet.String? defined in jet.Throwable"),
+ pair("java.lang.Throwable void printStackTrace()", "public final fun printStackTrace() : Unit defined in jet.Throwable")
+ );
+
+ put(b, "java.lang.Comparable", "Comparable",
+ pair("java.lang.Comparable int compareTo(T)", "public abstract fun compareTo(other : T) : jet.Int defined in jet.Comparable")
+ );
+
+ put(b, "java.lang.Enum", "Enum",
+ pair("java.lang.Enum java.lang.String name()", "public final fun name() : jet.String defined in jet.Enum"),
+ pair("java.lang.Enum int ordinal()", "public final fun ordinal() : jet.Int defined in jet.Enum")
+ );
+
+ put(b, "java.lang.Iterable", "Iterable",
+ pair("java.lang.Iterable java.util.Iterator<T> iterator()", "public abstract fun iterator() : jet.Iterator<T> defined in jet.Iterable")
+ );
+
+ put(b, "java.lang.Iterable", "MutableIterable",
+ pair("java.lang.Iterable java.util.Iterator<T> iterator()", "public abstract fun iterator() : jet.MutableIterator<T> defined in jet.MutableIterable")
+ );
+
+ put(b, "java.util.Iterator", "Iterator",
+ pair("java.util.Iterator boolean hasNext()", "public abstract fun hasNext() : jet.Boolean defined in jet.Iterator"),
+ pair("java.util.Iterator E next()", "public abstract fun next() : T defined in jet.Iterator")
+ );
+
+ put(b, "java.util.Iterator", "MutableIterator",
+ pair("java.util.Iterator boolean hasNext()", "public abstract fun hasNext() : jet.Boolean defined in jet.MutableIterator"),
+ pair("java.util.Iterator E next()", "public abstract fun next() : T defined in jet.MutableIterator"),
+ pair("java.util.Iterator void remove()", "public abstract fun remove() : Unit defined in jet.MutableIterator")
+ );
+
+ put(b, "java.util.Collection", "Collection",
+ pair("java.util.Collection boolean contains(java.lang.Object)", "public abstract fun contains(o : jet.Any?) : jet.Boolean defined in jet.Collection"),
+ pair("java.util.Collection boolean containsAll(java.util.Collection<?>)", "public abstract fun containsAll(c : jet.Collection<jet.Any?>) : jet.Boolean defined in jet.Collection"),
+ pair("java.util.Collection boolean equals(java.lang.Object)", "public abstract fun equals(other : jet.Any?) : jet.Boolean defined in jet.Collection"),
+ pair("java.util.Collection int hashCode()", "public abstract fun hashCode() : jet.Int defined in jet.Collection"),
+ pair("java.util.Collection boolean isEmpty()", "public abstract fun isEmpty() : jet.Boolean defined in jet.Collection"),
+ pair("java.util.Collection java.util.Iterator<E> iterator()", "public abstract fun iterator() : jet.Iterator<E> defined in jet.Collection"),
+ pair("java.util.Collection int size()", "public abstract fun size() : jet.Int defined in jet.Collection"),
+ pair("java.util.Collection T[] toArray(T[])", "public abstract fun <T> toArray(a : jet.Array<out T>) : jet.Array<T> defined in jet.Collection"),
+ pair("java.util.Collection java.lang.Object[] toArray()", "public abstract fun toArray() : jet.Array<jet.Any?> defined in jet.Collection")
+ );
+
+ put(b, "java.util.Collection", "MutableCollection",
+ pair("java.util.Collection boolean add(E)", "public abstract fun add(e : E) : jet.Boolean defined in jet.MutableCollection"),
+ pair("java.util.Collection boolean addAll(java.util.Collection<? extends E>)", "public abstract fun addAll(c : jet.Collection<E>) : jet.Boolean defined in jet.MutableCollection"),
+ pair("java.util.Collection void clear()", "public abstract fun clear() : Unit defined in jet.MutableCollection"),
+ pair("java.util.Collection boolean contains(java.lang.Object)", "public abstract fun contains(o : jet.Any?) : jet.Boolean defined in jet.MutableCollection"),
+ pair("java.util.Collection boolean containsAll(java.util.Collection<?>)", "public abstract fun containsAll(c : jet.Collection<jet.Any?>) : jet.Boolean defined in jet.MutableCollection"),
+ pair("java.util.Collection boolean equals(java.lang.Object)", "public abstract fun equals(other : jet.Any?) : jet.Boolean defined in jet.MutableCollection"),
+ pair("java.util.Collection int hashCode()", "public abstract fun hashCode() : jet.Int defined in jet.MutableCollection"),
+ pair("java.util.Collection boolean isEmpty()", "public abstract fun isEmpty() : jet.Boolean defined in jet.MutableCollection"),
+ pair("java.util.Collection java.util.Iterator<E> iterator()", "public abstract fun iterator() : jet.MutableIterator<E> defined in jet.MutableCollection"),
+ pair("java.util.Collection boolean remove(java.lang.Object)", "public abstract fun remove(o : jet.Any?) : jet.Boolean defined in jet.MutableCollection"),
+ pair("java.util.Collection boolean removeAll(java.util.Collection<?>)", "public abstract fun removeAll(c : jet.Collection<jet.Any?>) : jet.Boolean defined in jet.MutableCollection"),
+ pair("java.util.Collection boolean retainAll(java.util.Collection<?>)", "public abstract fun retainAll(c : jet.Collection<jet.Any?>) : jet.Boolean defined in jet.MutableCollection"),
+ pair("java.util.Collection int size()", "public abstract fun size() : jet.Int defined in jet.MutableCollection"),
+ pair("java.util.Collection T[] toArray(T[])", "public abstract fun <T> toArray(a : jet.Array<out T>) : jet.Array<T> defined in jet.MutableCollection"),
+ pair("java.util.Collection java.lang.Object[] toArray()", "public abstract fun toArray() : jet.Array<jet.Any?> defined in jet.MutableCollection")
+ );
+
+ put(b, "java.util.List", "List",
+ pair("java.util.List boolean contains(java.lang.Object)", "public abstract fun contains(o : jet.Any?) : jet.Boolean defined in jet.List"),
+ pair("java.util.List boolean containsAll(java.util.Collection<?>)", "public abstract fun containsAll(c : jet.Collection<jet.Any?>) : jet.Boolean defined in jet.List"),
+ pair("java.util.List boolean equals(java.lang.Object)", "public abstract fun equals(other : jet.Any?) : jet.Boolean defined in jet.List"),
+ pair("java.util.List E get(int)", "public abstract fun get(index : jet.Int) : E defined in jet.List"),
+ pair("java.util.List int hashCode()", "public abstract fun hashCode() : jet.Int defined in jet.List"),
+ pair("java.util.List int indexOf(java.lang.Object)", "public abstract fun indexOf(o : jet.Any?) : jet.Int defined in jet.List"),
+ pair("java.util.List boolean isEmpty()", "public abstract fun isEmpty() : jet.Boolean defined in jet.List"),
+ pair("java.util.List java.util.Iterator<E> iterator()", "public abstract fun iterator() : jet.Iterator<E> defined in jet.List"),
+ pair("java.util.List int lastIndexOf(java.lang.Object)", "public abstract fun lastIndexOf(o : jet.Any?) : jet.Int defined in jet.List"),
+ pair("java.util.List java.util.ListIterator<E> listIterator()", "public abstract fun listIterator() : jet.ListIterator<E> defined in jet.List"),
+ pair("java.util.List java.util.ListIterator<E> listIterator(int)", "public abstract fun listIterator(index : jet.Int) : jet.ListIterator<E> defined in jet.List"),
+ pair("java.util.List int size()", "public abstract fun size() : jet.Int defined in jet.List"),
+ pair("java.util.List java.util.List<E> subList(int, int)", "public abstract fun subList(fromIndex : jet.Int, toIndex : jet.Int) : jet.List<E> defined in jet.List"),
+ pair("java.util.List T[] toArray(T[])", "public abstract fun <T> toArray(a : jet.Array<out T>) : jet.Array<T> defined in jet.List"),
+ pair("java.util.List java.lang.Object[] toArray()", "public abstract fun toArray() : jet.Array<jet.Any?> defined in jet.List")
+ );
+
+ put(b, "java.util.List", "MutableList",
+ pair("java.util.List boolean add(E)", "public abstract fun add(e : E) : jet.Boolean defined in jet.MutableList"),
+ pair("java.util.List void add(int, E)", "public abstract fun add(index : jet.Int, element : E) : Unit defined in jet.MutableList"),
+ pair("java.util.List boolean addAll(int, java.util.Collection<? extends E>)", "public abstract fun addAll(index : jet.Int, c : jet.Collection<E>) : jet.Boolean defined in jet.MutableList"),
+ pair("java.util.List boolean addAll(java.util.Collection<? extends E>)", "public abstract fun addAll(c : jet.Collection<E>) : jet.Boolean defined in jet.MutableList"),
+ pair("java.util.List void clear()", "public abstract fun clear() : Unit defined in jet.MutableList"),
+ pair("java.util.List boolean contains(java.lang.Object)", "public abstract fun contains(o : jet.Any?) : jet.Boolean defined in jet.MutableList"),
+ pair("java.util.List boolean containsAll(java.util.Collection<?>)", "public abstract fun containsAll(c : jet.Collection<jet.Any?>) : jet.Boolean defined in jet.MutableList"),
+ pair("java.util.List boolean equals(java.lang.Object)", "public abstract fun equals(other : jet.Any?) : jet.Boolean defined in jet.MutableList"),
+ pair("java.util.List E get(int)", "public abstract fun get(index : jet.Int) : E defined in jet.MutableList"),
+ pair("java.util.List int hashCode()", "public abstract fun hashCode() : jet.Int defined in jet.MutableList"),
+ pair("java.util.List int indexOf(java.lang.Object)", "public abstract fun indexOf(o : jet.Any?) : jet.Int defined in jet.MutableList"),
+ pair("java.util.List boolean isEmpty()", "public abstract fun isEmpty() : jet.Boolean defined in jet.MutableList"),
+ pair("java.util.List java.util.Iterator<E> iterator()", "public abstract fun iterator() : jet.Iterator<E> defined in jet.MutableList"),
+ pair("java.util.List int lastIndexOf(java.lang.Object)", "public abstract fun lastIndexOf(o : jet.Any?) : jet.Int defined in jet.MutableList"),
+ pair("java.util.List java.util.ListIterator<E> listIterator()", "public abstract fun listIterator() : jet.MutableListIterator<E> defined in jet.MutableList"),
+ pair("java.util.List java.util.ListIterator<E> listIterator(int)", "public abstract fun listIterator(index : jet.Int) : jet.MutableListIterator<E> defined in jet.MutableList"),
+ pair("java.util.List E remove(int)", "public abstract fun remove(index : jet.Int) : E defined in jet.MutableList"),
+ pair("java.util.List boolean remove(java.lang.Object)", "public abstract fun remove(o : jet.Any?) : jet.Boolean defined in jet.MutableList"),
+ pair("java.util.List boolean removeAll(java.util.Collection<?>)", "public abstract fun removeAll(c : jet.Collection<jet.Any?>) : jet.Boolean defined in jet.MutableList"),
+ pair("java.util.List boolean retainAll(java.util.Collection<?>)", "public abstract fun retainAll(c : jet.Collection<jet.Any?>) : jet.Boolean defined in jet.MutableList"),
+ pair("java.util.List E set(int, E)", "public abstract fun set(index : jet.Int, element : E) : E defined in jet.MutableList"),
+ pair("java.util.List int size()", "public abstract fun size() : jet.Int defined in jet.MutableList"),
+ pair("java.util.List java.util.List<E> subList(int, int)", "public abstract fun subList(fromIndex : jet.Int, toIndex : jet.Int) : jet.MutableList<E> defined in jet.MutableList"),
+ pair("java.util.List T[] toArray(T[])", "public abstract fun <T> toArray(a : jet.Array<out T>) : jet.Array<T> defined in jet.MutableList"),
+ pair("java.util.List java.lang.Object[] toArray()", "public abstract fun toArray() : jet.Array<jet.Any?> defined in jet.MutableList")
+ );
+
+ put(b, "java.util.Set", "Set",
+ pair("java.util.Set boolean contains(java.lang.Object)", "public abstract fun contains(o : jet.Any?) : jet.Boolean defined in jet.Set"),
+ pair("java.util.Set boolean containsAll(java.util.Collection<?>)", "public abstract fun containsAll(c : jet.Collection<jet.Any?>) : jet.Boolean defined in jet.Set"),
+ pair("java.util.Set boolean equals(java.lang.Object)", "public abstract fun equals(other : jet.Any?) : jet.Boolean defined in jet.Set"),
+ pair("java.util.Set int hashCode()", "public abstract fun hashCode() : jet.Int defined in jet.Set"),
+ pair("java.util.Set boolean isEmpty()", "public abstract fun isEmpty() : jet.Boolean defined in jet.Set"),
+ pair("java.util.Set java.util.Iterator<E> iterator()", "public abstract fun iterator() : jet.Iterator<E> defined in jet.Set"),
+ pair("java.util.Set int size()", "public abstract fun size() : jet.Int defined in jet.Set"),
+ pair("java.util.Set T[] toArray(T[])", "public abstract fun <T> toArray(a : jet.Array<out T>) : jet.Array<T> defined in jet.Set"),
+ pair("java.util.Set java.lang.Object[] toArray()", "public abstract fun toArray() : jet.Array<jet.Any?> defined in jet.Set")
+ );
+
+ put(b, "java.util.Set", "MutableSet",
+ pair("java.util.Set boolean add(E)", "public abstract fun add(e : E) : jet.Boolean defined in jet.MutableSet"),
+ pair("java.util.Set boolean addAll(java.util.Collection<? extends E>)", "public abstract fun addAll(c : jet.Collection<E>) : jet.Boolean defined in jet.MutableSet"),
+ pair("java.util.Set void clear()", "public abstract fun clear() : Unit defined in jet.MutableSet"),
+ pair("java.util.Set boolean contains(java.lang.Object)", "public abstract fun contains(o : jet.Any?) : jet.Boolean defined in jet.MutableSet"),
+ pair("java.util.Set boolean containsAll(java.util.Collection<?>)", "public abstract fun containsAll(c : jet.Collection<jet.Any?>) : jet.Boolean defined in jet.MutableSet"),
+ pair("java.util.Set boolean equals(java.lang.Object)", "public abstract fun equals(other : jet.Any?) : jet.Boolean defined in jet.MutableSet"),
+ pair("java.util.Set int hashCode()", "public abstract fun hashCode() : jet.Int defined in jet.MutableSet"),
+ pair("java.util.Set boolean isEmpty()", "public abstract fun isEmpty() : jet.Boolean defined in jet.MutableSet"),
+ pair("java.util.Set java.util.Iterator<E> iterator()", "public abstract fun iterator() : jet.MutableIterator<E> defined in jet.MutableSet"),
+ pair("java.util.Set boolean remove(java.lang.Object)", "public abstract fun remove(o : jet.Any?) : jet.Boolean defined in jet.MutableSet"),
+ pair("java.util.Set boolean removeAll(java.util.Collection<?>)", "public abstract fun removeAll(c : jet.Collection<jet.Any?>) : jet.Boolean defined in jet.MutableSet"),
+ pair("java.util.Set boolean retainAll(java.util.Collection<?>)", "public abstract fun retainAll(c : jet.Collection<jet.Any?>) : jet.Boolean defined in jet.MutableSet"),
+ pair("java.util.Set int size()", "public abstract fun size() : jet.Int defined in jet.MutableSet"),
+ pair("java.util.Set T[] toArray(T[])", "public abstract fun <T> toArray(a : jet.Array<out T>) : jet.Array<T> defined in jet.MutableSet"),
+ pair("java.util.Set java.lang.Object[] toArray()", "public abstract fun toArray() : jet.Array<jet.Any?> defined in jet.MutableSet")
+ );
+
+ put(b, "java.util.Map", "Map",
+ pair("java.util.Map boolean containsKey(java.lang.Object)", "public abstract fun containsKey(key : jet.Any?) : jet.Boolean defined in jet.Map"),
+ pair("java.util.Map boolean containsValue(java.lang.Object)", "public abstract fun containsValue(value : jet.Any?) : jet.Boolean defined in jet.Map"),
+ pair("java.util.Map java.util.Set<java.util.Map.Entry<K,V>> entrySet()", "public abstract fun entrySet() : jet.Set<jet.Map.Entry<K, V>> defined in jet.Map"),
+ pair("java.util.Map V get(java.lang.Object)", "public abstract fun get(key : jet.Any?) : V? defined in jet.Map"),
+ pair("java.util.Map boolean isEmpty()", "public abstract fun isEmpty() : jet.Boolean defined in jet.Map"),
+ pair("java.util.Map java.util.Set<K> keySet()", "public abstract fun keySet() : jet.Set<K> defined in jet.Map"),
+ pair("java.util.Map int size()", "public abstract fun size() : jet.Int defined in jet.Map"),
+ pair("java.util.Map java.util.Collection<V> values()", "public abstract fun values() : jet.Collection<V> defined in jet.Map")
+ );
+
+ put(b, "java.util.Map", "MutableMap",
+ pair("java.util.Map void clear()", "public abstract fun clear() : Unit defined in jet.MutableMap"),
+ pair("java.util.Map boolean containsKey(java.lang.Object)", "public abstract fun containsKey(key : jet.Any?) : jet.Boolean defined in jet.MutableMap"),
+ pair("java.util.Map boolean containsValue(java.lang.Object)", "public abstract fun containsValue(value : jet.Any?) : jet.Boolean defined in jet.MutableMap"),
+ pair("java.util.Map java.util.Set<java.util.Map.Entry<K,V>> entrySet()", "public abstract fun entrySet() : jet.MutableSet<jet.MutableMap.MutableEntry<K, V>> defined in jet.MutableMap"),
+ pair("java.util.Map V get(java.lang.Object)", "public abstract fun get(key : jet.Any?) : V? defined in jet.MutableMap"),
+ pair("java.util.Map boolean isEmpty()", "public abstract fun isEmpty() : jet.Boolean defined in jet.MutableMap"),
+ pair("java.util.Map java.util.Set<K> keySet()", "public abstract fun keySet() : jet.MutableSet<K> defined in jet.MutableMap"),
+ pair("java.util.Map V put(K, V)", "public abstract fun put(key : K, value : V) : V? defined in jet.MutableMap"),
+ pair("java.util.Map void putAll(java.util.Map<? extends K,? extends V>)", "public abstract fun putAll(m : jet.Map<out K, out V>) : Unit defined in jet.MutableMap"),
+ pair("java.util.Map V remove(java.lang.Object)", "public abstract fun remove(key : jet.Any?) : V? defined in jet.MutableMap"),
+ pair("java.util.Map int size()", "public abstract fun size() : jet.Int defined in jet.MutableMap"),
+ pair("java.util.Map java.util.Collection<V> values()", "public abstract fun values() : jet.MutableCollection<V> defined in jet.MutableMap")
+ );
+
+ put(b, "java.util.Map.Entry", "Map.Entry",
+ pair("java.util.Map.Entry boolean equals(java.lang.Object)", "public abstract fun equals(other : jet.Any?) : jet.Boolean defined in jet.Map.Entry"),
+ pair("java.util.Map.Entry K getKey()", "public abstract fun getKey() : K defined in jet.Map.Entry"),
+ pair("java.util.Map.Entry V getValue()", "public abstract fun getValue() : V defined in jet.Map.Entry"),
+ pair("java.util.Map.Entry int hashCode()", "public abstract fun hashCode() : jet.Int defined in jet.Map.Entry")
+ );
+
+ put(b, "java.util.Map.Entry", "MutableMap.MutableEntry",
+ pair("java.util.Map.Entry boolean equals(java.lang.Object)", "public abstract fun equals(other : jet.Any?) : jet.Boolean defined in jet.MutableMap.MutableEntry"),
+ pair("java.util.Map.Entry K getKey()", "public abstract fun getKey() : K defined in jet.MutableMap.MutableEntry"),
+ pair("java.util.Map.Entry V getValue()", "public abstract fun getValue() : V defined in jet.MutableMap.MutableEntry"),
+ pair("java.util.Map.Entry int hashCode()", "public abstract fun hashCode() : jet.Int defined in jet.MutableMap.MutableEntry"),
+ pair("java.util.Map.Entry V setValue(V)", "public abstract fun setValue(value : V) : V defined in jet.MutableMap.MutableEntry")
+ );
+
+ put(b, "java.util.ListIterator", "ListIterator",
+ pair("java.util.ListIterator boolean hasNext()", "public abstract fun hasNext() : jet.Boolean defined in jet.ListIterator"),
+ pair("java.util.ListIterator boolean hasPrevious()", "public abstract fun hasPrevious() : jet.Boolean defined in jet.ListIterator"),
+ pair("java.util.ListIterator E next()", "public abstract fun next() : T defined in jet.ListIterator"),
+ pair("java.util.ListIterator int nextIndex()", "public abstract fun nextIndex() : jet.Int defined in jet.ListIterator"),
+ pair("java.util.ListIterator E previous()", "public abstract fun previous() : T defined in jet.ListIterator"),
+ pair("java.util.ListIterator int previousIndex()", "public abstract fun previousIndex() : jet.Int defined in jet.ListIterator")
+ );
+
+ put(b, "java.util.ListIterator", "MutableListIterator",
+ pair("java.util.ListIterator void add(E)", "public abstract fun add(e : T) : Unit defined in jet.MutableListIterator"),
+ pair("java.util.ListIterator boolean hasNext()", "public abstract fun hasNext() : jet.Boolean defined in jet.MutableListIterator"),
+ pair("java.util.ListIterator boolean hasPrevious()", "public abstract fun hasPrevious() : jet.Boolean defined in jet.MutableListIterator"),
+ pair("java.util.ListIterator E next()", "public abstract fun next() : T defined in jet.MutableListIterator"),
+ pair("java.util.ListIterator int nextIndex()", "public abstract fun nextIndex() : jet.Int defined in jet.MutableListIterator"),
+ pair("java.util.ListIterator E previous()", "public abstract fun previous() : T defined in jet.MutableListIterator"),
+ pair("java.util.ListIterator int previousIndex()", "public abstract fun previousIndex() : jet.Int defined in jet.MutableListIterator"),
+ pair("java.util.ListIterator void remove()", "public abstract fun remove() : Unit defined in jet.MutableListIterator"),
+ pair("java.util.ListIterator void set(E)", "public abstract fun set(e : T) : Unit defined in jet.MutableListIterator")
+ );
+
+ map = b.build();
+ }
+}
diff --git a/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/kotlinSignature/SignaturesPropagationData.java b/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/kotlinSignature/SignaturesPropagationData.java
index 4eecef9f415d4..238e5343ed568 100644
--- a/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/kotlinSignature/SignaturesPropagationData.java
+++ b/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/kotlinSignature/SignaturesPropagationData.java
@@ -28,7 +28,10 @@
import org.jetbrains.jet.lang.resolve.BindingTrace;
import org.jetbrains.jet.lang.resolve.java.CollectionClassMapping;
import org.jetbrains.jet.lang.resolve.java.JavaDescriptorResolver;
+import org.jetbrains.jet.lang.resolve.java.JavaToKotlinClassMap;
+import org.jetbrains.jet.lang.resolve.java.JavaToKotlinMethodMap;
import org.jetbrains.jet.lang.resolve.java.wrapper.PsiMethodWrapper;
+import org.jetbrains.jet.lang.resolve.name.FqName;
import org.jetbrains.jet.lang.resolve.name.FqNameUnsafe;
import org.jetbrains.jet.lang.resolve.scopes.JetScope;
import org.jetbrains.jet.lang.types.*;
@@ -50,13 +53,14 @@ public class SignaturesPropagationData {
private final Map<TypeParameterDescriptor, TypeParameterDescriptorImpl> autoTypeParameterToModified;
public SignaturesPropagationData(
+ @NotNull ClassDescriptor containingClass,
@NotNull JetType autoReturnType, // type built by JavaTypeTransformer from Java signature and @NotNull annotations
@NotNull JavaDescriptorResolver.ValueParameterDescriptors autoValueParameters, // descriptors built by parameters resolver
@NotNull List<TypeParameterDescriptor> autoTypeParameters, // descriptors built by signature resolver
@NotNull PsiMethodWrapper method,
@NotNull BindingTrace trace
) {
- superFunctions = getSuperFunctionsForMethod(method, trace);
+ superFunctions = getSuperFunctionsForMethod(method, trace, containingClass);
autoTypeParameterToModified = SignaturesUtil.recreateTypeParametersAndReturnMapping(autoTypeParameters);
@@ -187,7 +191,8 @@ public JetType fun(FunctionDescriptor superFunction) {
private static List<FunctionDescriptor> getSuperFunctionsForMethod(
@NotNull PsiMethodWrapper method,
- @NotNull BindingTrace trace
+ @NotNull BindingTrace trace,
+ @NotNull ClassDescriptor containingClass
) {
List<FunctionDescriptor> superFunctions = Lists.newArrayList();
for (HierarchicalMethodSignature superSignature : method.getPsiMethod().getHierarchicalMethodSignature().getSuperSignatures()) {
@@ -196,15 +201,22 @@ private static List<FunctionDescriptor> getSuperFunctionsForMethod(
superFunctions.add(((FunctionDescriptor) superFun));
}
else {
- // TODO assert is temporarily disabled
- // It fails because of bug in IDEA on Mac: it adds invalid roots to JDK classpath and it leads to the problem that
- // getHierarchicalMethodSignature() returns elements from invalid virtual files
-
- // Function descriptor can't be find iff superclass is java.lang.Collection or similar (translated to jet.* collections)
- //assert !JavaToKotlinClassMap.getInstance().mapPlatformClass(
- // new FqName(superSignature.getMethod().getContainingClass().getQualifiedName())).isEmpty():
- // "Can't find super function for " + method.getPsiMethod() + " defined in "
- // + method.getPsiMethod().getContainingClass();
+ String fqName = superSignature.getMethod().getContainingClass().getQualifiedName();
+ assert fqName != null;
+ Collection<ClassDescriptor> platformClasses = JavaToKotlinClassMap.getInstance().mapPlatformClass(new FqName(fqName));
+ if (platformClasses.isEmpty()) {
+ // TODO assert is temporarily disabled
+ // It fails because of bug in IDEA on Mac: it adds invalid roots to JDK classpath and it leads to the problem that
+ // getHierarchicalMethodSignature() returns elements from invalid virtual files
+
+ //assert false : "Can't find super function for " + method.getPsiMethod() +
+ // " defined in " + method.getPsiMethod().getContainingClass()
+ }
+ else {
+ List<FunctionDescriptor> funsFromMap =
+ JavaToKotlinMethodMap.INSTANCE.getFunctions(superSignature.getMethod(), containingClass);
+ superFunctions.addAll(funsFromMap);
+ }
}
}
diff --git a/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/resolver/JavaFunctionResolver.java b/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/resolver/JavaFunctionResolver.java
index fb1c73aca457c..a528a0577ec0c 100644
--- a/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/resolver/JavaFunctionResolver.java
+++ b/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/resolver/JavaFunctionResolver.java
@@ -144,8 +144,8 @@ private SimpleFunctionDescriptor resolveMethodToFunctionDescriptor(
List<FunctionDescriptor> superFunctions;
if (ownerDescriptor instanceof ClassDescriptor) {
- SignaturesPropagationData signaturesPropagationData =
- new SignaturesPropagationData(returnType, valueParameterDescriptors, methodTypeParameters, method, trace);
+ SignaturesPropagationData signaturesPropagationData = new SignaturesPropagationData(
+ (ClassDescriptor) ownerDescriptor, returnType, valueParameterDescriptors, methodTypeParameters, method, trace);
superFunctions = signaturesPropagationData.getSuperFunctions();
returnType = signaturesPropagationData.getModifiedReturnType();
@@ -214,6 +214,9 @@ private static void checkFunctionsOverrideCorrectly(
((ClassDescriptor) functionDescriptor.getContainingDeclaration()).getDefaultType());
FunctionDescriptor superFunctionSubstituted = superFunction.substitute(substitutor);
+ assert superFunctionSubstituted != null :
+ "Couldn't substitute super function: " + superFunction + ", substitutor = " + substitutor;
+
OverrideCompatibilityInfo.Result overridableResult =
isOverridableBy(superFunctionSubstituted, functionDescriptor).getResult();
boolean paramsOk = overridableResult == OverrideCompatibilityInfo.Result.OVERRIDABLE;
diff --git a/compiler/frontend/src/org/jetbrains/jet/lang/types/lang/KotlinBuiltIns.java b/compiler/frontend/src/org/jetbrains/jet/lang/types/lang/KotlinBuiltIns.java
index 75e97187252d0..c8d2153ce2534 100644
--- a/compiler/frontend/src/org/jetbrains/jet/lang/types/lang/KotlinBuiltIns.java
+++ b/compiler/frontend/src/org/jetbrains/jet/lang/types/lang/KotlinBuiltIns.java
@@ -334,12 +334,17 @@ public JetScope getBuiltInsScope() {
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
@NotNull
- private ClassDescriptor getBuiltInClassByName(@NotNull String simpleName) {
- ClassifierDescriptor classifier = getBuiltInsScope().getClassifier(Name.identifier(simpleName));
+ public ClassDescriptor getBuiltInClassByName(@NotNull Name simpleName) {
+ ClassifierDescriptor classifier = getBuiltInsScope().getClassifier(simpleName);
assert classifier instanceof ClassDescriptor : "Must be a class descriptor " + simpleName + ", but was " + classifier;
return (ClassDescriptor) classifier;
}
+ @NotNull
+ private ClassDescriptor getBuiltInClassByName(@NotNull String simpleName) {
+ return getBuiltInClassByName(Name.identifier(simpleName));
+ }
+
// Special
@NotNull
diff --git a/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfCollection.java b/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfCollection.java
new file mode 100644
index 0000000000000..d5e36c9d1635b
--- /dev/null
+++ b/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfCollection.java
@@ -0,0 +1,8 @@
+package test;
+
+import java.util.*;
+
+public interface SubclassOfCollection<E> extends Collection<E> {
+ Iterator<E> iterator();
+
+}
diff --git a/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfCollection.kt b/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfCollection.kt
new file mode 100644
index 0000000000000..d4159b0dd5dc3
--- /dev/null
+++ b/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfCollection.kt
@@ -0,0 +1,5 @@
+package test
+
+public trait SubclassOfCollection<E>: MutableCollection<E> {
+ override fun iterator() : MutableIterator<E>
+}
diff --git a/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfCollection.txt b/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfCollection.txt
new file mode 100644
index 0000000000000..c5005631e02fd
--- /dev/null
+++ b/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfCollection.txt
@@ -0,0 +1,17 @@
+namespace test
+
+public abstract trait test.SubclassOfCollection</*0*/ E : jet.Any?> : jet.MutableCollection<E> {
+ public abstract override /*1*/ /*fake_override*/ fun add(/*0*/ e: E): jet.Boolean
+ public abstract override /*1*/ /*fake_override*/ fun addAll(/*0*/ c: jet.Collection<E>): jet.Boolean
+ public abstract override /*1*/ /*fake_override*/ fun clear(): jet.Tuple0
+ public abstract override /*1*/ /*fake_override*/ fun contains(/*0*/ o: jet.Any?): jet.Boolean
+ public abstract override /*1*/ /*fake_override*/ fun containsAll(/*0*/ c: jet.Collection<jet.Any?>): jet.Boolean
+ public abstract override /*1*/ /*fake_override*/ fun isEmpty(): jet.Boolean
+ public abstract override /*1*/ fun iterator(): jet.MutableIterator<E>
+ public abstract override /*1*/ /*fake_override*/ fun remove(/*0*/ o: jet.Any?): jet.Boolean
+ public abstract override /*1*/ /*fake_override*/ fun removeAll(/*0*/ c: jet.Collection<jet.Any?>): jet.Boolean
+ public abstract override /*1*/ /*fake_override*/ fun retainAll(/*0*/ c: jet.Collection<jet.Any?>): jet.Boolean
+ public abstract override /*1*/ /*fake_override*/ fun size(): jet.Int
+ public abstract override /*1*/ /*fake_override*/ fun toArray(): jet.Array<jet.Any?>
+ public abstract override /*1*/ /*fake_override*/ fun </*0*/ T : jet.Any?>toArray(/*0*/ a: jet.Array<out T>): jet.Array<T>
+}
diff --git a/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfMapEntry.java b/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfMapEntry.java
new file mode 100644
index 0000000000000..0924783402ae3
--- /dev/null
+++ b/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfMapEntry.java
@@ -0,0 +1,7 @@
+package test;
+
+import java.util.*;
+
+public interface SubclassOfMapEntry<K, V> extends Map.Entry<K, V> {
+ V setValue(V v);
+}
diff --git a/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfMapEntry.kt b/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfMapEntry.kt
new file mode 100644
index 0000000000000..73127c5cab896
--- /dev/null
+++ b/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfMapEntry.kt
@@ -0,0 +1,5 @@
+package test
+
+public trait SubclassOfMapEntry<K, V>: MutableMap.MutableEntry<K, V> {
+ override fun setValue(p0: V) : V
+}
diff --git a/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfMapEntry.txt b/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfMapEntry.txt
new file mode 100644
index 0000000000000..0c728cfaf3dc9
--- /dev/null
+++ b/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfMapEntry.txt
@@ -0,0 +1,7 @@
+namespace test
+
+public abstract trait test.SubclassOfMapEntry</*0*/ K : jet.Any?, /*1*/ V : jet.Any?> : jet.MutableMap.MutableEntry<K, V> {
+ public abstract override /*1*/ /*fake_override*/ fun getKey(): K
+ public abstract override /*1*/ /*fake_override*/ fun getValue(): V
+ public abstract override /*1*/ fun setValue(/*0*/ p0: V): V
+}
diff --git a/compiler/testData/loadJava/modality/ModalityOfFakeOverrides.txt b/compiler/testData/loadJava/modality/ModalityOfFakeOverrides.txt
index 0f79be88c2f99..aaf74fc13a53b 100644
--- a/compiler/testData/loadJava/modality/ModalityOfFakeOverrides.txt
+++ b/compiler/testData/loadJava/modality/ModalityOfFakeOverrides.txt
@@ -18,7 +18,7 @@ public open class test.ModalityOfFakeOverrides : java.util.AbstractList<jet.Stri
public open override /*1*/ /*fake_override*/ fun listIterator(/*0*/ p0: jet.Int): jet.MutableListIterator<jet.String>
protected final override /*1*/ /*fake_override*/ var modCount: jet.Int
public open override /*1*/ /*fake_override*/ fun remove(/*0*/ p0: jet.Any?): jet.Boolean
- public open override /*1*/ /*fake_override*/ fun remove(/*0*/ p0: jet.Int): jet.String?
+ public open override /*1*/ /*fake_override*/ fun remove(/*0*/ p0: jet.Int): jet.String
public open override /*1*/ /*fake_override*/ fun removeAll(/*0*/ p0: jet.Collection<jet.Any?>): jet.Boolean
protected open override /*1*/ /*fake_override*/ fun removeRange(/*0*/ p0: jet.Int, /*1*/ p1: jet.Int): jet.Tuple0
public open override /*1*/ /*fake_override*/ fun retainAll(/*0*/ p0: jet.Collection<jet.Any?>): jet.Boolean
diff --git a/compiler/tests/org/jetbrains/jet/jvm/compiler/LoadJavaTestGenerated.java b/compiler/tests/org/jetbrains/jet/jvm/compiler/LoadJavaTestGenerated.java
index 09f489924fed1..cd3dd08bbb775 100644
--- a/compiler/tests/org/jetbrains/jet/jvm/compiler/LoadJavaTestGenerated.java
+++ b/compiler/tests/org/jetbrains/jet/jvm/compiler/LoadJavaTestGenerated.java
@@ -649,6 +649,16 @@ public void testSameProjectionKind() throws Exception {
doTest("compiler/testData/loadJava/kotlinSignature/propagation/return/SameProjectionKind.java");
}
+ @TestMetadata("SubclassOfCollection.java")
+ public void testSubclassOfCollection() throws Exception {
+ doTest("compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfCollection.java");
+ }
+
+ @TestMetadata("SubclassOfMapEntry.java")
+ public void testSubclassOfMapEntry() throws Exception {
+ doTest("compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfMapEntry.java");
+ }
+
@TestMetadata("TwoSuperclassesConflictingProjectionKinds.java")
public void testTwoSuperclassesConflictingProjectionKinds() throws Exception {
doTest("compiler/testData/loadJava/kotlinSignature/propagation/return/TwoSuperclassesConflictingProjectionKinds.java");
diff --git a/compiler/tests/org/jetbrains/jet/lang/resolve/lazy/LazyResolveNamespaceComparingTestGenerated.java b/compiler/tests/org/jetbrains/jet/lang/resolve/lazy/LazyResolveNamespaceComparingTestGenerated.java
index c22111f4f4093..736054fa0d33d 100644
--- a/compiler/tests/org/jetbrains/jet/lang/resolve/lazy/LazyResolveNamespaceComparingTestGenerated.java
+++ b/compiler/tests/org/jetbrains/jet/lang/resolve/lazy/LazyResolveNamespaceComparingTestGenerated.java
@@ -1539,6 +1539,16 @@ public void testSameProjectionKind() throws Exception {
doTestSinglePackage("compiler/testData/loadJava/kotlinSignature/propagation/return/SameProjectionKind.kt");
}
+ @TestMetadata("SubclassOfCollection.kt")
+ public void testSubclassOfCollection() throws Exception {
+ doTestSinglePackage("compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfCollection.kt");
+ }
+
+ @TestMetadata("SubclassOfMapEntry.kt")
+ public void testSubclassOfMapEntry() throws Exception {
+ doTestSinglePackage("compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfMapEntry.kt");
+ }
+
@TestMetadata("TwoSuperclassesConflictingProjectionKinds.kt")
public void testTwoSuperclassesConflictingProjectionKinds() throws Exception {
doTestSinglePackage("compiler/testData/loadJava/kotlinSignature/propagation/return/TwoSuperclassesConflictingProjectionKinds.kt");
diff --git a/generators/generators.iml b/generators/generators.iml
index a02cc242438e9..06ad1f8bf7f10 100644
--- a/generators/generators.iml
+++ b/generators/generators.iml
@@ -14,6 +14,7 @@
<orderEntry type="module" module-name="frontend" scope="TEST" />
<orderEntry type="module" module-name="frontend.java" scope="TEST" />
<orderEntry type="module" module-name="injector-generator" scope="TEST" />
+ <orderEntry type="module" module-name="cli" scope="TEST" />
</component>
</module>
diff --git a/generators/org/jetbrains/jet/generators/GenerateJavaToKotlinMethodMap.java b/generators/org/jetbrains/jet/generators/GenerateJavaToKotlinMethodMap.java
new file mode 100644
index 0000000000000..7cfbc1d08c962
--- /dev/null
+++ b/generators/org/jetbrains/jet/generators/GenerateJavaToKotlinMethodMap.java
@@ -0,0 +1,248 @@
+/*
+ * Copyright 2010-2012 JetBrains s.r.o.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.jetbrains.jet.generators;
+
+import com.google.common.collect.Lists;
+import com.intellij.openapi.components.ServiceManager;
+import com.intellij.openapi.project.Project;
+import com.intellij.openapi.util.Pair;
+import com.intellij.openapi.util.io.FileUtil;
+import com.intellij.psi.PsiClass;
+import com.intellij.psi.PsiMethod;
+import com.intellij.psi.impl.file.impl.JavaFileManager;
+import com.intellij.psi.search.GlobalSearchScope;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+import org.jetbrains.jet.CompileCompilerDependenciesTest;
+import org.jetbrains.jet.ConfigurationKind;
+import org.jetbrains.jet.TestJdkKind;
+import org.jetbrains.jet.cli.jvm.compiler.CompileEnvironmentUtil;
+import org.jetbrains.jet.cli.jvm.compiler.JetCoreEnvironment;
+import org.jetbrains.jet.lang.descriptors.ClassDescriptor;
+import org.jetbrains.jet.lang.descriptors.DeclarationDescriptor;
+import org.jetbrains.jet.lang.descriptors.FunctionDescriptor;
+import org.jetbrains.jet.lang.resolve.DescriptorUtils;
+import org.jetbrains.jet.lang.resolve.java.JavaToKotlinClassMapBuilder;
+import org.jetbrains.jet.lang.types.lang.KotlinBuiltIns;
+import org.jetbrains.jet.resolve.DescriptorRenderer;
+import org.jetbrains.jet.utils.Printer;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+
+import static org.jetbrains.jet.lang.resolve.java.JavaToKotlinMethodMap.serializeFunction;
+import static org.jetbrains.jet.lang.resolve.java.JavaToKotlinMethodMap.serializePsiMethod;
+
+public class GenerateJavaToKotlinMethodMap {
+
+ public static final String BUILTINS_FQNAME_PREFIX = KotlinBuiltIns.BUILT_INS_PACKAGE_FQ_NAME.getFqName() + ".";
+
+ public static void main(String[] args) throws IOException {
+ JetCoreEnvironment coreEnvironment = new JetCoreEnvironment(
+ CompileEnvironmentUtil.createMockDisposable(),
+ CompileCompilerDependenciesTest.compilerConfigurationForTests(ConfigurationKind.JDK_ONLY, TestJdkKind.FULL_JDK));
+
+ StringBuilder buf = new StringBuilder();
+ Printer printer = new Printer(buf);
+
+ printer.print(FileUtil.loadFile(new File("injector-generator/copyright.txt")))
+ .println()
+ .println("package org.jetbrains.jet.lang.resolve.java;")
+ .println()
+ .println("import com.google.common.collect.ImmutableMultimap;")
+ .println()
+ .println("import static org.jetbrains.jet.lang.resolve.java.JavaToKotlinMethodMap.*;")
+ .println()
+ .println("/* This file is generated by ", GenerateJavaToKotlinMethodMap.class.getName(), ". DO NOT EDIT! */")
+ .println("@SuppressWarnings(\"unchecked\")")
+ .println("class JavaToKotlinMethodMapGenerated {").pushIndent()
+ .println("final ImmutableMultimap<String, JavaToKotlinMethodMap.ClassData> map;")
+ .println()
+ .println("JavaToKotlinMethodMapGenerated() {").pushIndent()
+ .println("ImmutableMultimap.Builder<String, JavaToKotlinMethodMap.ClassData> b = ImmutableMultimap.builder();")
+ .println();
+
+ MyMapBuilder builder = new MyMapBuilder(coreEnvironment.getProject());
+ printer.printWithNoIndent(builder.toString());
+
+ printer.println("map = b.build();");
+ printer.popIndent().println("}");
+ printer.popIndent().println("}");
+
+ //noinspection IOResourceOpenedButNotSafelyClosed
+ FileWriter out =
+ new FileWriter("compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/JavaToKotlinMethodMapGenerated.java");
+
+ out.write(buf.toString());
+ out.close();
+ }
+
+ private static class MyMapBuilder extends JavaToKotlinClassMapBuilder {
+ private final Project project;
+ private final StringBuilder buf = new StringBuilder();
+ private final Printer printer = new Printer(buf).pushIndent().pushIndent();
+
+ public MyMapBuilder(@NotNull Project project) {
+ this.project = project;
+ init();
+ }
+
+ @Override
+ protected void register(@NotNull Class<?> javaClass, @NotNull ClassDescriptor kotlinDescriptor, @NotNull Direction direction) {
+ processClass(javaClass, kotlinDescriptor);
+ }
+
+ @Override
+ protected void register(@NotNull Class<?> javaClass,
+ @NotNull ClassDescriptor kotlinDescriptor,
+ @NotNull ClassDescriptor kotlinMutableDescriptor,
+ @NotNull Direction direction) {
+ processClass(javaClass, kotlinDescriptor);
+ processClass(javaClass, kotlinMutableDescriptor);
+ }
+
+ private void processClass(@NotNull Class<?> javaClass, @NotNull ClassDescriptor kotlinClass) {
+ JavaFileManager javaFileManager = ServiceManager.getService(project, JavaFileManager.class);
+ PsiClass psiClass = javaFileManager.findClass(javaClass.getCanonicalName(), GlobalSearchScope.allScope(project));
+ assert psiClass != null;
+
+ List<Pair<PsiMethod, FunctionDescriptor>> methods2Functions = getClassMethods2Functions(kotlinClass, psiClass);
+ if (!methods2Functions.isEmpty()) {
+ appendBeforeClass(kotlinClass, psiClass);
+ appendClass(methods2Functions);
+ appendAfterClass();
+ }
+ }
+
+ private static List<Pair<PsiMethod, FunctionDescriptor>> getClassMethods2Functions(
+ @NotNull ClassDescriptor kotlinClass,
+ @NotNull PsiClass psiClass
+ ) {
+ PsiMethod[] methods = psiClass.getMethods();
+
+ List<Pair<PsiMethod, FunctionDescriptor>> result = Lists.newArrayList();
+
+ for (DeclarationDescriptor member : kotlinClass.getDefaultType().getMemberScope().getAllDescriptors()) {
+ if (!(member instanceof FunctionDescriptor) || member.getContainingDeclaration() != kotlinClass) {
+ continue;
+ }
+
+ FunctionDescriptor fun = (FunctionDescriptor) member;
+ PsiMethod foundMethod = findMethod(methods, fun);
+ if (foundMethod != null) {
+ result.add(Pair.create(foundMethod, fun));
+ }
+ }
+
+ Collections.sort(result, new Comparator<Pair<PsiMethod, FunctionDescriptor>>() {
+ @Override
+ public int compare(Pair<PsiMethod, FunctionDescriptor> pair1, Pair<PsiMethod, FunctionDescriptor> pair2) {
+ PsiMethod method1 = pair1.first;
+ PsiMethod method2 = pair2.first;
+
+ String name1 = method1.getName();
+ String name2 = method2.getName();
+ if (!name1.equals(name2)) {
+ return name1.compareTo(name2);
+ }
+
+ String serialized1 = serializePsiMethod(method1);
+ String serialized2 = serializePsiMethod(method2);
+ return serialized1.compareTo(serialized2);
+ }
+ });
+ return result;
+ }
+
+ private static boolean match(@NotNull PsiMethod method, @NotNull FunctionDescriptor fun) {
+ // Compare method an function by name and parameters count. For all methods except one (List.remove) it is enough.
+ // If this changes, there will be assertion error in findMethod()
+ if (method.getName().equals(fun.getName().getIdentifier())
+ && method.getParameterList().getParametersCount() == fun.getValueParameters().size()) {
+
+ // "special case": remove(Int) and remove(Any?) in MutableList
+ if (method.getName().equals("remove") && method.getContainingClass().getName().equals("List")) {
+ String psiType = method.getParameterList().getParameters()[0].getType().getPresentableText();
+ String jetType = DescriptorRenderer.TEXT.renderTypeWithShortNames(fun.getValueParameters().get(0).getType());
+ String string = psiType + "|" + jetType;
+
+ return "int|Int".equals(string) || "Object|Any?".equals(string);
+ }
+
+ return true;
+ }
+ return false;
+ }
+
+ @Nullable
+ private static PsiMethod findMethod(@NotNull PsiMethod[] methods, @NotNull FunctionDescriptor fun) {
+ PsiMethod found = null;
+ for (PsiMethod method : methods) {
+ if (match(method, fun)) {
+ if (found != null) {
+ throw new AssertionError("Duplicate for " + fun);
+ }
+
+ found = method;
+ }
+ }
+
+ return found;
+ }
+
+ private void appendBeforeClass(@NotNull ClassDescriptor kotlinClass, @NotNull PsiClass psiClass) {
+ String psiFqName = psiClass.getQualifiedName();
+ String kotlinFqName = DescriptorUtils.getFQName(kotlinClass).toSafe().getFqName();
+
+ assert kotlinFqName.startsWith(BUILTINS_FQNAME_PREFIX);
+ String kotlinSubQualifiedName = kotlinFqName.substring(BUILTINS_FQNAME_PREFIX.length());
+ printer.println("put(b, \"", psiFqName, "\", \"", kotlinSubQualifiedName, "\",").pushIndent();
+ }
+
+ private void appendClass(@NotNull List<Pair<PsiMethod, FunctionDescriptor>> methods2Functions) {
+ int index = 0;
+ for (Pair<PsiMethod, FunctionDescriptor> method2Function : methods2Functions) {
+ printer.print("pair(\"", serializePsiMethod(method2Function.first), "\", \"", serializeFunction(method2Function.second),
+ "\")");
+
+ if (index != methods2Functions.size() - 1) {
+ printer.printWithNoIndent(",");
+ }
+
+ printer.println();
+
+ index++;
+ }
+ }
+
+ private void appendAfterClass() {
+ printer.popIndent().println(");").println();
+ }
+
+
+ public String toString() {
+ return buf.toString();
+ }
+ }
+
+ private GenerateJavaToKotlinMethodMap() {
+ }
+}
diff --git a/jdk-annotations/java/util/annotations.xml b/jdk-annotations/java/util/annotations.xml
index 3251649264e78..c1f62eced16e3 100644
--- a/jdk-annotations/java/util/annotations.xml
+++ b/jdk-annotations/java/util/annotations.xml
@@ -1,4 +1,10 @@
<root>
+ <item name='java.util.Dictionary V put(K, V)'>
+ <annotation name='jet.runtime.typeinfo.KotlinSignature'>
+ <val name="value" val=""fun put(key : K, value : V) : V?""/>
+ </annotation>
+ </item>
+
<item name='java.util.AbstractList boolean add(E)'>
<annotation name='jet.runtime.typeinfo.KotlinSignature'>
<val name="value" val=""fun add(e : E) : Boolean""/>
@@ -679,12 +685,12 @@
</item>
<item name='java.util.AbstractMap java.util.Set<java.util.Map.Entry<K,V>> entrySet()'>
<annotation name='jet.runtime.typeinfo.KotlinSignature'>
- <val name="value" val=""fun entrySet() : Set<Map.Entry<K, V>>""/>
+ <val name="value" val=""fun entrySet() : MutableSet<MutableMap.MutableEntry<K, V>>""/>
</annotation>
</item>
<item name='java.util.AbstractMap java.util.Set<K> keySet()'>
<annotation name='jet.runtime.typeinfo.KotlinSignature'>
- <val name="value" val=""fun keySet() : Set<K>""/>
+ <val name="value" val=""fun keySet() : MutableSet<K>""/>
</annotation>
</item>
<item name='java.util.AbstractMap V put(K, V)'>
@@ -699,7 +705,7 @@
</item>
<item name='java.util.AbstractMap java.util.Collection<V> values()'>
<annotation name='jet.runtime.typeinfo.KotlinSignature'>
- <val name="value" val=""fun values() : Collection<V>""/>
+ <val name="value" val=""fun values() : MutableCollection<V>""/>
</annotation>
</item>
<item name='java.util.AbstractSequentialList void add(int, E)'>
|
391e4b4c4ce7d9be1c0051614b970c85cce91c9f
|
restlet-framework-java
|
- Fixed plugin descriptors for some extensions.--
|
c
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/build.number b/build.number
index 7a31427663..43097911f6 100644
--- a/build.number
+++ b/build.number
@@ -1,3 +1,3 @@
#Build Number for ANT. Do not edit!
-#Sun Oct 15 18:28:20 CEST 2006
-build.number=340
+#Mon Oct 16 14:37:01 CEST 2006
+build.number=341
diff --git a/plugins/internal/com.noelios.restlet.example/META-INF/MANIFEST.MF b/plugins/internal/com.noelios.restlet.example/META-INF/MANIFEST.MF
index 3a3d86e53f..e79eac8b14 100644
--- a/plugins/internal/com.noelios.restlet.example/META-INF/MANIFEST.MF
+++ b/plugins/internal/com.noelios.restlet.example/META-INF/MANIFEST.MF
@@ -8,8 +8,8 @@ Bundle-Localization: plugin
Require-Bundle: org.restlet,
com.noelios.restlet,
com.noelios.restlet.ext.net,
- com.noelios.restlet.ext.jetty6;resolution:=optional,
com.noelios.restlet.ext.simple;resolution:=optional,
+ com.noelios.restlet.ext.jetty6;resolution:=optional,
com.noelios.restlet.ext.asyncweb;resolution:=optional,
org.apache.mina;resolution:=optional,
org.apache.commons.logging;resolution:=optional,
diff --git a/plugins/internal/com.noelios.restlet.example/src/com/noelios/restlet/example/misc/SimpleServer.java b/plugins/internal/com.noelios.restlet.example/src/com/noelios/restlet/example/misc/SimpleServer.java
index 4ef292b45c..2bd921295f 100644
--- a/plugins/internal/com.noelios.restlet.example/src/com/noelios/restlet/example/misc/SimpleServer.java
+++ b/plugins/internal/com.noelios.restlet.example/src/com/noelios/restlet/example/misc/SimpleServer.java
@@ -23,7 +23,6 @@
package com.noelios.restlet.example.misc;
import org.restlet.Container;
-import org.restlet.Context;
import org.restlet.Restlet;
import org.restlet.data.Form;
import org.restlet.data.MediaType;
@@ -43,16 +42,15 @@ public static void main(String[] args)
try
{
// Create a new Restlet container
- Container myContainer = new Container();
- Context myContext = myContainer.getContext();
+ Container container = new Container();
// Create the HTTP server connector, then add it as a server
// connector to the Restlet container. Note that the container
// is the call restlet.
- myContainer.getServers().add(Protocol.HTTP, 9876);
+ container.getServers().add(Protocol.HTTP, 9876);
// Prepare and attach a test Restlet
- Restlet testRestlet = new Restlet(myContext)
+ Restlet handler = new Restlet(container.getContext())
{
public void handlePut(Request request, Response response)
{
@@ -78,10 +76,10 @@ public void handlePut(Request request, Response response)
}
};
- myContainer.getDefaultHost().attach("/test", testRestlet);
+ container.getDefaultHost().attach("/test", handler);
// Now, start the container
- myContainer.start();
+ container.start();
}
catch(Exception e)
{
diff --git a/plugins/internal/com.noelios.restlet.example/src/com/noelios/restlet/example/tutorial/Tutorial05.java b/plugins/internal/com.noelios.restlet.example/src/com/noelios/restlet/example/tutorial/Tutorial05.java
index a65dec98c5..795198ccb3 100644
--- a/plugins/internal/com.noelios.restlet.example/src/com/noelios/restlet/example/tutorial/Tutorial05.java
+++ b/plugins/internal/com.noelios.restlet.example/src/com/noelios/restlet/example/tutorial/Tutorial05.java
@@ -38,11 +38,11 @@ public class Tutorial05
public static void main(String[] args) throws Exception
{
// Create a new Restlet container and add a HTTP server connector to it
- Container myContainer = new Container();
- myContainer.getServers().add(Protocol.HTTP, 8182);
+ Container container = new Container();
+ container.getServers().add(Protocol.HTTP, 8182);
// Create a new Restlet that will display some path information.
- Restlet myRestlet = new Restlet()
+ Restlet handler = new Restlet()
{
public void handleGet(Request request, Response response)
{
@@ -56,11 +56,11 @@ public void handleGet(Request request, Response response)
};
// Then attach it to the local host
- myContainer.getDefaultHost().attach("/trace", myRestlet);
+ container.getDefaultHost().attach("/trace", handler);
// Now, let's start the container!
// Note that the HTTP server connector is also automatically started.
- myContainer.start();
+ container.start();
}
}
diff --git a/plugins/internal/com.noelios.restlet.ext.jetty_6.0/META-INF/MANIFEST.MF b/plugins/internal/com.noelios.restlet.ext.jetty_6.0/META-INF/MANIFEST.MF
index a45210028b..a2c10fbba7 100644
--- a/plugins/internal/com.noelios.restlet.ext.jetty_6.0/META-INF/MANIFEST.MF
+++ b/plugins/internal/com.noelios.restlet.ext.jetty_6.0/META-INF/MANIFEST.MF
@@ -9,3 +9,4 @@ Require-Bundle: org.restlet,
com.noelios.restlet,
javax.servlet5,
org.mortbay.jetty6
+Export-Package: com.noelios.restlet.ext.jetty
diff --git a/plugins/internal/com.noelios.restlet.ext.net/META-INF/MANIFEST.MF b/plugins/internal/com.noelios.restlet.ext.net/META-INF/MANIFEST.MF
index c772339c84..d896bf6f3a 100644
--- a/plugins/internal/com.noelios.restlet.ext.net/META-INF/MANIFEST.MF
+++ b/plugins/internal/com.noelios.restlet.ext.net/META-INF/MANIFEST.MF
@@ -7,3 +7,4 @@ Bundle-Vendor: Noelios Consulting
Bundle-Localization: plugin
Require-Bundle: org.restlet,
com.noelios.restlet
+Export-Package: com.noelios.restlet.ext.net
diff --git a/plugins/internal/com.noelios.restlet.ext.servlet_2.4/META-INF/MANIFEST.MF b/plugins/internal/com.noelios.restlet.ext.servlet_2.4/META-INF/MANIFEST.MF
index f80e299ed6..fa7b7d1afe 100644
--- a/plugins/internal/com.noelios.restlet.ext.servlet_2.4/META-INF/MANIFEST.MF
+++ b/plugins/internal/com.noelios.restlet.ext.servlet_2.4/META-INF/MANIFEST.MF
@@ -8,3 +8,4 @@ Bundle-Localization: plugin
Require-Bundle: org.restlet,
com.noelios.restlet,
javax.servlet4
+Export-Package: com.noelios.restlet.ext.servlet
diff --git a/plugins/internal/com.noelios.restlet.ext.servlet_2.4/src/com/noelios/restlet/ext/servlet/ServerServlet.java b/plugins/internal/com.noelios.restlet.ext.servlet_2.4/src/com/noelios/restlet/ext/servlet/ServerServlet.java
index a3d2d56df1..b1701d3c28 100644
--- a/plugins/internal/com.noelios.restlet.ext.servlet_2.4/src/com/noelios/restlet/ext/servlet/ServerServlet.java
+++ b/plugins/internal/com.noelios.restlet.ext.servlet_2.4/src/com/noelios/restlet/ext/servlet/ServerServlet.java
@@ -43,22 +43,21 @@
* <?xml version="1.0" encoding="ISO-8859-1"?>
* <!DOCTYPE web-app PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN" "http://java.sun.com/dtd/web-app_2_3.dtd">
* <web-app>
- * <display-name>Server Servlet</display-name>
- * <description>Servlet acting as a Restlet server connector</description>
+ * <display-name>Restlet adapter</display-name>
*
- * <!-- Application class handling calls -->
+ * <!-- Your application class name -->
* <context-param>
* <param-name>org.restlet.application</param-name>
* <param-value>com.noelios.restlet.test.TraceApplication</param-value>
* </context-param>
*
- * <!-- ServerServlet class or a subclass -->
+ * <!-- Restlet adapter -->
* <servlet>
* <servlet-name>ServerServlet</servlet-name>
* <servlet-class>com.noelios.restlet.ext.servlet.ServerServlet</servlet-class>
* </servlet>
*
- * <!-- Mapping of requests to the ServerServlet -->
+ * <!-- Catch all requests -->
* <servlet-mapping>
* <servlet-name>ServerServlet</servlet-name>
* <url-pattern>/*</url-pattern>
diff --git a/plugins/internal/com.noelios.restlet.ext.simple_3.1/META-INF/MANIFEST.MF b/plugins/internal/com.noelios.restlet.ext.simple_3.1/META-INF/MANIFEST.MF
index f8146a7664..cb9f582bcd 100644
--- a/plugins/internal/com.noelios.restlet.ext.simple_3.1/META-INF/MANIFEST.MF
+++ b/plugins/internal/com.noelios.restlet.ext.simple_3.1/META-INF/MANIFEST.MF
@@ -8,3 +8,4 @@ Bundle-Localization: plugin
Require-Bundle: org.restlet,
com.noelios.restlet,
org.simpleframework;bundle-version="3.1.0"
+Export-Package: com.noelios.restlet.ext.simple
diff --git a/plugins/internal/com.noelios.restlet.test/META-INF/MANIFEST.MF b/plugins/internal/com.noelios.restlet.test/META-INF/MANIFEST.MF
index 248ea19fa2..4a1b9b2663 100644
--- a/plugins/internal/com.noelios.restlet.test/META-INF/MANIFEST.MF
+++ b/plugins/internal/com.noelios.restlet.test/META-INF/MANIFEST.MF
@@ -13,3 +13,4 @@ Require-Bundle: org.restlet,
com.noelios.restlet.ext.simple;resolution:=optional,
com.noelios.restlet.ext.asyncweb;resolution:=optional,
com.noelios.restlet.ext.atom
+Export-Package: com.noelios.restlet.test
|
5ebbbc8a3c45010b2f52f14477785a1dd9fd02b8
|
drools
|
[DROOLS-812] properly close InputStreams--
|
c
|
https://github.com/kiegroup/drools
|
diff --git a/drools-compiler/src/main/java/org/drools/compiler/rule/builder/dialect/java/JavaRuleBuilderHelper.java b/drools-compiler/src/main/java/org/drools/compiler/rule/builder/dialect/java/JavaRuleBuilderHelper.java
index 1aeb7b2e87c..adbbfed3de5 100644
--- a/drools-compiler/src/main/java/org/drools/compiler/rule/builder/dialect/java/JavaRuleBuilderHelper.java
+++ b/drools-compiler/src/main/java/org/drools/compiler/rule/builder/dialect/java/JavaRuleBuilderHelper.java
@@ -19,22 +19,24 @@
import org.drools.compiler.compiler.DescrBuildError;
import org.drools.compiler.lang.descr.BaseDescr;
import org.drools.compiler.lang.descr.RuleDescr;
+import org.drools.compiler.rule.builder.RuleBuildContext;
import org.drools.core.definitions.rule.impl.RuleImpl;
-import org.drools.core.util.StringUtils;
import org.drools.core.reteoo.RuleTerminalNode;
import org.drools.core.rule.Declaration;
import org.drools.core.rule.JavaDialectRuntimeData;
-import org.drools.compiler.rule.builder.RuleBuildContext;
import org.drools.core.spi.AcceptsClassObjectType;
import org.drools.core.spi.KnowledgeHelper;
-import org.mvel2.ParserConfiguration;
-import org.mvel2.ParserContext;
+import org.drools.core.util.StringUtils;
import org.mvel2.integration.impl.MapVariableResolverFactory;
import org.mvel2.templates.SimpleTemplateRegistry;
import org.mvel2.templates.TemplateCompiler;
import org.mvel2.templates.TemplateRegistry;
import org.mvel2.templates.TemplateRuntime;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import java.io.IOException;
+import java.io.InputStream;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
@@ -42,6 +44,8 @@
public final class JavaRuleBuilderHelper {
+ private static final Logger logger = LoggerFactory.getLogger(JavaRuleBuilderHelper.class);
+
protected static TemplateRegistry RULE_REGISTRY = new SimpleTemplateRegistry();
protected static TemplateRegistry INVOKER_REGISTRY = new SimpleTemplateRegistry();
@@ -51,6 +55,7 @@ public final class JavaRuleBuilderHelper {
public static void setConsequenceTemplate( String name ) {
JAVA_RULE_MVEL = name;
RULE_REGISTRY = new SimpleTemplateRegistry();
+
}
public static void setInvokerTemplate( String name ) {
@@ -60,8 +65,14 @@ public static void setInvokerTemplate( String name ) {
public static synchronized TemplateRegistry getRuleTemplateRegistry(ClassLoader cl) {
if ( !RULE_REGISTRY.contains( "rules" ) ) {
+ InputStream javaRuleMvelStream = JavaRuleBuilderHelper.class.getResourceAsStream( JAVA_RULE_MVEL );
RULE_REGISTRY.addNamedTemplate( "rules",
- TemplateCompiler.compileTemplate( JavaRuleBuilderHelper.class.getResourceAsStream( JAVA_RULE_MVEL ) ) );
+ TemplateCompiler.compileTemplate( javaRuleMvelStream ) );
+ try {
+ javaRuleMvelStream.close();
+ } catch ( IOException ex ) {
+ logger.debug( "Failed to close stream!", ex );
+ }
TemplateRuntime.execute( RULE_REGISTRY.getNamedTemplate( "rules" ),
null,
RULE_REGISTRY );
@@ -72,8 +83,14 @@ public static synchronized TemplateRegistry getRuleTemplateRegistry(ClassLoader
public static synchronized TemplateRegistry getInvokerTemplateRegistry(ClassLoader cl) {
if ( !INVOKER_REGISTRY.contains( "invokers" ) ) {
+ InputStream javaInvokersMvelStream = JavaRuleBuilderHelper.class.getResourceAsStream( JAVA_INVOKERS_MVEL );
INVOKER_REGISTRY.addNamedTemplate( "invokers",
- TemplateCompiler.compileTemplate( JavaRuleBuilderHelper.class.getResourceAsStream( JAVA_INVOKERS_MVEL ) ) );
+ TemplateCompiler.compileTemplate( javaInvokersMvelStream ) );
+ try {
+ javaInvokersMvelStream.close();
+ } catch ( IOException ex ) {
+ logger.debug( "Failed to close stream!", ex );
+ }
TemplateRuntime.execute( INVOKER_REGISTRY.getNamedTemplate( "invokers" ),
null,
INVOKER_REGISTRY );
|
a57debf4fe3380f7ad9d46db90d083d979541ebb
|
intellij-community
|
fragments with differences in comments only are- considered equivalent--
|
a
|
https://github.com/JetBrains/intellij-community
|
diff --git a/codeInsight/openapi/com/intellij/codeInsight/PsiEquivalenceUtil.java b/codeInsight/openapi/com/intellij/codeInsight/PsiEquivalenceUtil.java
index 2c103ee2b9ab6..ed5db910a0736 100644
--- a/codeInsight/openapi/com/intellij/codeInsight/PsiEquivalenceUtil.java
+++ b/codeInsight/openapi/com/intellij/codeInsight/PsiEquivalenceUtil.java
@@ -7,6 +7,7 @@
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiReference;
import com.intellij.psi.PsiWhiteSpace;
+import com.intellij.psi.PsiComment;
import com.intellij.psi.util.PsiTreeUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
@@ -61,7 +62,7 @@ private static PsiElement[] getFilteredChildren(PsiElement element1) {
PsiElement[] children1 = element1.getChildren();
ArrayList<PsiElement> array = new ArrayList<PsiElement>();
for (PsiElement child : children1) {
- if (!(child instanceof PsiWhiteSpace)) {
+ if (!(child instanceof PsiWhiteSpace) && !(child instanceof PsiComment)) {
array.add(child);
}
}
@@ -95,7 +96,7 @@ private static void addRangeDuplicates(final PsiElement scope,
i = j + 1;
continue NextChild;
}
- next = PsiTreeUtil.skipSiblingsForward(next, new Class[]{PsiWhiteSpace.class});
+ next = PsiTreeUtil.skipSiblingsForward(next, PsiWhiteSpace.class);
}
while (true);
|
23f836454d9c5a495111b068f45d6aa89a2a724a
|
hbase
|
HADOOP-1424. TestHBaseCluster fails with- IllegalMonitorStateException. Fix regression introduced by HADOOP-1397.--git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk/src/contrib/hbase@541095 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hbase
|
diff --git a/CHANGES.txt b/CHANGES.txt
index 65fd5cb1c100..092e9a0505a4 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -14,3 +14,5 @@ Trunk (unreleased changes)
'Performance Evaluation', etc.
7. HADOOP-1420, HADOOP-1423. Findbugs changes, remove reference to removed
class HLocking.
+ 8. HADOOP-1424. TestHBaseCluster fails with IllegalMonitorStateException. Fix
+ regression introduced by HADOOP-1397.
diff --git a/src/java/org/apache/hadoop/hbase/HLocking.java b/src/java/org/apache/hadoop/hbase/HLocking.java
new file mode 100644
index 000000000000..8031caf99b58
--- /dev/null
+++ b/src/java/org/apache/hadoop/hbase/HLocking.java
@@ -0,0 +1,101 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * HLocking is a set of lock primitives that does not rely on a
+ * particular thread holding the monitor for an object. This is
+ * especially important when a lock must persist over multiple RPC's
+ * since there is no guarantee that the same Server thread will handle
+ * all the RPC's until the lock is released. Not requiring that the locker
+ * thread is same as unlocking thread is the key distinction between this
+ * class and {@link java.util.concurrent.locks.ReentrantReadWriteLock}.
+ *
+ * <p>For each independent entity that needs locking, create a new HLocking
+ * instance.
+ */
+public class HLocking {
+ private Integer mutex;
+
+ // If lockers == 0, the lock is unlocked
+ // If lockers > 0, locked for read
+ // If lockers == -1 locked for write
+
+ private AtomicInteger lockers;
+
+ /** Constructor */
+ public HLocking() {
+ this.mutex = new Integer(0);
+ this.lockers = new AtomicInteger(0);
+ }
+
+ /**
+ * Caller needs the nonexclusive read-lock
+ */
+ public void obtainReadLock() {
+ synchronized(mutex) {
+ while(lockers.get() < 0) {
+ try {
+ mutex.wait();
+ } catch(InterruptedException ie) {
+ }
+ }
+ lockers.incrementAndGet();
+ mutex.notifyAll();
+ }
+ }
+
+ /**
+ * Caller is finished with the nonexclusive read-lock
+ */
+ public void releaseReadLock() {
+ synchronized(mutex) {
+ if(lockers.decrementAndGet() < 0) {
+ throw new IllegalStateException("lockers: " + lockers);
+ }
+ mutex.notifyAll();
+ }
+ }
+
+ /**
+ * Caller needs the exclusive write-lock
+ */
+ public void obtainWriteLock() {
+ synchronized(mutex) {
+ while(!lockers.compareAndSet(0, -1)) {
+ try {
+ mutex.wait();
+ } catch (InterruptedException ie) {
+ }
+ }
+ mutex.notifyAll();
+ }
+ }
+
+ /**
+ * Caller is finished with the write lock
+ */
+ public void releaseWriteLock() {
+ synchronized(mutex) {
+ if(!lockers.compareAndSet(-1, 0)) {
+ throw new IllegalStateException("lockers: " + lockers);
+ }
+ mutex.notifyAll();
+ }
+ }
+}
diff --git a/src/java/org/apache/hadoop/hbase/HMemcache.java b/src/java/org/apache/hadoop/hbase/HMemcache.java
index 87616e25f2d1..740caf1d323c 100644
--- a/src/java/org/apache/hadoop/hbase/HMemcache.java
+++ b/src/java/org/apache/hadoop/hbase/HMemcache.java
@@ -15,14 +15,17 @@
*/
package org.apache.hadoop.hbase;
-import org.apache.hadoop.io.*;
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.SortedMap;
+import java.util.TreeMap;
+import java.util.Vector;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-
-import java.io.*;
-import java.util.*;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.Text;
/*******************************************************************************
* The HMemcache holds in-memory modifications to the HRegion. This is really a
@@ -39,7 +42,7 @@ public class HMemcache {
TreeMap<HStoreKey, BytesWritable> snapshot = null;
- ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
+ private final HLocking lock = new HLocking();
public HMemcache() {
super();
@@ -70,7 +73,7 @@ public Snapshot() {
public Snapshot snapshotMemcacheForLog(HLog log) throws IOException {
Snapshot retval = new Snapshot();
- this.lock.writeLock().lock();
+ this.lock.obtainWriteLock();
try {
if(snapshot != null) {
throw new IOException("Snapshot in progress!");
@@ -99,7 +102,7 @@ public Snapshot snapshotMemcacheForLog(HLog log) throws IOException {
return retval;
} finally {
- this.lock.writeLock().unlock();
+ this.lock.releaseWriteLock();
}
}
@@ -109,7 +112,7 @@ public Snapshot snapshotMemcacheForLog(HLog log) throws IOException {
* Modifying the structure means we need to obtain a writelock.
*/
public void deleteSnapshot() throws IOException {
- this.lock.writeLock().lock();
+ this.lock.obtainWriteLock();
try {
if(snapshot == null) {
@@ -135,7 +138,7 @@ public void deleteSnapshot() throws IOException {
}
} finally {
- this.lock.writeLock().unlock();
+ this.lock.releaseWriteLock();
}
}
@@ -145,14 +148,14 @@ public void deleteSnapshot() throws IOException {
* Operation uses a write lock.
*/
public void add(Text row, TreeMap<Text, BytesWritable> columns, long timestamp) {
- this.lock.writeLock().lock();
+ this.lock.obtainWriteLock();
try {
for (Map.Entry<Text, BytesWritable> es: columns.entrySet()) {
HStoreKey key = new HStoreKey(row, es.getKey(), timestamp);
memcache.put(key, es.getValue());
}
} finally {
- this.lock.writeLock().unlock();
+ this.lock.releaseWriteLock();
}
}
@@ -163,7 +166,7 @@ public void add(Text row, TreeMap<Text, BytesWritable> columns, long timestamp)
*/
public BytesWritable[] get(HStoreKey key, int numVersions) {
Vector<BytesWritable> results = new Vector<BytesWritable>();
- this.lock.readLock().lock();
+ this.lock.obtainReadLock();
try {
Vector<BytesWritable> result = get(memcache, key, numVersions-results.size());
results.addAll(0, result);
@@ -180,7 +183,7 @@ public BytesWritable[] get(HStoreKey key, int numVersions) {
return (results.size() == 0)?
null: results.toArray(new BytesWritable[results.size()]);
} finally {
- this.lock.readLock().unlock();
+ this.lock.releaseReadLock();
}
}
@@ -192,7 +195,7 @@ public BytesWritable[] get(HStoreKey key, int numVersions) {
*/
public TreeMap<Text, BytesWritable> getFull(HStoreKey key) {
TreeMap<Text, BytesWritable> results = new TreeMap<Text, BytesWritable>();
- this.lock.readLock().lock();
+ this.lock.obtainReadLock();
try {
internalGetFull(memcache, key, results);
for(int i = history.size()-1; i >= 0; i--) {
@@ -202,7 +205,7 @@ public TreeMap<Text, BytesWritable> getFull(HStoreKey key) {
return results;
} finally {
- this.lock.readLock().unlock();
+ this.lock.releaseReadLock();
}
}
@@ -275,7 +278,7 @@ public HMemcacheScanner(long timestamp, Text targetCols[], Text firstRow)
super(timestamp, targetCols);
- lock.readLock().lock();
+ lock.obtainReadLock();
try {
this.backingMaps = new TreeMap[history.size() + 1];
@@ -367,7 +370,7 @@ public void close() {
}
} finally {
- lock.readLock().unlock();
+ lock.releaseReadLock();
scannerClosed = true;
}
}
diff --git a/src/java/org/apache/hadoop/hbase/HRegion.java b/src/java/org/apache/hadoop/hbase/HRegion.java
index b5d000a19735..3cdb8f4cd0af 100644
--- a/src/java/org/apache/hadoop/hbase/HRegion.java
+++ b/src/java/org/apache/hadoop/hbase/HRegion.java
@@ -23,7 +23,6 @@
import java.io.*;
import java.util.*;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
/**
* HRegion stores data for a certain region of a table. It stores all columns
@@ -283,7 +282,7 @@ public WriteState() {
int maxUnflushedEntries = 0;
int compactionThreshold = 0;
- private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
+ private final HLocking lock = new HLocking();
//////////////////////////////////////////////////////////////////////////////
// Constructor
@@ -398,7 +397,7 @@ public void closeAndDelete() throws IOException {
* time-sensitive thread.
*/
public Vector<HStoreFile> close() throws IOException {
- lock.writeLock().lock();
+ lock.obtainWriteLock();
try {
boolean shouldClose = false;
synchronized(writestate) {
@@ -438,7 +437,7 @@ public Vector<HStoreFile> close() throws IOException {
}
}
} finally {
- lock.writeLock().unlock();
+ lock.releaseWriteLock();
}
}
@@ -614,7 +613,7 @@ public FileSystem getFilesystem() {
* @return - true if the region should be split
*/
public boolean needsSplit(Text midKey) {
- lock.readLock().lock();
+ lock.obtainReadLock();
try {
Text key = new Text();
@@ -632,7 +631,7 @@ public boolean needsSplit(Text midKey) {
return (maxSize > (DESIRED_MAX_FILE_SIZE + (DESIRED_MAX_FILE_SIZE / 2)));
} finally {
- lock.readLock().unlock();
+ lock.releaseReadLock();
}
}
@@ -641,7 +640,7 @@ public boolean needsSplit(Text midKey) {
*/
public boolean needsCompaction() {
boolean needsCompaction = false;
- lock.readLock().lock();
+ lock.obtainReadLock();
try {
for(Iterator<HStore> i = stores.values().iterator(); i.hasNext(); ) {
if(i.next().getNMaps() > compactionThreshold) {
@@ -650,7 +649,7 @@ public boolean needsCompaction() {
}
}
} finally {
- lock.readLock().unlock();
+ lock.releaseReadLock();
}
return needsCompaction;
}
@@ -670,7 +669,7 @@ public boolean needsCompaction() {
*/
public boolean compactStores() throws IOException {
boolean shouldCompact = false;
- lock.readLock().lock();
+ lock.obtainReadLock();
try {
synchronized(writestate) {
if((! writestate.writesOngoing)
@@ -683,32 +682,30 @@ public boolean compactStores() throws IOException {
}
}
} finally {
- lock.readLock().unlock();
+ lock.releaseReadLock();
}
if(! shouldCompact) {
LOG.info("not compacting region " + this.regionInfo.regionName);
- return false;
-
- } else {
- lock.writeLock().lock();
- try {
- LOG.info("starting compaction on region " + this.regionInfo.regionName);
- for(Iterator<HStore> it = stores.values().iterator(); it.hasNext(); ) {
- HStore store = it.next();
- store.compact();
- }
- LOG.info("compaction completed on region " + this.regionInfo.regionName);
- return true;
-
- } finally {
- synchronized(writestate) {
- writestate.writesOngoing = false;
- recentCommits = 0;
- writestate.notifyAll();
- }
- lock.writeLock().unlock();
+ return false;
+ }
+ lock.obtainWriteLock();
+ try {
+ LOG.info("starting compaction on region " + this.regionInfo.regionName);
+ for (Iterator<HStore> it = stores.values().iterator(); it.hasNext();) {
+ HStore store = it.next();
+ store.compact();
+ }
+ LOG.info("compaction completed on region " + this.regionInfo.regionName);
+ return true;
+
+ } finally {
+ synchronized (writestate) {
+ writestate.writesOngoing = false;
+ recentCommits = 0;
+ writestate.notifyAll();
}
+ lock.releaseWriteLock();
}
}
@@ -928,7 +925,7 @@ public BytesWritable[] get(Text row, Text column, long timestamp, int numVersion
private BytesWritable[] get(HStoreKey key, int numVersions) throws IOException {
- lock.readLock().lock();
+ lock.obtainReadLock();
try {
// Check the memcache
@@ -948,7 +945,7 @@ private BytesWritable[] get(HStoreKey key, int numVersions) throws IOException {
return targetStore.get(key, numVersions);
} finally {
- lock.readLock().unlock();
+ lock.releaseReadLock();
}
}
@@ -965,7 +962,7 @@ private BytesWritable[] get(HStoreKey key, int numVersions) throws IOException {
public TreeMap<Text, BytesWritable> getFull(Text row) throws IOException {
HStoreKey key = new HStoreKey(row, System.currentTimeMillis());
- lock.readLock().lock();
+ lock.obtainReadLock();
try {
TreeMap<Text, BytesWritable> memResult = memcache.getFull(key);
for(Iterator<Text> it = stores.keySet().iterator(); it.hasNext(); ) {
@@ -976,7 +973,7 @@ public TreeMap<Text, BytesWritable> getFull(Text row) throws IOException {
return memResult;
} finally {
- lock.readLock().unlock();
+ lock.releaseReadLock();
}
}
@@ -985,7 +982,7 @@ public TreeMap<Text, BytesWritable> getFull(Text row) throws IOException {
* columns. This Iterator must be closed by the caller.
*/
public HInternalScannerInterface getScanner(Text[] cols, Text firstRow) throws IOException {
- lock.readLock().lock();
+ lock.obtainReadLock();
try {
TreeSet<Text> families = new TreeSet<Text>();
for(int i = 0; i < cols.length; i++) {
@@ -1001,7 +998,7 @@ public HInternalScannerInterface getScanner(Text[] cols, Text firstRow) throws I
return new HScanner(cols, firstRow, memcache, storelist);
} finally {
- lock.readLock().unlock();
+ lock.releaseReadLock();
}
}
@@ -1024,11 +1021,11 @@ public long startUpdate(Text row) throws IOException {
// We obtain a per-row lock, so other clients will
// block while one client performs an update.
- lock.readLock().lock();
+ lock.obtainReadLock();
try {
return obtainLock(row);
} finally {
- lock.readLock().unlock();
+ lock.releaseReadLock();
}
}
diff --git a/src/java/org/apache/hadoop/hbase/HStore.java b/src/java/org/apache/hadoop/hbase/HStore.java
index aa3b64d6cc65..7669747b5219 100644
--- a/src/java/org/apache/hadoop/hbase/HStore.java
+++ b/src/java/org/apache/hadoop/hbase/HStore.java
@@ -23,7 +23,6 @@
import java.util.Random;
import java.util.TreeMap;
import java.util.Vector;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -64,7 +63,7 @@ public class HStore {
Integer compactLock = 0;
Integer flushLock = 0;
- private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
+ private final HLocking lock = new HLocking();
TreeMap<Long, MapFile.Reader> maps = new TreeMap<Long, MapFile.Reader>();
TreeMap<Long, HStoreFile> mapFiles = new TreeMap<Long, HStoreFile>();
@@ -237,7 +236,7 @@ public HStore(Path dir, Text regionName, Text colFamily, int maxVersions,
/** Turn off all the MapFile readers */
public void close() throws IOException {
LOG.info("closing HStore for " + this.regionName + "/" + this.colFamily);
- this.lock.writeLock().lock();
+ this.lock.obtainWriteLock();
try {
for (MapFile.Reader map: maps.values()) {
map.close();
@@ -247,7 +246,7 @@ public void close() throws IOException {
LOG.info("HStore closed for " + this.regionName + "/" + this.colFamily);
} finally {
- this.lock.writeLock().unlock();
+ this.lock.releaseWriteLock();
}
}
@@ -319,7 +318,7 @@ Vector<HStoreFile> flushCacheHelper(TreeMap<HStoreKey, BytesWritable> inputCache
// C. Finally, make the new MapFile available.
if(addToAvailableMaps) {
- this.lock.writeLock().lock();
+ this.lock.obtainWriteLock();
try {
maps.put(logCacheFlushId, new MapFile.Reader(fs, mapfile.toString(), conf));
@@ -330,7 +329,7 @@ Vector<HStoreFile> flushCacheHelper(TreeMap<HStoreKey, BytesWritable> inputCache
}
} finally {
- this.lock.writeLock().unlock();
+ this.lock.releaseWriteLock();
}
}
return getAllMapFiles();
@@ -338,12 +337,12 @@ Vector<HStoreFile> flushCacheHelper(TreeMap<HStoreKey, BytesWritable> inputCache
}
public Vector<HStoreFile> getAllMapFiles() {
- this.lock.readLock().lock();
+ this.lock.obtainReadLock();
try {
return new Vector<HStoreFile>(mapFiles.values());
} finally {
- this.lock.readLock().unlock();
+ this.lock.releaseReadLock();
}
}
@@ -385,12 +384,12 @@ void compactHelper(boolean deleteSequenceInfo) throws IOException {
// Grab a list of files to compact.
Vector<HStoreFile> toCompactFiles = null;
- this.lock.writeLock().lock();
+ this.lock.obtainWriteLock();
try {
toCompactFiles = new Vector<HStoreFile>(mapFiles.values());
} finally {
- this.lock.writeLock().unlock();
+ this.lock.releaseWriteLock();
}
// Compute the max-sequenceID seen in any of the to-be-compacted TreeMaps
@@ -627,7 +626,7 @@ void processReadyCompaction() throws IOException {
Path curCompactStore = HStoreFile.getHStoreDir(compactdir, regionName, colFamily);
- this.lock.writeLock().lock();
+ this.lock.obtainWriteLock();
try {
Path doneFile = new Path(curCompactStore, COMPACTION_DONE);
if(! fs.exists(doneFile)) {
@@ -744,7 +743,7 @@ void processReadyCompaction() throws IOException {
// 7. Releasing the write-lock
- this.lock.writeLock().unlock();
+ this.lock.releaseWriteLock();
}
}
@@ -760,7 +759,7 @@ void processReadyCompaction() throws IOException {
* The returned object should map column names to byte arrays (byte[]).
*/
public void getFull(HStoreKey key, TreeMap<Text, BytesWritable> results) throws IOException {
- this.lock.readLock().lock();
+ this.lock.obtainReadLock();
try {
MapFile.Reader[] maparray
= maps.values().toArray(new MapFile.Reader[maps.size()]);
@@ -789,7 +788,7 @@ public void getFull(HStoreKey key, TreeMap<Text, BytesWritable> results) throws
}
} finally {
- this.lock.readLock().unlock();
+ this.lock.releaseReadLock();
}
}
@@ -805,7 +804,7 @@ public BytesWritable[] get(HStoreKey key, int numVersions) throws IOException {
}
Vector<BytesWritable> results = new Vector<BytesWritable>();
- this.lock.readLock().lock();
+ this.lock.obtainReadLock();
try {
MapFile.Reader[] maparray
= maps.values().toArray(new MapFile.Reader[maps.size()]);
@@ -846,7 +845,7 @@ public BytesWritable[] get(HStoreKey key, int numVersions) throws IOException {
}
} finally {
- this.lock.readLock().unlock();
+ this.lock.releaseReadLock();
}
}
@@ -862,7 +861,7 @@ public long getLargestFileSize(Text midKey) {
return maxSize;
}
- this.lock.readLock().lock();
+ this.lock.obtainReadLock();
try {
long mapIndex = 0L;
@@ -889,7 +888,7 @@ public long getLargestFileSize(Text midKey) {
LOG.warn(e);
} finally {
- this.lock.readLock().unlock();
+ this.lock.releaseReadLock();
}
return maxSize;
}
@@ -898,12 +897,12 @@ public long getLargestFileSize(Text midKey) {
* @return Returns the number of map files currently in use
*/
public int getNMaps() {
- this.lock.readLock().lock();
+ this.lock.obtainReadLock();
try {
return maps.size();
} finally {
- this.lock.readLock().unlock();
+ this.lock.releaseReadLock();
}
}
@@ -945,7 +944,7 @@ public HStoreScanner(long timestamp, Text[] targetCols, Text firstRow)
super(timestamp, targetCols);
- lock.readLock().lock();
+ lock.obtainReadLock();
try {
this.readers = new MapFile.Reader[mapFiles.size()];
@@ -1060,7 +1059,7 @@ public void close() {
}
} finally {
- lock.readLock().unlock();
+ lock.releaseReadLock();
scannerClosed = true;
}
}
|
1135c8ce7a43dcbb05c678e0f032d6b646104066
|
kotlin
|
KT-737 compareTo() intrinsic--
|
c
|
https://github.com/JetBrains/kotlin
|
diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/JetTypeMapper.java b/compiler/backend/src/org/jetbrains/jet/codegen/JetTypeMapper.java
index c51d21d780190..5649fe11f01ad 100644
--- a/compiler/backend/src/org/jetbrains/jet/codegen/JetTypeMapper.java
+++ b/compiler/backend/src/org/jetbrains/jet/codegen/JetTypeMapper.java
@@ -320,7 +320,8 @@ else if (functionParent instanceof ClassDescriptor) {
ClassDescriptor containingClass = (ClassDescriptor) functionParent;
boolean isInterface = CodegenUtil.isInterface(containingClass);
OwnerKind kind1 = isInterface && superCall ? OwnerKind.TRAIT_IMPL : OwnerKind.IMPLEMENTATION;
- owner = mapType(containingClass.getDefaultType(), kind1).getInternalName();
+ Type type = mapType(containingClass.getDefaultType(), kind1);
+ owner = type.getInternalName();
invokeOpcode = isInterface
? (superCall ? Opcodes.INVOKESTATIC : Opcodes.INVOKEINTERFACE)
: (superCall ? Opcodes.INVOKESPECIAL : Opcodes.INVOKEVIRTUAL);
diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/CompareTo.java b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/CompareTo.java
new file mode 100644
index 0000000000000..991d84c12317e
--- /dev/null
+++ b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/CompareTo.java
@@ -0,0 +1,45 @@
+package org.jetbrains.jet.codegen.intrinsics;
+
+import com.intellij.psi.PsiElement;
+import org.jetbrains.annotations.Nullable;
+import org.jetbrains.jet.codegen.ExpressionCodegen;
+import org.jetbrains.jet.codegen.JetTypeMapper;
+import org.jetbrains.jet.codegen.StackValue;
+import org.jetbrains.jet.lang.psi.JetExpression;
+import org.objectweb.asm.Type;
+import org.objectweb.asm.commons.InstructionAdapter;
+
+import java.util.List;
+
+/**
+ * @author alex.tkachman
+ */
+public class CompareTo implements IntrinsicMethod {
+ @Override
+ public StackValue generate(ExpressionCodegen codegen, InstructionAdapter v, Type expectedType, @Nullable PsiElement element, @Nullable List<JetExpression> arguments, StackValue receiver) {
+ assert arguments != null;
+ receiver.put(receiver.type, v);
+ codegen.gen(arguments.get(0), receiver.type);
+ if(receiver.type == Type.BYTE_TYPE || receiver.type == Type.SHORT_TYPE || receiver.type == Type.CHAR_TYPE)
+ v.sub(Type.INT_TYPE);
+ else if(receiver.type == Type.INT_TYPE) {
+ v.invokestatic("jet/runtime/Intrinsics", "compare", "(II)I");
+ }
+ else if(receiver.type == Type.BOOLEAN_TYPE) {
+ v.invokestatic("jet/runtime/Intrinsics", "compare", "(ZZ)I");
+ }
+ else if(receiver.type == Type.LONG_TYPE) {
+ v.invokestatic("jet/runtime/Intrinsics", "compare", "(JJ)I");
+ }
+ else if(receiver.type == Type.FLOAT_TYPE) {
+ v.invokestatic("java/lang/Float", "compare", "(FF)I");
+ }
+ else if(receiver.type == Type.DOUBLE_TYPE) {
+ v.invokestatic("java/lang/Double", "compare", "(DD)I");
+ }
+ else {
+ throw new UnsupportedOperationException();
+ }
+ return StackValue.onStack(Type.INT_TYPE);
+ }
+}
diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/IntrinsicMethods.java b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/IntrinsicMethods.java
index 33e107c2143cb..6ccff8d02b34a 100644
--- a/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/IntrinsicMethods.java
+++ b/compiler/backend/src/org/jetbrains/jet/codegen/intrinsics/IntrinsicMethods.java
@@ -97,6 +97,9 @@ public IntrinsicMethods(Project project, JetStandardLibrary stdlib) {
declareIntrinsicFunction("FloatIterator", "next", 0, ITERATOR_NEXT);
declareIntrinsicFunction("DoubleIterator", "next", 0, ITERATOR_NEXT);
+ for (String type : PRIMITIVE_NUMBER_TYPES) {
+ declareIntrinsicFunction(type, "compareTo", 1, new CompareTo());
+ }
// declareIntrinsicFunction("Any", "equals", 1, new Equals());
//
declareIntrinsicStringMethods();
diff --git a/compiler/tests/org/jetbrains/jet/codegen/PrimitiveTypesTest.java b/compiler/tests/org/jetbrains/jet/codegen/PrimitiveTypesTest.java
index bb38c2d8e1be3..6dfec5e3e79bb 100644
--- a/compiler/tests/org/jetbrains/jet/codegen/PrimitiveTypesTest.java
+++ b/compiler/tests/org/jetbrains/jet/codegen/PrimitiveTypesTest.java
@@ -315,6 +315,12 @@ public void testSafeNullable () throws Exception {
assertTrue(generateToText().contains("IFNULL"));
}
+ public void testKt737() throws Exception {
+ loadText("fun box() = if(3.compareTo(2) != 1) \"fail\" else if(5.byt.compareTo(10.lng) >= 0) \"fail\" else \"OK\"");
+ System.out.println(generateToText());
+ assertEquals("OK", blackBox());
+ }
+
public void testKt665() throws Exception {
loadText("fun f(x: Long, zzz: Long = 1): Long\n" +
"{\n" +
diff --git a/stdlib/src/jet/runtime/Intrinsics.java b/stdlib/src/jet/runtime/Intrinsics.java
index d162e3634d644..19488b98ba24e 100644
--- a/stdlib/src/jet/runtime/Intrinsics.java
+++ b/stdlib/src/jet/runtime/Intrinsics.java
@@ -17,6 +17,18 @@ public static void throwNpe() {
throw new JetNullPointerException();
}
+ public static int compare(long thisVal, long anotherVal) {
+ return (thisVal<anotherVal ? -1 : (thisVal==anotherVal ? 0 : 1));
+ }
+
+ public static int compare(int thisVal, int anotherVal) {
+ return (thisVal<anotherVal ? -1 : (thisVal==anotherVal ? 0 : 1));
+ }
+
+ public static int compare(boolean thisVal, boolean anotherVal) {
+ return (thisVal == anotherVal ? 0 : (anotherVal ? 1 : -1));
+ }
+
private static Throwable sanitizeStackTrace(Throwable throwable) {
StackTraceElement[] stackTrace = throwable.getStackTrace();
ArrayList<StackTraceElement> list = new ArrayList<StackTraceElement>();
|
3bb3597920c9542c86869a4211e3273f135e1c56
|
camel
|
MR-187: Added more unit tests.--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@824320 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/camel
|
diff --git a/camel-core/src/main/java/org/apache/camel/converter/jaxp/DomConverter.java b/camel-core/src/main/java/org/apache/camel/converter/jaxp/DomConverter.java
index a10179edfde81..ecb7199fb9802 100644
--- a/camel-core/src/main/java/org/apache/camel/converter/jaxp/DomConverter.java
+++ b/camel-core/src/main/java/org/apache/camel/converter/jaxp/DomConverter.java
@@ -17,7 +17,6 @@
package org.apache.camel.converter.jaxp;
import org.w3c.dom.Attr;
-import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
@@ -60,9 +59,6 @@ private static void append(StringBuffer buffer, Node node) {
} else if (node instanceof Element) {
Element element = (Element) node;
append(buffer, element.getChildNodes());
- } else if (node instanceof Document) {
- Document doc = (Document) node;
- append(buffer, doc.getChildNodes());
}
}
}
diff --git a/camel-core/src/test/java/org/apache/camel/converter/jaxp/DomConverterTest.java b/camel-core/src/test/java/org/apache/camel/converter/jaxp/DomConverterTest.java
new file mode 100644
index 0000000000000..3404592b7a702
--- /dev/null
+++ b/camel-core/src/test/java/org/apache/camel/converter/jaxp/DomConverterTest.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.converter.jaxp;
+
+import org.w3c.dom.Document;
+
+import org.apache.camel.ContextTestSupport;
+
+/**
+ * @version $Revision$
+ */
+public class DomConverterTest extends ContextTestSupport {
+
+ public void testDomConverter() throws Exception {
+ Document document = context.getTypeConverter().convertTo(Document.class, "<?xml version=\"1.0\" encoding=\"UTF-8\"?><hello>world!</hello>");
+
+ String s = DomConverter.toString(document.getChildNodes());
+ assertEquals("world!", s);
+ }
+
+}
diff --git a/camel-core/src/test/java/org/apache/camel/converter/jaxp/XmlConverterTest.java b/camel-core/src/test/java/org/apache/camel/converter/jaxp/XmlConverterTest.java
new file mode 100644
index 0000000000000..0b6c5d7b05452
--- /dev/null
+++ b/camel-core/src/test/java/org/apache/camel/converter/jaxp/XmlConverterTest.java
@@ -0,0 +1,354 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.converter.jaxp;
+
+import java.io.File;
+import java.io.InputStream;
+import java.io.Reader;
+import java.nio.ByteBuffer;
+import javax.xml.transform.Source;
+import javax.xml.transform.TransformerException;
+import javax.xml.transform.dom.DOMSource;
+import javax.xml.transform.sax.SAXSource;
+import javax.xml.transform.stream.StreamSource;
+
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.Node;
+
+import org.apache.camel.ContextTestSupport;
+import org.apache.camel.Exchange;
+import org.apache.camel.impl.DefaultExchange;
+
+/**
+ * @version $Revision$
+ */
+public class XmlConverterTest extends ContextTestSupport {
+
+ public void testToResultNoSource() throws Exception {
+ XmlConverter conv = new XmlConverter();
+ conv.toResult(null, null);
+ }
+
+ public void testToBytesSource() throws Exception {
+ XmlConverter conv = new XmlConverter();
+ BytesSource bs = conv.toSource("<foo>bar</foo>".getBytes());
+ assertNotNull(bs);
+ assertEquals("<foo>bar</foo>", new String(bs.getData()));
+ }
+
+ public void testToStringFromSourceNoSource() throws Exception {
+ XmlConverter conv = new XmlConverter();
+
+ Source source = null;
+ String out = conv.toString(source);
+ assertEquals(null, out);
+ }
+
+ public void testToStringWithBytesSource() throws Exception {
+ XmlConverter conv = new XmlConverter();
+
+ Source source = conv.toSource("<foo>bar</foo>".getBytes());
+ String out = conv.toString(source);
+ assertEquals("<foo>bar</foo>", out);
+ }
+
+ public void testToByteArrayWithExchange() throws Exception {
+ Exchange exchange = new DefaultExchange(context);
+ XmlConverter conv = new XmlConverter();
+
+ Source source = conv.toSource("<foo>bar</foo>".getBytes());
+ byte[] out = conv.toByteArray(source, exchange);
+ assertEquals("<foo>bar</foo>", new String(out));
+ }
+
+ public void testToByteArrayWithNoExchange() throws Exception {
+ XmlConverter conv = new XmlConverter();
+
+ Source source = conv.toSource("<foo>bar</foo>".getBytes());
+ byte[] out = conv.toByteArray(source, null);
+ assertEquals("<foo>bar</foo>", new String(out));
+ }
+
+ public void testToDomSourceByDomSource() throws Exception {
+ XmlConverter conv = new XmlConverter();
+
+ DOMSource source = conv.toDOMSource("<foo>bar</foo>");
+ DOMSource out = conv.toDOMSource(source);
+ assertSame(source, out);
+ }
+
+ public void testToDomSourceByStaxSource() throws Exception {
+ XmlConverter conv = new XmlConverter();
+
+ SAXSource source = conv.toSAXSource("<foo>bar</foo>");
+ DOMSource out = conv.toDOMSource(source);
+ assertNotSame(source, out);
+
+ assertEquals("<foo>bar</foo>", conv.toString(out));
+ }
+
+ public void testToDomSourceByCustomSource() throws Exception {
+ XmlConverter conv = new XmlConverter();
+
+ Source dummy = new Source() {
+ public String getSystemId() {
+ return null;
+ }
+
+ public void setSystemId(String s) {
+ }
+ };
+
+ DOMSource out = conv.toDOMSource(dummy);
+ assertNull(out);
+ }
+
+ public void testToSaxSourceByInputStream() throws Exception {
+ XmlConverter conv = new XmlConverter();
+
+ InputStream is = context.getTypeConverter().convertTo(InputStream.class, "<foo>bar</foo>");
+ SAXSource out = conv.toSAXSource(is);
+
+ assertNotNull(out);
+ assertEquals("<foo>bar</foo>", conv.toString(out));
+ }
+
+ public void testToSaxSourceByDomSource() throws Exception {
+ XmlConverter conv = new XmlConverter();
+
+ DOMSource source = conv.toDOMSource("<foo>bar</foo>");
+ SAXSource out = conv.toSAXSource(source);
+ assertNotSame(source, out);
+
+ assertEquals("<foo>bar</foo>", conv.toString(out));
+ }
+
+ public void testToSaxSourceByStaxSource() throws Exception {
+ XmlConverter conv = new XmlConverter();
+
+ SAXSource source = conv.toSAXSource("<foo>bar</foo>");
+ SAXSource out = conv.toSAXSource(source);
+ assertSame(source, out);
+ }
+
+ public void testToSaxSourceByCustomSource() throws Exception {
+ XmlConverter conv = new XmlConverter();
+
+ Source dummy = new Source() {
+ public String getSystemId() {
+ return null;
+ }
+
+ public void setSystemId(String s) {
+ }
+ };
+
+ SAXSource out = conv.toSAXSource(dummy);
+ assertNull(out);
+ }
+
+ public void testToStreamSourceByFile() throws Exception {
+ XmlConverter conv = new XmlConverter();
+
+ File file = new File("org/apache/camel/converter/stream/test.xml").getAbsoluteFile();
+ StreamSource source = conv.toStreamSource(file);
+ StreamSource out = conv.toStreamSource(source);
+ assertSame(source, out);
+ }
+
+ public void testToStreamSourceByStreamSource() throws Exception {
+ Exchange exchange = new DefaultExchange(context);
+ XmlConverter conv = new XmlConverter();
+
+ StreamSource source = conv.toStreamSource("<foo>bar</foo>".getBytes(), exchange);
+ StreamSource out = conv.toStreamSource(source);
+ assertSame(source, out);
+ }
+
+ public void testToStreamSourceByDomSource() throws Exception {
+ XmlConverter conv = new XmlConverter();
+
+ DOMSource source = conv.toDOMSource("<foo>bar</foo>");
+ StreamSource out = conv.toStreamSource(source);
+ assertNotSame(source, out);
+
+ assertEquals("<foo>bar</foo>", conv.toString(out));
+ }
+
+ public void testToStreamSourceByStaxSource() throws Exception {
+ XmlConverter conv = new XmlConverter();
+
+ SAXSource source = conv.toSAXSource("<foo>bar</foo>");
+ StreamSource out = conv.toStreamSource(source);
+ assertNotSame(source, out);
+
+ assertEquals("<foo>bar</foo>", conv.toString(out));
+ }
+
+ public void testToStreamSourceByCustomSource() throws Exception {
+ XmlConverter conv = new XmlConverter();
+
+ Source dummy = new Source() {
+ public String getSystemId() {
+ return null;
+ }
+
+ public void setSystemId(String s) {
+ }
+ };
+
+ StreamSource out = conv.toStreamSource(dummy);
+ assertNull(out);
+ }
+
+ public void testToStreamSourceByInputStream() throws Exception {
+ XmlConverter conv = new XmlConverter();
+
+ InputStream is = context.getTypeConverter().convertTo(InputStream.class, "<foo>bar</foo>");
+ StreamSource out = conv.toStreamSource(is);
+ assertNotNull(out);
+ assertEquals("<foo>bar</foo>", conv.toString(out));
+ }
+
+ public void testToStreamSourceByReader() throws Exception {
+ XmlConverter conv = new XmlConverter();
+
+ Reader reader = context.getTypeConverter().convertTo(Reader.class, "<foo>bar</foo>");
+ StreamSource out = conv.toStreamSource(reader);
+ assertNotNull(out);
+ assertEquals("<foo>bar</foo>", conv.toString(out));
+ }
+
+ public void testToStreamSourceByByteArray() throws Exception {
+ Exchange exchange = new DefaultExchange(context);
+ XmlConverter conv = new XmlConverter();
+
+ byte[] bytes = context.getTypeConverter().convertTo(byte[].class, "<foo>bar</foo>");
+ StreamSource out = conv.toStreamSource(bytes, exchange);
+ assertNotNull(out);
+ assertEquals("<foo>bar</foo>", conv.toString(out));
+ }
+
+ public void testToStreamSourceByByteBuffer() throws Exception {
+ Exchange exchange = new DefaultExchange(context);
+ XmlConverter conv = new XmlConverter();
+
+ ByteBuffer bytes = context.getTypeConverter().convertTo(ByteBuffer.class, "<foo>bar</foo>");
+ StreamSource out = conv.toStreamSource(bytes, exchange);
+ assertNotNull(out);
+ assertEquals("<foo>bar</foo>", conv.toString(out));
+ }
+
+ public void testToVariousUsingNull() throws Exception {
+ XmlConverter conv = new XmlConverter();
+
+ InputStream is = null;
+ assertNull(conv.toStreamSource(is));
+
+ Reader reader = null;
+ assertNull(conv.toStreamSource(reader));
+
+ File file = null;
+ assertNull(conv.toStreamSource(file));
+
+ byte[] bytes = null;
+ assertNull(conv.toStreamSource(bytes, null));
+
+ try {
+ Node node = null;
+ conv.toDOMElement(node);
+ fail("Should have thrown exception");
+ } catch (TransformerException e) {
+ // expected
+ }
+ }
+
+ public void testToReaderFromSource() throws Exception {
+ XmlConverter conv = new XmlConverter();
+ SAXSource source = conv.toSAXSource("<foo>bar</foo>");
+
+ Reader out = conv.toReaderFromSource(source);
+ assertNotNull(out);
+ assertEquals("<foo>bar</foo>", context.getTypeConverter().convertTo(String.class, out));
+ }
+
+ public void testToDomSourceFromInputStream() throws Exception {
+ XmlConverter conv = new XmlConverter();
+
+ InputStream is = context.getTypeConverter().convertTo(InputStream.class, "<foo>bar</foo>");
+ DOMSource out = conv.toDOMSource(is);
+ assertNotNull(out);
+ assertEquals("<foo>bar</foo>", context.getTypeConverter().convertTo(String.class, out));
+ }
+
+ public void testToDomElement() throws Exception {
+ XmlConverter conv = new XmlConverter();
+ SAXSource source = conv.toSAXSource("<foo>bar</foo>");
+
+ Element out = conv.toDOMElement(source);
+ assertNotNull(out);
+ assertEquals("<foo>bar</foo>", context.getTypeConverter().convertTo(String.class, out));
+ }
+
+ public void testToDomElementFromDocumentNode() throws Exception {
+ XmlConverter conv = new XmlConverter();
+ Document doc = context.getTypeConverter().convertTo(Document.class, "<?xml version=\"1.0\" encoding=\"UTF-8\"?><foo>bar</foo>");
+
+ Element out = conv.toDOMElement(doc);
+ assertNotNull(out);
+ assertEquals("<foo>bar</foo>", context.getTypeConverter().convertTo(String.class, out));
+ }
+
+ public void testToDomElementFromElementNode() throws Exception {
+ XmlConverter conv = new XmlConverter();
+ Document doc = context.getTypeConverter().convertTo(Document.class, "<?xml version=\"1.0\" encoding=\"UTF-8\"?><foo>bar</foo>");
+
+ Element out = conv.toDOMElement(doc.getDocumentElement());
+ assertNotNull(out);
+ assertEquals("<foo>bar</foo>", context.getTypeConverter().convertTo(String.class, out));
+ }
+
+ public void testToDocumentFromBytes() throws Exception {
+ XmlConverter conv = new XmlConverter();
+ byte[] bytes = "<?xml version=\"1.0\" encoding=\"UTF-8\"?><foo>bar</foo>".getBytes();
+
+ Document out = conv.toDOMDocument(bytes);
+ assertNotNull(out);
+ assertEquals("<foo>bar</foo>", context.getTypeConverter().convertTo(String.class, out));
+ }
+
+ public void testToDocumentFromInputStream() throws Exception {
+ XmlConverter conv = new XmlConverter();
+ InputStream is = context.getTypeConverter().convertTo(InputStream.class, "<?xml version=\"1.0\" encoding=\"UTF-8\"?><foo>bar</foo>");
+
+ Document out = conv.toDOMDocument(is);
+ assertNotNull(out);
+ assertEquals("<foo>bar</foo>", context.getTypeConverter().convertTo(String.class, out));
+ }
+
+ public void testToDocumentFromFile() throws Exception {
+ XmlConverter conv = new XmlConverter();
+ File file = new File("./src/test/resources/org/apache/camel/converter/stream/test.xml").getAbsoluteFile();
+
+ Document out = conv.toDOMDocument(file);
+ assertNotNull(out);
+ String s = context.getTypeConverter().convertTo(String.class, out);
+ assertTrue(s.contains("<firstName>James</firstName>"));
+ }
+
+}
|
e9cdb3d24e3eb440a9adbc96a83e7f5ac600863f
|
spring-framework
|
Polish JavaDoc--
|
p
|
https://github.com/spring-projects/spring-framework
|
diff --git a/spring-context/src/main/java/org/springframework/validation/DefaultMessageCodesResolver.java b/spring-context/src/main/java/org/springframework/validation/DefaultMessageCodesResolver.java
index 7c1a084be232..65a3f6efe46f 100644
--- a/spring-context/src/main/java/org/springframework/validation/DefaultMessageCodesResolver.java
+++ b/spring-context/src/main/java/org/springframework/validation/DefaultMessageCodesResolver.java
@@ -116,6 +116,7 @@ public void setPrefix(String prefix) {
* Specify the format for message codes built by this resolver.
* <p>The default is {@link Format#PREFIX_ERROR_CODE}.
* @since 3.2
+ * @see Format
*/
public void setMessageCodeFormatter(MessageCodeFormatter formatter) {
this.formatter = (formatter == null ? DEFAULT_FORMATTER : formatter);
@@ -206,7 +207,7 @@ protected String postProcessMessageCode(String code) {
/**
* Common message code formats.
*
- * @author Phil Webb
+ * @author Phillip Webb
* @author Chris Beams
* @since 3.2
* @see MessageCodeFormatter
diff --git a/spring-context/src/main/java/org/springframework/validation/MessageCodeFormatter.java b/spring-context/src/main/java/org/springframework/validation/MessageCodeFormatter.java
index 0ce9bdd5f7ce..dd7c3132915b 100644
--- a/spring-context/src/main/java/org/springframework/validation/MessageCodeFormatter.java
+++ b/spring-context/src/main/java/org/springframework/validation/MessageCodeFormatter.java
@@ -22,6 +22,7 @@
* @author Chris Beams
* @since 3.2
* @see DefaultMessageCodesResolver
+ * @see DefaultMessageCodesResolver.Format
*/
public interface MessageCodeFormatter {
|
690051f46cad97e4fcfb5073be63ea06e02ac01c
|
spring-framework
|
Add ability to customize message channels--@EnableWebSocketMessageBroker message channel configuration can now be-customized via WebSocketMessageBrokerConfigurer. It is necessary to-make this easy and even required as part of the basic configuration-since by default the message channels are backed by a thread pool of-size 1, not suitable for production use.--Issue: SPR-11023-
|
a
|
https://github.com/spring-projects/spring-framework
|
diff --git a/spring-messaging/src/main/java/org/springframework/messaging/simp/config/AbstractMessageBrokerConfiguration.java b/spring-messaging/src/main/java/org/springframework/messaging/simp/config/AbstractMessageBrokerConfiguration.java
index a12f3ca15802..6c5945ac0e6e 100644
--- a/spring-messaging/src/main/java/org/springframework/messaging/simp/config/AbstractMessageBrokerConfiguration.java
+++ b/spring-messaging/src/main/java/org/springframework/messaging/simp/config/AbstractMessageBrokerConfiguration.java
@@ -59,6 +59,10 @@ public abstract class AbstractMessageBrokerConfiguration {
"com.fasterxml.jackson.databind.ObjectMapper", AbstractMessageBrokerConfiguration.class.getClassLoader());
+ private ChannelRegistration clientInboundChannelRegistration;
+
+ private ChannelRegistration clientOutboundChannelRegistration;
+
private MessageBrokerRegistry brokerRegistry;
@@ -69,55 +73,117 @@ protected AbstractMessageBrokerConfiguration() {
}
- /**
- * An accessor for the {@link MessageBrokerRegistry} that ensures its one-time creation
- * and initialization through {@link #configureMessageBroker(MessageBrokerRegistry)}.
- */
- protected final MessageBrokerRegistry getBrokerRegistry() {
- if (this.brokerRegistry == null) {
- MessageBrokerRegistry registry = new MessageBrokerRegistry(clientOutboundChannel());
- configureMessageBroker(registry);
- this.brokerRegistry = registry;
- }
- return this.brokerRegistry;
- }
-
- /**
- * A hook for sub-classes to customize message broker configuration through the
- * provided {@link MessageBrokerRegistry} instance.
- */
- protected abstract void configureMessageBroker(MessageBrokerRegistry registry);
-
-
@Bean
public AbstractSubscribableChannel clientInboundChannel() {
- return new ExecutorSubscribableChannel(clientInboundChannelExecutor());
+ ExecutorSubscribableChannel channel = new ExecutorSubscribableChannel(clientInboundChannelExecutor());
+ ChannelRegistration r = getClientInboundChannelRegistration();
+ if (r.hasInterceptors()) {
+ channel.setInterceptors(r.getInterceptors());
+ }
+ return channel;
}
@Bean
public ThreadPoolTaskExecutor clientInboundChannelExecutor() {
- ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
+ TaskExecutorRegistration r = getClientInboundChannelRegistration().getTaskExecutorRegistration();
+ ThreadPoolTaskExecutor executor = (r != null) ? r.getTaskExecutor() : new ThreadPoolTaskExecutor();
executor.setThreadNamePrefix("ClientInboundChannel-");
return executor;
}
+ protected final ChannelRegistration getClientInboundChannelRegistration() {
+ if (this.clientInboundChannelRegistration == null) {
+ ChannelRegistration registration = new ChannelRegistration();
+ configureClientInboundChannel(registration);
+ this.clientInboundChannelRegistration = registration;
+ }
+ return this.clientInboundChannelRegistration;
+ }
+
+
+ /**
+ * A hook for sub-classes to customize the message channel for inbound messages
+ * from WebSocket clients.
+ */
+ protected abstract void configureClientInboundChannel(ChannelRegistration registration);
+
+
@Bean
public AbstractSubscribableChannel clientOutboundChannel() {
- return new ExecutorSubscribableChannel(clientOutboundChannelExecutor());
+ ExecutorSubscribableChannel channel = new ExecutorSubscribableChannel(clientOutboundChannelExecutor());
+ ChannelRegistration r = getClientOutboundChannelRegistration();
+ if (r.hasInterceptors()) {
+ channel.setInterceptors(r.getInterceptors());
+ }
+ return channel;
}
@Bean
public ThreadPoolTaskExecutor clientOutboundChannelExecutor() {
- ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
+ TaskExecutorRegistration r = getClientOutboundChannelRegistration().getTaskExecutorRegistration();
+ ThreadPoolTaskExecutor executor = (r != null) ? r.getTaskExecutor() : new ThreadPoolTaskExecutor();
executor.setThreadNamePrefix("ClientOutboundChannel-");
return executor;
}
+ protected final ChannelRegistration getClientOutboundChannelRegistration() {
+ if (this.clientOutboundChannelRegistration == null) {
+ ChannelRegistration registration = new ChannelRegistration();
+ configureClientOutboundChannel(registration);
+ this.clientOutboundChannelRegistration = registration;
+ }
+ return this.clientOutboundChannelRegistration;
+ }
+
+ /**
+ * A hook for sub-classes to customize the message channel for messages from
+ * the application or message broker to WebSocket clients.
+ */
+ protected abstract void configureClientOutboundChannel(ChannelRegistration registration);
+
@Bean
public AbstractSubscribableChannel brokerChannel() {
- return new ExecutorSubscribableChannel(); // synchronous
+ ChannelRegistration r = getBrokerRegistry().getBrokerChannelRegistration();
+ ExecutorSubscribableChannel channel;
+ if (r.hasTaskExecutor()) {
+ channel = new ExecutorSubscribableChannel(); // synchronous by default
+ }
+ else {
+ channel = new ExecutorSubscribableChannel(brokerChannelExecutor());
+ }
+ if (r.hasInterceptors()) {
+ channel.setInterceptors(r.getInterceptors());
+ }
+ return channel;
+ }
+
+ @Bean
+ public ThreadPoolTaskExecutor brokerChannelExecutor() {
+ TaskExecutorRegistration r = getBrokerRegistry().getBrokerChannelRegistration().getTaskExecutorRegistration();
+ ThreadPoolTaskExecutor executor = (r != null) ? r.getTaskExecutor() : new ThreadPoolTaskExecutor();
+ executor.setThreadNamePrefix("BrokerChannel-");
+ return executor;
+ }
+
+ /**
+ * An accessor for the {@link MessageBrokerRegistry} that ensures its one-time creation
+ * and initialization through {@link #configureMessageBroker(MessageBrokerRegistry)}.
+ */
+ protected final MessageBrokerRegistry getBrokerRegistry() {
+ if (this.brokerRegistry == null) {
+ MessageBrokerRegistry registry = new MessageBrokerRegistry(clientOutboundChannel());
+ configureMessageBroker(registry);
+ this.brokerRegistry = registry;
+ }
+ return this.brokerRegistry;
}
+ /**
+ * A hook for sub-classes to customize message broker configuration through the
+ * provided {@link MessageBrokerRegistry} instance.
+ */
+ protected abstract void configureMessageBroker(MessageBrokerRegistry registry);
+
@Bean
public SimpAnnotationMethodMessageHandler simpAnnotationMethodMessageHandler() {
diff --git a/spring-messaging/src/main/java/org/springframework/messaging/simp/config/ChannelRegistration.java b/spring-messaging/src/main/java/org/springframework/messaging/simp/config/ChannelRegistration.java
new file mode 100644
index 000000000000..52103d9dd07b
--- /dev/null
+++ b/spring-messaging/src/main/java/org/springframework/messaging/simp/config/ChannelRegistration.java
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2002-2013 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.messaging.simp.config;
+
+import org.springframework.messaging.support.channel.ChannelInterceptor;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+
+/**
+ * A registration class for customizing the configuration for a
+ * {@link org.springframework.messaging.MessageChannel}.
+ *
+ * @author Rossen Stoyanchev
+ * @since 4.0
+ */
+public class ChannelRegistration {
+
+ private TaskExecutorRegistration taskExecutorRegistration;
+
+ private List<ChannelInterceptor> interceptors = new ArrayList<ChannelInterceptor>();
+
+
+ /**
+ * Configure properties of the ThreadPoolTaskExecutor backing the message channel.
+ */
+ public TaskExecutorRegistration taskExecutor() {
+ this.taskExecutorRegistration = new TaskExecutorRegistration();
+ return this.taskExecutorRegistration;
+ }
+
+ /**
+ * Configure interceptors for the message channel.
+ */
+ public ChannelRegistration setInterceptors(ChannelInterceptor... interceptors) {
+ if (interceptors != null) {
+ this.interceptors.addAll(Arrays.asList(interceptors));
+ }
+ return this;
+ }
+
+
+ protected boolean hasTaskExecutor() {
+ return (this.taskExecutorRegistration != null);
+ }
+
+ protected TaskExecutorRegistration getTaskExecutorRegistration() {
+ return this.taskExecutorRegistration;
+ }
+
+ protected boolean hasInterceptors() {
+ return !this.interceptors.isEmpty();
+ }
+
+ protected List<ChannelInterceptor> getInterceptors() {
+ return this.interceptors;
+ }
+}
diff --git a/spring-messaging/src/main/java/org/springframework/messaging/simp/config/MessageBrokerRegistry.java b/spring-messaging/src/main/java/org/springframework/messaging/simp/config/MessageBrokerRegistry.java
index f19db80bc811..a250ff737497 100644
--- a/spring-messaging/src/main/java/org/springframework/messaging/simp/config/MessageBrokerRegistry.java
+++ b/spring-messaging/src/main/java/org/springframework/messaging/simp/config/MessageBrokerRegistry.java
@@ -42,6 +42,8 @@ public class MessageBrokerRegistry {
private String userDestinationPrefix;
+ private ChannelRegistration brokerChannelRegistration = new ChannelRegistration();
+
public MessageBrokerRegistry(MessageChannel clientOutboundChannel) {
Assert.notNull(clientOutboundChannel);
@@ -103,6 +105,17 @@ public MessageBrokerRegistry setUserDestinationPrefix(String destinationPrefix)
return this;
}
+ /**
+ * Customize the channel used to send messages from the application to the message
+ * broker. By default messages from the application to the message broker are sent
+ * synchronously, which means application code sending a message will find out
+ * if the message cannot be sent through an exception. However, this can be changed
+ * if the broker channel is configured here with task executor properties.
+ */
+ public ChannelRegistration configureBrokerChannel() {
+ return this.brokerChannelRegistration;
+ }
+
protected SimpleBrokerMessageHandler getSimpleBroker() {
initSimpleBrokerIfNecessary();
@@ -127,4 +140,8 @@ protected Collection<String> getApplicationDestinationPrefixes() {
protected String getUserDestinationPrefix() {
return this.userDestinationPrefix;
}
+
+ protected ChannelRegistration getBrokerChannelRegistration() {
+ return this.brokerChannelRegistration;
+ }
}
diff --git a/spring-messaging/src/main/java/org/springframework/messaging/simp/config/TaskExecutorRegistration.java b/spring-messaging/src/main/java/org/springframework/messaging/simp/config/TaskExecutorRegistration.java
new file mode 100644
index 000000000000..afa1ce81ff71
--- /dev/null
+++ b/spring-messaging/src/main/java/org/springframework/messaging/simp/config/TaskExecutorRegistration.java
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2002-2013 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.messaging.simp.config;
+
+import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
+
+/**
+ * A registration class for customizing the properties of {@link ThreadPoolTaskExecutor}.
+ *
+ * @author Rossen Stoyanchev
+ * @since 4.0
+ */
+public class TaskExecutorRegistration {
+
+ private int corePoolSize = 1;
+
+ private int maxPoolSize = Integer.MAX_VALUE;
+
+ private int keepAliveSeconds = 60;
+
+ private int queueCapacity = Integer.MAX_VALUE;
+
+
+ /**
+ * Set the ThreadPoolExecutor's core pool size.
+ * Default is 1.
+ */
+ public TaskExecutorRegistration corePoolSize(int corePoolSize) {
+ this.corePoolSize = corePoolSize;
+ return this;
+ }
+
+ /**
+ * Set the ThreadPoolExecutor's maximum pool size.
+ * Default is {@code Integer.MAX_VALUE}.
+ */
+ public TaskExecutorRegistration maxPoolSize(int maxPoolSize) {
+ this.maxPoolSize = maxPoolSize;
+ return this;
+ }
+
+ /**
+ * Set the ThreadPoolExecutor's keep-alive seconds.
+ * Default is 60.
+ */
+ public TaskExecutorRegistration keepAliveSeconds(int keepAliveSeconds) {
+ this.keepAliveSeconds = keepAliveSeconds;
+ return this;
+ }
+
+ /**
+ * Set the capacity for the ThreadPoolExecutor's BlockingQueue.
+ * Default is {@code Integer.MAX_VALUE}.
+ * <p>Any positive value will lead to a LinkedBlockingQueue instance;
+ * any other value will lead to a SynchronousQueue instance.
+ * @see java.util.concurrent.LinkedBlockingQueue
+ * @see java.util.concurrent.SynchronousQueue
+ */
+ public TaskExecutorRegistration queueCapacity(int queueCapacity) {
+ this.queueCapacity = queueCapacity;
+ return this;
+ }
+
+ protected ThreadPoolTaskExecutor getTaskExecutor() {
+ ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
+ executor.setCorePoolSize(this.corePoolSize);
+ executor.setMaxPoolSize(this.maxPoolSize);
+ executor.setKeepAliveSeconds(this.keepAliveSeconds);
+ executor.setQueueCapacity(this.queueCapacity);
+ return executor;
+ }
+
+}
diff --git a/spring-messaging/src/main/java/org/springframework/messaging/support/channel/ExecutorSubscribableChannel.java b/spring-messaging/src/main/java/org/springframework/messaging/support/channel/ExecutorSubscribableChannel.java
index 75d45c26839a..440f64ba55ff 100644
--- a/spring-messaging/src/main/java/org/springframework/messaging/support/channel/ExecutorSubscribableChannel.java
+++ b/spring-messaging/src/main/java/org/springframework/messaging/support/channel/ExecutorSubscribableChannel.java
@@ -57,6 +57,10 @@ public ExecutorSubscribableChannel(Executor executor) {
}
+ public Executor getExecutor() {
+ return this.executor;
+ }
+
@Override
protected boolean hasSubscription(MessageHandler handler) {
return this.handlers.contains(handler);
diff --git a/spring-messaging/src/test/java/org/springframework/messaging/simp/config/MessageBrokerConfigurationTests.java b/spring-messaging/src/test/java/org/springframework/messaging/simp/config/MessageBrokerConfigurationTests.java
index 70907050be7a..d803b336e306 100644
--- a/spring-messaging/src/test/java/org/springframework/messaging/simp/config/MessageBrokerConfigurationTests.java
+++ b/spring-messaging/src/test/java/org/springframework/messaging/simp/config/MessageBrokerConfigurationTests.java
@@ -36,13 +36,17 @@
import org.springframework.messaging.simp.stomp.StompHeaderAccessor;
import org.springframework.messaging.support.MessageBuilder;
import org.springframework.messaging.support.channel.AbstractSubscribableChannel;
+import org.springframework.messaging.support.channel.ChannelInterceptor;
+import org.springframework.messaging.support.channel.ChannelInterceptorAdapter;
import org.springframework.messaging.support.channel.ExecutorSubscribableChannel;
import org.springframework.messaging.support.converter.CompositeMessageConverter;
import org.springframework.messaging.support.converter.DefaultContentTypeResolver;
+import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.stereotype.Controller;
import org.springframework.util.MimeTypeUtils;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.*;
@@ -59,6 +63,8 @@ public class MessageBrokerConfigurationTests {
private AnnotationConfigApplicationContext cxtStompBroker;
+ private AnnotationConfigApplicationContext cxtCustomizedChannelConfig;
+
@Before
public void setupOnce() {
@@ -70,6 +76,10 @@ public void setupOnce() {
this.cxtStompBroker = new AnnotationConfigApplicationContext();
this.cxtStompBroker.register(TestStompMessageBrokerConfig.class);
this.cxtStompBroker.refresh();
+
+ this.cxtCustomizedChannelConfig = new AnnotationConfigApplicationContext();
+ this.cxtCustomizedChannelConfig.register(CustomizedChannelConfig.class);
+ this.cxtCustomizedChannelConfig.refresh();
}
@@ -96,6 +106,22 @@ public void clientInboundChannelWithStompBroker() {
assertTrue(values.contains(cxtStompBroker.getBean(StompBrokerRelayMessageHandler.class)));
}
+ @Test
+ public void clientInboundChannelCustomized() {
+
+ AbstractSubscribableChannel channel = this.cxtCustomizedChannelConfig.getBean(
+ "clientInboundChannel", AbstractSubscribableChannel.class);
+
+ assertEquals(1, channel.getInterceptors().size());
+
+ ThreadPoolTaskExecutor taskExecutor = this.cxtCustomizedChannelConfig.getBean(
+ "clientInboundChannelExecutor", ThreadPoolTaskExecutor.class);
+
+ assertEquals(11, taskExecutor.getCorePoolSize());
+ assertEquals(12, taskExecutor.getMaxPoolSize());
+ assertEquals(13, taskExecutor.getKeepAliveSeconds());
+ }
+
@Test
public void clientOutboundChannelUsedByAnnotatedMethod() {
@@ -148,6 +174,22 @@ public void clientOutboundChannelUsedBySimpleBroker() {
assertEquals("bar", new String((byte[]) message.getPayload()));
}
+ @Test
+ public void clientOutboundChannelCustomized() {
+
+ AbstractSubscribableChannel channel = this.cxtCustomizedChannelConfig.getBean(
+ "clientOutboundChannel", AbstractSubscribableChannel.class);
+
+ assertEquals(2, channel.getInterceptors().size());
+
+ ThreadPoolTaskExecutor taskExecutor = this.cxtCustomizedChannelConfig.getBean(
+ "clientOutboundChannelExecutor", ThreadPoolTaskExecutor.class);
+
+ assertEquals(21, taskExecutor.getCorePoolSize());
+ assertEquals(22, taskExecutor.getMaxPoolSize());
+ assertEquals(23, taskExecutor.getKeepAliveSeconds());
+ }
+
@Test
public void brokerChannel() {
TestChannel channel = this.cxtSimpleBroker.getBean("brokerChannel", TestChannel.class);
@@ -207,6 +249,22 @@ public void brokerChannelUsedByUserDestinationMessageHandler() {
assertEquals("/foo-users1", headers.getDestination());
}
+ @Test
+ public void brokerChannelCustomized() {
+
+ AbstractSubscribableChannel channel = this.cxtCustomizedChannelConfig.getBean(
+ "brokerChannel", AbstractSubscribableChannel.class);
+
+ assertEquals(3, channel.getInterceptors().size());
+
+ ThreadPoolTaskExecutor taskExecutor = this.cxtCustomizedChannelConfig.getBean(
+ "brokerChannelExecutor", ThreadPoolTaskExecutor.class);
+
+ assertEquals(31, taskExecutor.getCorePoolSize());
+ assertEquals(32, taskExecutor.getMaxPoolSize());
+ assertEquals(33, taskExecutor.getKeepAliveSeconds());
+ }
+
@Test
public void messageConverter() {
CompositeMessageConverter messageConverter = this.cxtStompBroker.getBean(
@@ -240,9 +298,6 @@ public TestController subscriptionController() {
return new TestController();
}
- @Override
- protected void configureMessageBroker(MessageBrokerRegistry registry) {
- }
@Override
@Bean
@@ -250,16 +305,29 @@ public AbstractSubscribableChannel clientInboundChannel() {
return new TestChannel();
}
+ @Override
+ protected void configureClientInboundChannel(ChannelRegistration registration) {
+ }
+
@Override
@Bean
public AbstractSubscribableChannel clientOutboundChannel() {
return new TestChannel();
}
+ @Override
+ protected void configureClientOutboundChannel(ChannelRegistration registration) {
+ }
+
@Override
public AbstractSubscribableChannel brokerChannel() {
return new TestChannel();
}
+
+ @Override
+ protected void configureMessageBroker(MessageBrokerRegistry registry) {
+ }
+
}
@Configuration
@@ -271,6 +339,32 @@ public void configureMessageBroker(MessageBrokerRegistry registry) {
}
}
+ @Configuration
+ static class CustomizedChannelConfig extends AbstractMessageBrokerConfiguration {
+
+ private ChannelInterceptor interceptor = new ChannelInterceptorAdapter();
+
+
+ @Override
+ protected void configureClientInboundChannel(ChannelRegistration registration) {
+ registration.setInterceptors(this.interceptor);
+ registration.taskExecutor().corePoolSize(11).maxPoolSize(12).keepAliveSeconds(13).queueCapacity(14);
+ }
+
+ @Override
+ protected void configureClientOutboundChannel(ChannelRegistration registration) {
+ registration.setInterceptors(this.interceptor, this.interceptor);
+ registration.taskExecutor().corePoolSize(21).maxPoolSize(22).keepAliveSeconds(23).queueCapacity(24);
+ }
+
+ @Override
+ protected void configureMessageBroker(MessageBrokerRegistry registry) {
+ registry.configureBrokerChannel().setInterceptors(this.interceptor, this.interceptor, this.interceptor);
+ registry.configureBrokerChannel().taskExecutor()
+ .corePoolSize(31).maxPoolSize(32).keepAliveSeconds(33).queueCapacity(34);
+ }
+ }
+
private static class TestChannel extends ExecutorSubscribableChannel {
diff --git a/spring-websocket/src/main/java/org/springframework/web/socket/messaging/config/DelegatingWebSocketMessageBrokerConfiguration.java b/spring-websocket/src/main/java/org/springframework/web/socket/messaging/config/DelegatingWebSocketMessageBrokerConfiguration.java
index 3cb0a5e14930..9c84f2c20410 100644
--- a/spring-websocket/src/main/java/org/springframework/web/socket/messaging/config/DelegatingWebSocketMessageBrokerConfiguration.java
+++ b/spring-websocket/src/main/java/org/springframework/web/socket/messaging/config/DelegatingWebSocketMessageBrokerConfiguration.java
@@ -21,6 +21,7 @@
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
+import org.springframework.messaging.simp.config.ChannelRegistration;
import org.springframework.messaging.simp.config.MessageBrokerRegistry;
import org.springframework.util.CollectionUtils;
@@ -58,6 +59,20 @@ protected void registerStompEndpoints(StompEndpointRegistry registry) {
}
}
+ @Override
+ protected void configureClientInboundChannel(ChannelRegistration registration) {
+ for (WebSocketMessageBrokerConfigurer c : this.configurers) {
+ c.configureClientInboundChannel(registration);
+ }
+ }
+
+ @Override
+ protected void configureClientOutboundChannel(ChannelRegistration registration) {
+ for (WebSocketMessageBrokerConfigurer c : this.configurers) {
+ c.configureClientOutboundChannel(registration);
+ }
+ }
+
@Override
protected void configureMessageBroker(MessageBrokerRegistry registry) {
for (WebSocketMessageBrokerConfigurer c : this.configurers) {
diff --git a/spring-websocket/src/main/java/org/springframework/web/socket/messaging/config/WebSocketMessageBrokerConfigurer.java b/spring-websocket/src/main/java/org/springframework/web/socket/messaging/config/WebSocketMessageBrokerConfigurer.java
index 5a3e00b75c96..f7bba9c58fee 100644
--- a/spring-websocket/src/main/java/org/springframework/web/socket/messaging/config/WebSocketMessageBrokerConfigurer.java
+++ b/spring-websocket/src/main/java/org/springframework/web/socket/messaging/config/WebSocketMessageBrokerConfigurer.java
@@ -17,6 +17,7 @@
package org.springframework.web.socket.messaging.config;
+import org.springframework.messaging.simp.config.ChannelRegistration;
import org.springframework.messaging.simp.config.MessageBrokerRegistry;
/**
@@ -35,6 +36,22 @@ public interface WebSocketMessageBrokerConfigurer {
*/
void registerStompEndpoints(StompEndpointRegistry registry);
+ /**
+ * Configure the {@link org.springframework.messaging.MessageChannel} used for
+ * incoming messages from WebSocket clients. By default the channel is backed
+ * by a thread pool of size 1. It is recommended to customize thread pool
+ * settings for production use.
+ */
+ void configureClientInboundChannel(ChannelRegistration registration);
+
+ /**
+ * Configure the {@link org.springframework.messaging.MessageChannel} used for
+ * incoming messages from WebSocket clients. By default the channel is backed
+ * by a thread pool of size 1. It is recommended to customize thread pool
+ * settings for production use.
+ */
+ void configureClientOutboundChannel(ChannelRegistration registration);
+
/**
* Configure message broker options.
*/
diff --git a/spring-websocket/src/test/java/org/springframework/web/socket/messaging/SimpAnnotationMethodIntegrationTests.java b/spring-websocket/src/test/java/org/springframework/web/socket/messaging/SimpAnnotationMethodIntegrationTests.java
index 377a4d1e1adc..27879b0152d5 100644
--- a/spring-websocket/src/test/java/org/springframework/web/socket/messaging/SimpAnnotationMethodIntegrationTests.java
+++ b/spring-websocket/src/test/java/org/springframework/web/socket/messaging/SimpAnnotationMethodIntegrationTests.java
@@ -36,6 +36,7 @@
import org.springframework.context.annotation.Configuration;
import org.springframework.messaging.handler.annotation.MessageExceptionHandler;
import org.springframework.messaging.handler.annotation.MessageMapping;
+import org.springframework.messaging.simp.config.ChannelRegistration;
import org.springframework.messaging.simp.config.MessageBrokerRegistry;
import org.springframework.messaging.simp.stomp.StompCommand;
import org.springframework.messaging.support.channel.AbstractSubscribableChannel;
@@ -215,6 +216,14 @@ public void registerStompEndpoints(StompEndpointRegistry registry) {
registry.addEndpoint("/ws").setHandshakeHandler(this.handshakeHandler);
}
+ @Override
+ public void configureClientInboundChannel(ChannelRegistration registration) {
+ }
+
+ @Override
+ public void configureClientOutboundChannel(ChannelRegistration registration) {
+ }
+
@Override
public void configureMessageBroker(MessageBrokerRegistry configurer) {
configurer.setApplicationDestinationPrefixes("/app");
diff --git a/spring-websocket/src/test/java/org/springframework/web/socket/messaging/config/WebSocketMessageBrokerConfigurationSupportTests.java b/spring-websocket/src/test/java/org/springframework/web/socket/messaging/config/WebSocketMessageBrokerConfigurationSupportTests.java
index 352f5b002d82..944a44f4c58a 100644
--- a/spring-websocket/src/test/java/org/springframework/web/socket/messaging/config/WebSocketMessageBrokerConfigurationSupportTests.java
+++ b/spring-websocket/src/test/java/org/springframework/web/socket/messaging/config/WebSocketMessageBrokerConfigurationSupportTests.java
@@ -27,6 +27,7 @@
import org.springframework.messaging.handler.annotation.SendTo;
import org.springframework.messaging.simp.SimpMessageType;
import org.springframework.messaging.simp.annotation.SubscribeMapping;
+import org.springframework.messaging.simp.config.ChannelRegistration;
import org.springframework.messaging.simp.config.MessageBrokerRegistry;
import org.springframework.messaging.simp.stomp.StompCommand;
import org.springframework.messaging.simp.stomp.StompHeaderAccessor;
@@ -119,20 +120,29 @@ public String handleMessage() {
@Configuration
static class TestSimpleMessageBrokerConfig implements WebSocketMessageBrokerConfigurer {
+ @Bean
+ public TestController subscriptionController() {
+ return new TestController();
+ }
+
@Override
public void registerStompEndpoints(StompEndpointRegistry registry) {
registry.addEndpoint("/simpleBroker");
}
@Override
- public void configureMessageBroker(MessageBrokerRegistry configurer) {
- // SimpleBroker used by default
+ public void configureClientInboundChannel(ChannelRegistration registration) {
}
- @Bean
- public TestController subscriptionController() {
- return new TestController();
+ @Override
+ public void configureClientOutboundChannel(ChannelRegistration registration) {
}
+
+ @Override
+ public void configureMessageBroker(MessageBrokerRegistry registry) {
+ // SimpleBroker used by default
+ }
+
}
@Configuration
|
70a16e87c18891c9dcea9d0edb06552cd6f2e72e
|
camel
|
CAMEL-541: Removed a bad tangle in camel spi. Not- this package has no tangles.--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@748992 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/camel
|
diff --git a/camel-core/src/main/java/org/apache/camel/management/InstrumentationErrorHandlerWrappingStrategy.java b/camel-core/src/main/java/org/apache/camel/management/InstrumentationErrorHandlerWrappingStrategy.java
index 9b45cb432ea40..a01120191a4d8 100644
--- a/camel-core/src/main/java/org/apache/camel/management/InstrumentationErrorHandlerWrappingStrategy.java
+++ b/camel-core/src/main/java/org/apache/camel/management/InstrumentationErrorHandlerWrappingStrategy.java
@@ -29,13 +29,14 @@
public class InstrumentationErrorHandlerWrappingStrategy implements ErrorHandlerWrappingStrategy {
private Map<ProcessorType, PerformanceCounter> counterMap;
+ private RouteContext routeContext;
- public InstrumentationErrorHandlerWrappingStrategy(Map<ProcessorType, PerformanceCounter> counterMap) {
+ public InstrumentationErrorHandlerWrappingStrategy(RouteContext routeContext, Map<ProcessorType, PerformanceCounter> counterMap) {
this.counterMap = counterMap;
+ this.routeContext = routeContext;
}
- public Processor wrapProcessorInErrorHandler(RouteContext routeContext, ProcessorType processorType,
- Processor target) throws Exception {
+ public Processor wrapProcessorInErrorHandler(ProcessorType processorType, Processor target) throws Exception {
// don't wrap our instrumentation interceptors
if (counterMap.containsKey(processorType)) {
diff --git a/camel-core/src/main/java/org/apache/camel/management/InstrumentationLifecycleStrategy.java b/camel-core/src/main/java/org/apache/camel/management/InstrumentationLifecycleStrategy.java
index 2a0cdfc5567fe..a01b8c8fc4c12 100644
--- a/camel-core/src/main/java/org/apache/camel/management/InstrumentationLifecycleStrategy.java
+++ b/camel-core/src/main/java/org/apache/camel/management/InstrumentationLifecycleStrategy.java
@@ -29,7 +29,6 @@
import org.apache.camel.CamelContext;
import org.apache.camel.Consumer;
import org.apache.camel.Endpoint;
-import org.apache.camel.Exchange;
import org.apache.camel.Route;
import org.apache.camel.Service;
import org.apache.camel.impl.DefaultCamelContext;
@@ -121,8 +120,7 @@ public void onRoutesAdd(Collection<Route> routes) {
// retrieve the per-route intercept for this route
InstrumentationProcessor interceptor = interceptorMap.get(route.getEndpoint());
if (interceptor == null) {
- LOG.warn("Instrumentation processor not found for route endpoint "
- + route.getEndpoint());
+ LOG.warn("Instrumentation processor not found for route endpoint: " + route.getEndpoint());
} else {
interceptor.setCounter(mr);
}
@@ -187,9 +185,7 @@ public void onRouteContextCreate(RouteContext routeContext) {
}
routeContext.addInterceptStrategy(new InstrumentationInterceptStrategy(counterMap));
-
- routeContext.setErrorHandlerWrappingStrategy(
- new InstrumentationErrorHandlerWrappingStrategy(counterMap));
+ routeContext.setErrorHandlerWrappingStrategy(new InstrumentationErrorHandlerWrappingStrategy(routeContext, counterMap));
// Add an InstrumentationProcessor at the beginning of each route and
// set up the interceptorMap for onRoutesAdd() method to register the
diff --git a/camel-core/src/main/java/org/apache/camel/model/ProcessorType.java b/camel-core/src/main/java/org/apache/camel/model/ProcessorType.java
index 542ff889fdec2..cc58a52aad34b 100644
--- a/camel-core/src/main/java/org/apache/camel/model/ProcessorType.java
+++ b/camel-core/src/main/java/org/apache/camel/model/ProcessorType.java
@@ -1985,7 +1985,7 @@ protected Processor wrapInErrorHandler(RouteContext routeContext, Processor targ
ObjectHelper.notNull(target, "target", this);
ErrorHandlerWrappingStrategy strategy = routeContext.getErrorHandlerWrappingStrategy();
if (strategy != null) {
- return strategy.wrapProcessorInErrorHandler(routeContext, this, target);
+ return strategy.wrapProcessorInErrorHandler(this, target);
}
return getErrorHandlerBuilder().createErrorHandler(routeContext, target);
}
diff --git a/camel-core/src/main/java/org/apache/camel/spi/ErrorHandlerWrappingStrategy.java b/camel-core/src/main/java/org/apache/camel/spi/ErrorHandlerWrappingStrategy.java
index af9c8be7dca79..83e9225a95308 100644
--- a/camel-core/src/main/java/org/apache/camel/spi/ErrorHandlerWrappingStrategy.java
+++ b/camel-core/src/main/java/org/apache/camel/spi/ErrorHandlerWrappingStrategy.java
@@ -33,13 +33,11 @@ public interface ErrorHandlerWrappingStrategy {
* to give the implementor an opportunity to wrap the target processor
* in a route.
*
- * @param routeContext the route context
* @param processorType the object that invokes this method
* @param target the processor to be wrapped
* @return processor wrapped with an interceptor or not wrapped
* @throws Exception can be thrown
*/
- Processor wrapProcessorInErrorHandler(RouteContext routeContext, ProcessorType processorType,
- Processor target) throws Exception;
+ Processor wrapProcessorInErrorHandler(ProcessorType processorType, Processor target) throws Exception;
}
|
e1c739fe6fcf4458c1ddad3ed2c79d1e78a3e980
|
elasticsearch
|
Improved test, printed out potential shard- failures--
|
p
|
https://github.com/elastic/elasticsearch
|
diff --git a/src/test/java/org/elasticsearch/test/integration/document/BulkTests.java b/src/test/java/org/elasticsearch/test/integration/document/BulkTests.java
index f105e8523153e..c5aba999491fa 100644
--- a/src/test/java/org/elasticsearch/test/integration/document/BulkTests.java
+++ b/src/test/java/org/elasticsearch/test/integration/document/BulkTests.java
@@ -1,6 +1,5 @@
package org.elasticsearch.test.integration.document;
-import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.get.GetResponse;
@@ -9,17 +8,18 @@
import org.elasticsearch.action.update.UpdateRequestBuilder;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.common.Priority;
+import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.query.QueryBuilders;
-import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.integration.AbstractSharedClusterTest;
import org.junit.Test;
-import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CyclicBarrier;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
import static org.hamcrest.Matchers.*;
/**
@@ -341,22 +341,17 @@ public void testBulkUpdate_largerVolume() throws Exception {
@Test
public void testBulkUpdateDocAsUpsertWithParent() throws Exception {
client().admin().indices().prepareCreate("test")
- .setSettings(
- ImmutableSettings.settingsBuilder()
- .put("index.number_of_shards", 2)
- .put("index.number_of_replicas", 1)
- ).addMapping("child", "{\"child\": {\"_parent\": {\"type\": \"parent\"}}}")
+ .addMapping("child", "{\"child\": {\"_parent\": {\"type\": \"parent\"}}}")
.execute().actionGet();
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
BulkRequestBuilder builder = client().prepareBulk();
+ byte[] addParent = new BytesArray("{\"index\" : { \"_index\" : \"test\", \"_type\" : \"parent\", \"_id\" : \"parent1\"}}\n" +
+ "{\"field1\" : \"value1\"}\n").array();
- byte[] addParent = ("{\"index\" : { \"_index\" : \"test\", \"_type\" : \"parent\", \"_id\" : \"parent1\"}}\n" +
- "{\"field1\" : \"value1\"}\n").getBytes("utf-8");
-
- byte[] addChild = ("{ \"update\" : { \"_index\" : \"test\", \"_type\" : \"child\", \"_id\" : \"child1\", \"parent\" : \"parent1\"}}\n" +
- "{\"doc\" : { \"field1\" : \"value1\"}, \"doc_as_upsert\" : \"true\"}\n").getBytes("utf-8");
+ byte[] addChild = new BytesArray("{ \"update\" : { \"_index\" : \"test\", \"_type\" : \"child\", \"_id\" : \"child1\", \"parent\" : \"parent1\"}}\n" +
+ "{\"doc\" : { \"field1\" : \"value1\"}, \"doc_as_upsert\" : \"true\"}\n").array();
builder.add(addParent, 0, addParent.length, false);
builder.add(addChild, 0, addChild.length, false);
@@ -373,33 +368,24 @@ public void testBulkUpdateDocAsUpsertWithParent() throws Exception {
.setQuery(QueryBuilders.hasParentQuery("parent", QueryBuilders.matchAllQuery()))
.get();
- assertThat(searchResponse.getFailedShards(), equalTo(0));
- SearchHit[] hits = searchResponse.getHits().getHits();
- assertThat(hits.length, equalTo(1));
- assertThat(hits[0].getId(), equalTo("child1"));
-
+ assertNoFailures(searchResponse);
+ assertSearchHits(searchResponse, "child1");
}
-
@Test
public void testBulkUpdateUpsertWithParent() throws Exception {
client().admin().indices().prepareCreate("test")
- .setSettings(
- ImmutableSettings.settingsBuilder()
- .put("index.number_of_shards", 2)
- .put("index.number_of_replicas", 1)
- ).addMapping("child", "{\"child\": {\"_parent\": {\"type\": \"parent\"}}}")
+ .addMapping("child", "{\"child\": {\"_parent\": {\"type\": \"parent\"}}}")
.execute().actionGet();
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
BulkRequestBuilder builder = client().prepareBulk();
+ byte[] addParent = new BytesArray("{\"index\" : { \"_index\" : \"test\", \"_type\" : \"parent\", \"_id\" : \"parent1\"}}\n" +
+ "{\"field1\" : \"value1\"}\n").array();
- byte[] addParent = ("{\"index\" : { \"_index\" : \"test\", \"_type\" : \"parent\", \"_id\" : \"parent1\"}}\n" +
- "{\"field1\" : \"value1\"}\n").getBytes("utf-8");
-
- byte[] addChild = ("{\"update\" : { \"_id\" : \"child1\", \"_type\" : \"child\", \"_index\" : \"test\", \"parent\" : \"parent1\"} }\n" +
- "{ \"script\" : \"ctx._source.field2 = 'value2'\", \"upsert\" : {\"field1\" : \"value1\"}}\n").getBytes("utf-8");
+ byte[] addChild = new BytesArray("{\"update\" : { \"_id\" : \"child1\", \"_type\" : \"child\", \"_index\" : \"test\", \"parent\" : \"parent1\"} }\n" +
+ "{ \"script\" : \"ctx._source.field2 = 'value2'\", \"upsert\" : {\"field1\" : \"value1\"}}\n").array();
builder.add(addParent, 0, addParent.length, false);
builder.add(addChild, 0, addChild.length, false);
@@ -415,11 +401,8 @@ public void testBulkUpdateUpsertWithParent() throws Exception {
.setQuery(QueryBuilders.hasParentQuery("parent", QueryBuilders.matchAllQuery()))
.get();
- assertThat(searchResponse.getFailedShards(), equalTo(0));
- SearchHit[] hits = searchResponse.getHits().getHits();
- assertThat(hits.length, equalTo(1));
- assertThat(hits[0].getId(), equalTo("child1"));
-
+ assertNoFailures(searchResponse);
+ assertSearchHits(searchResponse, "child1");
}
@Test
|
5dabaf626e0a3493889eadcbd5ebf73d4e145912
|
camel
|
CAMEL-1091 - Fix compilation issue on Java 1.5--git-svn-id: https://svn.apache.org/repos/asf/activemq/camel/trunk@718279 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/camel
|
diff --git a/components/camel-jetty/src/test/java/org/apache/camel/component/jetty/InterfacesTest.java b/components/camel-jetty/src/test/java/org/apache/camel/component/jetty/InterfacesTest.java
index b36e2faef83de..337ac66092c4a 100644
--- a/components/camel-jetty/src/test/java/org/apache/camel/component/jetty/InterfacesTest.java
+++ b/components/camel-jetty/src/test/java/org/apache/camel/component/jetty/InterfacesTest.java
@@ -15,25 +15,27 @@ public class InterfacesTest extends ContextTestSupport {
private String remoteInterfaceAddress;
- public InterfacesTest() throws SocketException {
- // retirieve an address of some remote network interface
+ public InterfacesTest() throws IOException {
+ // Retrieve an address of some remote network interface
Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces();
while(interfaces.hasMoreElements()) {
NetworkInterface interfaze = interfaces.nextElement();
- if (!interfaze.isUp() || interfaze.isLoopback()) {
- continue;
- }
Enumeration<InetAddress> addresses = interfaze.getInetAddresses();
- if(addresses.hasMoreElements()) {
- remoteInterfaceAddress = addresses.nextElement().getHostAddress();
+ if(addresses.hasMoreElements()) {
+ InetAddress nextAddress = addresses.nextElement();
+ if (nextAddress.isLoopbackAddress() || nextAddress.isReachable(2000)) {
+ break;
+ }
+ remoteInterfaceAddress = nextAddress.getHostAddress();
}
};
}
public void testLocalInterfaceHandled() throws IOException, InterruptedException {
- getMockEndpoint("mock:endpoint").expectedMessageCount(3);
+ int expectedMessages = (remoteInterfaceAddress != null) ? 3 : 2;
+ getMockEndpoint("mock:endpoint").expectedMessageCount(expectedMessages);
URL localUrl = new URL("http://localhost:4567/testRoute");
String localResponse = IOUtils.toString(localUrl.openStream());
@@ -44,9 +46,11 @@ public void testLocalInterfaceHandled() throws IOException, InterruptedException
localResponse = IOUtils.toString(localUrl.openStream());
assertEquals("local-differentPort", localResponse);
- URL url = new URL("http://" + remoteInterfaceAddress + ":4567/testRoute");
- String remoteResponse = IOUtils.toString(url.openStream());
- assertEquals("remote", remoteResponse);
+ if (remoteInterfaceAddress != null) {
+ URL url = new URL("http://" + remoteInterfaceAddress + ":4567/testRoute");
+ String remoteResponse = IOUtils.toString(url.openStream());
+ assertEquals("remote", remoteResponse);
+ }
assertMockEndpointsSatisfied();
}
@@ -65,9 +69,11 @@ public void configure() throws Exception {
.setBody().constant("local-differentPort")
.to("mock:endpoint");
- from("jetty:http://" + remoteInterfaceAddress + ":4567/testRoute")
- .setBody().constant("remote")
- .to("mock:endpoint");
+ if (remoteInterfaceAddress != null) {
+ from("jetty:http://" + remoteInterfaceAddress + ":4567/testRoute")
+ .setBody().constant("remote")
+ .to("mock:endpoint");
+ }
}
};
}
|
646aa624eaddb4f543a1019f2d5ef49795cf66ce
|
ReactiveX-RxJava
|
Fixed issue -417--
|
c
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/main/java/rx/operators/OperationMap.java b/rxjava-core/src/main/java/rx/operators/OperationMap.java
index 9eb2520420..940147b0b8 100644
--- a/rxjava-core/src/main/java/rx/operators/OperationMap.java
+++ b/rxjava-core/src/main/java/rx/operators/OperationMap.java
@@ -15,12 +15,16 @@
*/
package rx.operators;
-import static org.junit.Assert.*;
-import static org.mockito.Matchers.*;
-import static org.mockito.Mockito.*;
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.inOrder;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
import java.util.HashMap;
import java.util.Map;
+import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;
import org.junit.Before;
@@ -33,6 +37,7 @@
import rx.Observable.OnSubscribeFunc;
import rx.Observer;
import rx.Subscription;
+import rx.concurrency.Schedulers;
import rx.util.functions.Func1;
import rx.util.functions.Func2;
@@ -59,17 +64,12 @@ public final class OperationMap {
* @return a sequence that is the result of applying the transformation function to each item in the input sequence.
*/
public static <T, R> OnSubscribeFunc<R> map(final Observable<? extends T> sequence, final Func1<? super T, ? extends R> func) {
- return new OnSubscribeFunc<R>() {
- @Override
- public Subscription onSubscribe(Observer<? super R> observer) {
- return new MapObservable<T, R>(sequence, new Func2<T, Integer, R>() {
+ return mapWithIndex(sequence, new Func2<T, Integer, R>() {
@Override
public R call(T value, @SuppressWarnings("unused") Integer unused) {
return func.call(value);
}
- }).onSubscribe(observer);
- }
- };
+ });
}
/**
@@ -136,7 +136,8 @@ public MapObservable(Observable<? extends T> sequence, Func2<? super T, Integer,
@Override
public Subscription onSubscribe(final Observer<? super R> observer) {
- return sequence.subscribe(new Observer<T>() {
+ final SafeObservableSubscription subscription = new SafeObservableSubscription();
+ return subscription.wrap(sequence.subscribe(new SafeObserver<T>(subscription, new Observer<T>() {
@Override
public void onNext(T value) {
observer.onNext(func.call(value, index));
@@ -152,7 +153,7 @@ public void onError(Throwable ex) {
public void onCompleted() {
observer.onCompleted();
}
- });
+ })));
}
}
@@ -366,6 +367,41 @@ public String call(String s) {
assertEquals(1, c2.get());
}
+ @Test(expected = IllegalArgumentException.class)
+ public void testMapWithIssue417() {
+ Observable.from(1).observeOn(Schedulers.threadPoolForComputation())
+ .map(new Func1<Integer, Integer>() {
+ public Integer call(Integer arg0) {
+ throw new IllegalArgumentException("any error");
+ }
+ }).toBlockingObservable().single();
+ }
+
+ @Test
+ public void testMapWithErrorInFuncAndThreadPoolScheduler() throws InterruptedException {
+ // The error will throw in one of threads in the thread pool.
+ // If map does not handle it, the error will disappear.
+ // so map needs to handle the error by itself.
+ final CountDownLatch latch = new CountDownLatch(1);
+ Observable<String> m = Observable.from("one")
+ .observeOn(Schedulers.threadPoolForComputation())
+ .map(new Func1<String, String>() {
+ public String call(String arg0) {
+ try {
+ throw new IllegalArgumentException("any error");
+ } finally {
+ latch.countDown();
+ }
+ }
+ });
+
+ m.subscribe(stringObserver);
+ latch.await();
+ InOrder inorder = inOrder(stringObserver);
+ inorder.verify(stringObserver, times(1)).onError(any(IllegalArgumentException.class));
+ inorder.verifyNoMoreInteractions();
+ }
+
private static Map<String, String> getMap(String prefix) {
Map<String, String> m = new HashMap<String, String>();
m.put("firstName", prefix + "First");
|
541cf2de8047f726b780fb1343a7ac9adff77e29
|
spring-framework
|
removed unused method; polishing--
|
p
|
https://github.com/spring-projects/spring-framework
|
diff --git a/org.springframework.core/src/main/java/org/springframework/core/convert/TypeDescriptor.java b/org.springframework.core/src/main/java/org/springframework/core/convert/TypeDescriptor.java
index 7012842ad908..2d0e48a5b266 100644
--- a/org.springframework.core/src/main/java/org/springframework/core/convert/TypeDescriptor.java
+++ b/org.springframework.core/src/main/java/org/springframework/core/convert/TypeDescriptor.java
@@ -26,7 +26,6 @@
import org.springframework.core.MethodParameter;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
-import org.springframework.util.ObjectUtils;
/**
* Context about a type to convert to.
@@ -136,7 +135,7 @@ public Field getField() {
/**
* Determine the declared (non-generic) type of the wrapped parameter/field.
- * @return the declared type
+ * @return the declared type, or null if this is {@link TypeDescriptor#NULL}.
*/
public Class<?> getType() {
if (this.type != null) {
@@ -162,14 +161,6 @@ public Class<?> getObjectType() {
return (type != null ? ClassUtils.resolvePrimitiveIfNecessary(type) : type);
}
- /**
- * Does the underyling declared type equal the type provided?
- * @param type the type to test against
- */
- public boolean typeEquals(Class<?> type) {
- return ObjectUtils.nullSafeEquals(getType(), type);
- }
-
/**
* Returns the name of this type: the fully qualified class name.
*/
diff --git a/org.springframework.core/src/main/java/org/springframework/core/convert/support/CollectionToMapConverter.java b/org.springframework.core/src/main/java/org/springframework/core/convert/support/CollectionToMapConverter.java
index 0d7fb4d1d21c..357367fe83a8 100644
--- a/org.springframework.core/src/main/java/org/springframework/core/convert/support/CollectionToMapConverter.java
+++ b/org.springframework.core/src/main/java/org/springframework/core/convert/support/CollectionToMapConverter.java
@@ -77,7 +77,7 @@ public Object convert(Object source, TypeDescriptor sourceType, TypeDescriptor t
}
if (keysCompatible && valuesCompatible) {
Map target = CollectionFactory.createMap(targetType.getType(), sourceCollection.size());
- if (sourceElementType.typeEquals(String.class)) {
+ if (String.class.equals(sourceElementType.getType())) {
for (Object element : sourceCollection) {
String[] property = parseProperty((String) element);
target.put(property[0], property[1]);
@@ -94,7 +94,7 @@ public Object convert(Object source, TypeDescriptor sourceType, TypeDescriptor t
Map target = CollectionFactory.createMap(targetType.getType(), sourceCollection.size());
MapEntryConverter converter = new MapEntryConverter(sourceElementType, sourceElementType, targetKeyType,
targetValueType, keysCompatible, valuesCompatible, conversionService);
- if (sourceElementType.typeEquals(String.class)) {
+ if (String.class.equals(sourceElementType.getType())) {
for (Object element : sourceCollection) {
String[] property = parseProperty((String) element);
Object targetKey = converter.convertKey(property[0]);
|
52bf7e145152f3b9c5d1ffe07247bab5f95bdde9
|
ReactiveX-RxJava
|
Set threads to daemons so they don't prevent system- from exiting--- This applies to any pools RxJava itself creates. It will be up to users to do this for Executors they inject.-
|
c
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/main/java/rx/concurrency/ExecutorScheduler.java b/rxjava-core/src/main/java/rx/concurrency/ExecutorScheduler.java
index bed7e5f6c8..133f772889 100644
--- a/rxjava-core/src/main/java/rx/concurrency/ExecutorScheduler.java
+++ b/rxjava-core/src/main/java/rx/concurrency/ExecutorScheduler.java
@@ -53,7 +53,9 @@ public class ExecutorScheduler extends AbstractScheduler {
@Override
public Thread newThread(Runnable r) {
- return new Thread(r, "RxScheduledExecutorPool-" + counter.incrementAndGet());
+ Thread t = new Thread(r, "RxScheduledExecutorPool-" + counter.incrementAndGet());
+ t.setDaemon(true);
+ return t;
}
});
diff --git a/rxjava-core/src/main/java/rx/concurrency/Schedulers.java b/rxjava-core/src/main/java/rx/concurrency/Schedulers.java
index bd35ab58ff..f805917b83 100644
--- a/rxjava-core/src/main/java/rx/concurrency/Schedulers.java
+++ b/rxjava-core/src/main/java/rx/concurrency/Schedulers.java
@@ -118,7 +118,9 @@ private static ScheduledExecutorService createComputationExecutor() {
@Override
public Thread newThread(Runnable r) {
- return new Thread(r, "RxComputationThreadPool-" + counter.incrementAndGet());
+ Thread t = new Thread(r, "RxComputationThreadPool-" + counter.incrementAndGet());
+ t.setDaemon(true);
+ return t;
}
});
}
@@ -129,7 +131,9 @@ private static Executor createIOExecutor() {
@Override
public Thread newThread(Runnable r) {
- return new Thread(r, "RxIOThreadPool-" + counter.incrementAndGet());
+ Thread t = new Thread(r, "RxIOThreadPool-" + counter.incrementAndGet());
+ t.setDaemon(true);
+ return t;
}
});
|
ffdde40b9f189cb30dee4c5187d63b61809f2d62
|
hadoop
|
HADOOP-6583. Captures authentication and- authorization metrics. Contributed by Devaraj Das.--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@915095 13f79535-47bb-0310-9956-ffa450edef68-
|
a
|
https://github.com/apache/hadoop
|
diff --git a/CHANGES.txt b/CHANGES.txt
index bf071d24a12aa..4a58a1d1a3b68 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -156,6 +156,8 @@ Trunk (unreleased changes)
meaningful exceptions when there are failures instead of returning
false. (omalley)
+ HADOOP-6583. Captures authentication and authorization metrics. (ddas)
+
OPTIMIZATIONS
BUG FIXES
diff --git a/src/java/org/apache/hadoop/ipc/Server.java b/src/java/org/apache/hadoop/ipc/Server.java
index 6297ec7a947d2..efb25a70e8018 100644
--- a/src/java/org/apache/hadoop/ipc/Server.java
+++ b/src/java/org/apache/hadoop/ipc/Server.java
@@ -220,6 +220,11 @@ public static void bind(ServerSocket socket, InetSocketAddress address,
}
}
}
+
+ /*Returns a handle to the rpcMetrics (required in tests)*/
+ public RpcMetrics getRpcMetrics() {
+ return rpcMetrics;
+ }
/** A call queued for handling. */
private static class Call {
@@ -877,7 +882,13 @@ public Object run() throws IOException {
if (LOG.isDebugEnabled())
LOG.debug("Have read input token of size " + saslToken.length
+ " for processing by saslServer.evaluateResponse()");
- byte[] replyToken = saslServer.evaluateResponse(saslToken);
+ byte[] replyToken;
+ try {
+ replyToken = saslServer.evaluateResponse(saslToken);
+ } catch (SaslException se) {
+ rpcMetrics.authenticationFailures.inc();
+ throw se;
+ }
if (replyToken != null) {
if (LOG.isDebugEnabled())
LOG.debug("Will send token of size " + replyToken.length
@@ -1078,6 +1089,7 @@ private void processUnwrappedData(byte[] inBuf) throws IOException,
private void processOneRpc(byte[] buf) throws IOException,
InterruptedException {
+ rpcMetrics.authenticationSuccesses.inc();
if (headerRead) {
processData(buf);
} else {
@@ -1121,7 +1133,9 @@ private boolean authorizeConnection() throws IOException {
if (LOG.isDebugEnabled()) {
LOG.debug("Successfully authorized " + header);
}
+ rpcMetrics.authorizationSuccesses.inc();
} catch (AuthorizationException ae) {
+ rpcMetrics.authorizationFailures.inc();
authFailedCall.connection = this;
setupResponse(authFailedResponse, authFailedCall, Status.FATAL, null,
ae.getClass().getName(), ae.getMessage());
diff --git a/src/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java b/src/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java
index a1fbccd06d45b..dd5d2af5c725a 100644
--- a/src/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java
+++ b/src/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java
@@ -27,6 +27,7 @@
import org.apache.hadoop.metrics.util.MetricsBase;
import org.apache.hadoop.metrics.util.MetricsIntValue;
import org.apache.hadoop.metrics.util.MetricsRegistry;
+import org.apache.hadoop.metrics.util.MetricsTimeVaryingInt;
import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;
/**
@@ -79,7 +80,14 @@ public RpcMetrics(String hostName, String port, Server server) {
new MetricsIntValue("NumOpenConnections", registry);
public MetricsIntValue callQueueLen =
new MetricsIntValue("callQueueLen", registry);
-
+ public MetricsTimeVaryingInt authenticationFailures =
+ new MetricsTimeVaryingInt("rpcAuthenticationFailures", registry);
+ public MetricsTimeVaryingInt authenticationSuccesses =
+ new MetricsTimeVaryingInt("rpcAuthenticationSuccesses", registry);
+ public MetricsTimeVaryingInt authorizationFailures =
+ new MetricsTimeVaryingInt("rpcAuthorizationFailures", registry);
+ public MetricsTimeVaryingInt authorizationSuccesses =
+ new MetricsTimeVaryingInt("rpcAuthorizationSuccesses", registry);
/**
* Push the metrics to the monitoring subsystem on doUpdate() call.
*/
diff --git a/src/test/core/org/apache/hadoop/ipc/TestRPC.java b/src/test/core/org/apache/hadoop/ipc/TestRPC.java
index 0bb3f8dc5c0b3..b94c88b4d19e7 100644
--- a/src/test/core/org/apache/hadoop/ipc/TestRPC.java
+++ b/src/test/core/org/apache/hadoop/ipc/TestRPC.java
@@ -368,6 +368,31 @@ private void doRPCs(Configuration conf, boolean expectFailure) throws Exception
if (proxy != null) {
RPC.stopProxy(proxy);
}
+ if (expectFailure) {
+ assertTrue("Expected 1 but got " +
+ server.getRpcMetrics().authorizationFailures
+ .getCurrentIntervalValue(),
+ server.getRpcMetrics().authorizationFailures
+ .getCurrentIntervalValue() == 1);
+ } else {
+ assertTrue("Expected 1 but got " +
+ server.getRpcMetrics().authorizationSuccesses
+ .getCurrentIntervalValue(),
+ server.getRpcMetrics().authorizationSuccesses
+ .getCurrentIntervalValue() == 1);
+ }
+ //since we don't have authentication turned ON, we should see
+ // >0 for the authentication successes and 0 for failure
+ assertTrue("Expected 0 but got " +
+ server.getRpcMetrics().authenticationFailures
+ .getCurrentIntervalValue(),
+ server.getRpcMetrics().authenticationFailures
+ .getCurrentIntervalValue() == 0);
+ assertTrue("Expected greater than 0 but got " +
+ server.getRpcMetrics().authenticationSuccesses
+ .getCurrentIntervalValue(),
+ server.getRpcMetrics().authenticationSuccesses
+ .getCurrentIntervalValue() > 0);
}
}
|
521bbfcf5613232af5f907843db0d54b5f9b493f
|
spring-framework
|
Allow configuring custom ThreadPoolTaskExecutor--Issue: SPR-12272-
|
a
|
https://github.com/spring-projects/spring-framework
|
diff --git a/spring-messaging/src/main/java/org/springframework/messaging/simp/config/ChannelRegistration.java b/spring-messaging/src/main/java/org/springframework/messaging/simp/config/ChannelRegistration.java
index f1b2e1374184..6ced0579be99 100644
--- a/spring-messaging/src/main/java/org/springframework/messaging/simp/config/ChannelRegistration.java
+++ b/spring-messaging/src/main/java/org/springframework/messaging/simp/config/ChannelRegistration.java
@@ -21,6 +21,7 @@
import java.util.List;
import org.springframework.messaging.support.ChannelInterceptor;
+import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
/**
* A registration class for customizing the configuration for a
@@ -46,6 +47,17 @@ public TaskExecutorRegistration taskExecutor() {
return this.registration;
}
+ /**
+ * Configure the thread pool backing this message channel using a custom
+ * ThreadPoolTaskExecutor.
+ */
+ public TaskExecutorRegistration taskExecutor(ThreadPoolTaskExecutor taskExecutor) {
+ if (this.registration == null) {
+ this.registration = new TaskExecutorRegistration(taskExecutor);
+ }
+ return this.registration;
+ }
+
/**
* Configure interceptors for the message channel.
*/
diff --git a/spring-messaging/src/main/java/org/springframework/messaging/simp/config/TaskExecutorRegistration.java b/spring-messaging/src/main/java/org/springframework/messaging/simp/config/TaskExecutorRegistration.java
index e8a5e050673e..d7ec1bcca15c 100644
--- a/spring-messaging/src/main/java/org/springframework/messaging/simp/config/TaskExecutorRegistration.java
+++ b/spring-messaging/src/main/java/org/springframework/messaging/simp/config/TaskExecutorRegistration.java
@@ -26,6 +26,8 @@
*/
public class TaskExecutorRegistration {
+ private ThreadPoolTaskExecutor taskExecutor;
+
private int corePoolSize = Runtime.getRuntime().availableProcessors() * 2;
private int maxPoolSize = Integer.MAX_VALUE;
@@ -35,6 +37,13 @@ public class TaskExecutorRegistration {
private int keepAliveSeconds = 60;
+ public TaskExecutorRegistration() {
+ }
+
+ public TaskExecutorRegistration(ThreadPoolTaskExecutor taskExecutor) {
+ this.taskExecutor = taskExecutor;
+ }
+
/**
* Set the core pool size of the ThreadPoolExecutor.
* <p><strong>NOTE:</strong> The core pool size is effectively the max pool size
@@ -93,7 +102,7 @@ public TaskExecutorRegistration keepAliveSeconds(int keepAliveSeconds) {
}
protected ThreadPoolTaskExecutor getTaskExecutor() {
- ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
+ ThreadPoolTaskExecutor executor = (this.taskExecutor != null ? this.taskExecutor : new ThreadPoolTaskExecutor());
executor.setCorePoolSize(this.corePoolSize);
executor.setMaxPoolSize(this.maxPoolSize);
executor.setKeepAliveSeconds(this.keepAliveSeconds);
diff --git a/spring-messaging/src/main/java/org/springframework/messaging/support/Test.java b/spring-messaging/src/main/java/org/springframework/messaging/support/Test.java
deleted file mode 100644
index 2a7cf38575ad..000000000000
--- a/spring-messaging/src/main/java/org/springframework/messaging/support/Test.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright 2002-2014 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.springframework.messaging.support;
-
-/**
- * @author Rossen Stoyanchev
- * @since 4.1
- */
-public class Test {
-
- public static void main(String[] args) {
-
- ExecutorSubscribableChannel.ExecutorSubscribableChannelTask task = null;
-
- }
-
-}
diff --git a/spring-messaging/src/test/java/org/springframework/messaging/simp/config/MessageBrokerConfigurationTests.java b/spring-messaging/src/test/java/org/springframework/messaging/simp/config/MessageBrokerConfigurationTests.java
index ce4aceb30dbe..a7684b97e221 100644
--- a/spring-messaging/src/test/java/org/springframework/messaging/simp/config/MessageBrokerConfigurationTests.java
+++ b/spring-messaging/src/test/java/org/springframework/messaging/simp/config/MessageBrokerConfigurationTests.java
@@ -132,8 +132,8 @@ public void clientInboundChannelCustomized() {
assertEquals(2, channel.getInterceptors().size());
- ThreadPoolTaskExecutor taskExecutor = this.customContext.getBean(
- "clientInboundChannelExecutor", ThreadPoolTaskExecutor.class);
+ CustomThreadPoolTaskExecutor taskExecutor = this.customContext.getBean(
+ "clientInboundChannelExecutor", CustomThreadPoolTaskExecutor.class);
assertEquals(11, taskExecutor.getCorePoolSize());
assertEquals(12, taskExecutor.getMaxPoolSize());
@@ -489,7 +489,8 @@ static class CustomConfig extends AbstractMessageBrokerConfiguration {
@Override
protected void configureClientInboundChannel(ChannelRegistration registration) {
registration.setInterceptors(this.interceptor);
- registration.taskExecutor().corePoolSize(11).maxPoolSize(12).keepAliveSeconds(13).queueCapacity(14);
+ registration.taskExecutor(new CustomThreadPoolTaskExecutor())
+ .corePoolSize(11).maxPoolSize(12).keepAliveSeconds(13).queueCapacity(14);
}
@Override
@@ -540,4 +541,7 @@ public void validate(Object target, Errors errors) {
}
}
+ private static class CustomThreadPoolTaskExecutor extends ThreadPoolTaskExecutor {
+ }
+
}
|
cc1d8448ed6de75c22eb4af46d21580211517d5e
|
kotlin
|
Prevent lazy types to be accidentally computed in- debug--
|
c
|
https://github.com/JetBrains/kotlin
|
diff --git a/core/descriptor.loader.java/src/org/jetbrains/jet/lang/resolve/java/lazy/types/LazyType.kt b/core/descriptor.loader.java/src/org/jetbrains/jet/lang/resolve/java/lazy/types/LazyType.kt
index c92e2e21c2be9..25bf6ca60ff39 100644
--- a/core/descriptor.loader.java/src/org/jetbrains/jet/lang/resolve/java/lazy/types/LazyType.kt
+++ b/core/descriptor.loader.java/src/org/jetbrains/jet/lang/resolve/java/lazy/types/LazyType.kt
@@ -47,4 +47,14 @@ abstract class LazyType(storageManager: StorageManager) : AbstractJetType() {
override fun isError()= getConstructor().getDeclarationDescriptor().inn({ d -> ErrorUtils.isError(d)}, false)
override fun getAnnotations(): List<AnnotationDescriptor> = listOf()
+
+ override fun toString(): String? {
+ if (!_typeConstructor.isComputed()) {
+ return "Type constructor is not computed"
+ }
+ if (!_arguments.isComputed()) {
+ return "" + getConstructor() + "<arguments are not computed>"
+ }
+ return super<AbstractJetType>.toString()
+ }
}
diff --git a/core/descriptors/src/org/jetbrains/jet/lang/types/AbstractJetType.java b/core/descriptors/src/org/jetbrains/jet/lang/types/AbstractJetType.java
index 1d7f57878c10b..d5ddee2fd7428 100644
--- a/core/descriptors/src/org/jetbrains/jet/lang/types/AbstractJetType.java
+++ b/core/descriptors/src/org/jetbrains/jet/lang/types/AbstractJetType.java
@@ -41,7 +41,7 @@ public final boolean equals(Object obj) {
}
@Override
- public final String toString() {
+ public String toString() {
List<TypeProjection> arguments = getArguments();
return getConstructor() + (arguments.isEmpty() ? "" : "<" + argumentsToString(arguments) + ">") + (isNullable() ? "?" : "");
}
|
7571758906d60e95dce84bfdd2d047c1ae3bdb43
|
intellij-community
|
IDEA-63381 we should find android module when- platform is specified--
|
c
|
https://github.com/JetBrains/intellij-community
|
diff --git a/plugins/android/src/org/jetbrains/android/logcat/AndroidLogcatToolWindowView.java b/plugins/android/src/org/jetbrains/android/logcat/AndroidLogcatToolWindowView.java
index 54090012c0eda..35b2dca164c69 100644
--- a/plugins/android/src/org/jetbrains/android/logcat/AndroidLogcatToolWindowView.java
+++ b/plugins/android/src/org/jetbrains/android/logcat/AndroidLogcatToolWindowView.java
@@ -36,8 +36,10 @@
import org.jetbrains.android.ddms.AdbManager;
import org.jetbrains.android.ddms.AdbNotRespondingException;
import org.jetbrains.android.facet.AndroidFacet;
+import org.jetbrains.android.sdk.AndroidPlatform;
import org.jetbrains.android.util.AndroidUtils;
import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
@@ -208,10 +210,22 @@ private void updateLogConsole() {
}
}
+ @Nullable
+ private static AndroidPlatform getAndroidPlatform(@NotNull Project project) {
+ List<AndroidFacet> facets = ProjectFacetManager.getInstance(project).getFacets(AndroidFacet.ID);
+ for (AndroidFacet facet : facets) {
+ AndroidPlatform platform = facet.getConfiguration().getAndroidPlatform();
+ if (platform != null) {
+ return platform;
+ }
+ }
+ return null;
+ }
+
private void updateDevices() {
- List<AndroidFacet> facets = ProjectFacetManager.getInstance(myProject).getFacets(AndroidFacet.ID);
- if (facets.size() > 0) {
- final AndroidDebugBridge debugBridge = facets.get(0).getDebugBridge();
+ AndroidPlatform platform = getAndroidPlatform(myProject);
+ if (platform != null) {
+ final AndroidDebugBridge debugBridge = platform.getSdk().getDebugBridge(myProject);
if (debugBridge != null) {
IDevice[] devices;
try {
|
1257196d255cf3697ab869a86eb6f84034232f78
|
orientdb
|
Fixed problem of ConcurrentModificationException- reported by Bayoda: the problem should be due to a wrong isolation on Map in- cache: used exclusive lock even for keys() and get().--
|
c
|
https://github.com/orientechnologies/orientdb
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/cache/ODefaultCache.java b/core/src/main/java/com/orientechnologies/orient/core/cache/ODefaultCache.java
index 21d3ea1a889..ee0286cde1b 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/cache/ODefaultCache.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/cache/ODefaultCache.java
@@ -80,11 +80,11 @@ public ORecordInternal<?> get(final ORID id) {
if (!isEnabled())
return null;
- lock.acquireSharedLock();
+ lock.acquireExclusiveLock();
try {
return cache.get(id);
} finally {
- lock.releaseSharedLock();
+ lock.releaseExclusiveLock();
}
}
@@ -138,11 +138,11 @@ public int limit() {
}
public Collection<ORID> keys() {
- lock.acquireSharedLock();
+ lock.acquireExclusiveLock();
try {
return new ArrayList<ORID>(cache.keySet());
} finally {
- lock.releaseSharedLock();
+ lock.releaseExclusiveLock();
}
}
|
4dcd9dfe0ea6f9c8a4eef47d6910dd188638e2f6
|
restlet-framework-java
|
Added support of the Authentication-Info header.--
|
a
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/build/tmpl/text/changes.txt b/build/tmpl/text/changes.txt
index 7e03cc9809..f20b1481f6 100644
--- a/build/tmpl/text/changes.txt
+++ b/build/tmpl/text/changes.txt
@@ -206,6 +206,7 @@ Changes log
204 (success, no content).
- Added support of the warning header.
- Added support of the ranges in the gwt edition.
+ - Added support of the Authentication-Info header.
- Misc
- Added Velocity template in the org.restlet.example project.
Reported by Ben Vesco.
diff --git a/modules/org.restlet/src/org/restlet/engine/http/HttpAdapter.java b/modules/org.restlet/src/org/restlet/engine/http/HttpAdapter.java
index 3f75ac6e86..dd1b698969 100644
--- a/modules/org.restlet/src/org/restlet/engine/http/HttpAdapter.java
+++ b/modules/org.restlet/src/org/restlet/engine/http/HttpAdapter.java
@@ -78,6 +78,8 @@ public void addAdditionalHeaders(Series<Parameter> existingHeaders,
HttpConstants.HEADER_AGE)
|| param.getName().equalsIgnoreCase(
HttpConstants.HEADER_ALLOW)
+ || param.getName().equalsIgnoreCase(
+ HttpConstants.HEADER_AUTHENTICATION_INFO)
|| param.getName().equalsIgnoreCase(
HttpConstants.HEADER_AUTHORIZATION)
|| param.getName().equalsIgnoreCase(
diff --git a/modules/org.restlet/src/org/restlet/engine/http/HttpServerAdapter.java b/modules/org.restlet/src/org/restlet/engine/http/HttpServerAdapter.java
index 2cb53095f1..e71b55f3c2 100644
--- a/modules/org.restlet/src/org/restlet/engine/http/HttpServerAdapter.java
+++ b/modules/org.restlet/src/org/restlet/engine/http/HttpServerAdapter.java
@@ -282,8 +282,21 @@ public static void addResponseHeaders(Response response,
// Add the Cache-control headers
if (!response.getCacheDirectives().isEmpty()) {
- responseHeaders.add(HttpConstants.HEADER_CACHE_CONTROL, CacheControlUtils
- .format(response.getCacheDirectives()));
+ responseHeaders.add(HttpConstants.HEADER_CACHE_CONTROL,
+ CacheControlUtils.format(response.getCacheDirectives()));
+ }
+
+ // Add the Authentication-Info header
+ if (response.getAuthenticationInfo() != null) {
+ try {
+ responseHeaders.add(HttpConstants.HEADER_AUTHENTICATION_INFO,
+ org.restlet.engine.security.AuthenticatorUtils
+ .formatAuthenticationInfo(response
+ .getAuthenticationInfo()));
+ } catch (IOException e) {
+ Context.getCurrentLogger().log(Level.WARNING,
+ "Unable to write the Authentication-Info header", e);
+ }
}
}
diff --git a/modules/org.restlet/src/org/restlet/engine/security/AuthenticatorUtils.java b/modules/org.restlet/src/org/restlet/engine/security/AuthenticatorUtils.java
index 87c740e43d..b854663332 100644
--- a/modules/org.restlet/src/org/restlet/engine/security/AuthenticatorUtils.java
+++ b/modules/org.restlet/src/org/restlet/engine/security/AuthenticatorUtils.java
@@ -43,6 +43,7 @@
import org.restlet.data.ChallengeScheme;
import org.restlet.data.Parameter;
import org.restlet.engine.Engine;
+import org.restlet.engine.http.HeaderBuilder;
import org.restlet.engine.http.HeaderReader;
import org.restlet.engine.http.HttpConstants;
import org.restlet.security.Guard;
@@ -155,6 +156,60 @@ public static void challenge(Response response, boolean stale, Guard guard) {
}
}
+ /**
+ * Formats an authentication information as a HTTP header value. The header
+ * is {@link HttpConstants#HEADER_AUTHENTICATION_INFO}.
+ *
+ * @param info
+ * The authentication information to format.
+ * @return The {@link HttpConstants#HEADER_AUTHENTICATION_INFO} header
+ * value.
+ * @throws IOException
+ */
+ public static String formatAuthenticationInfo(AuthenticationInfo info)
+ throws IOException {
+ HeaderBuilder hb = new HeaderBuilder();
+ boolean firstParameter = true;
+
+ if (info != null) {
+ if (info.getNextServerNonce() != null
+ && info.getNextServerNonce().length() > 0) {
+ hb.setFirstParameter(firstParameter);
+ hb
+ .appendQuotedParameter("nextnonce", info
+ .getNextServerNonce());
+ firstParameter = false;
+ }
+ if (info.getQuality() != null && info.getQuality().length() > 0) {
+ hb.setFirstParameter(firstParameter);
+ hb.appendParameter("qop", info.getQuality());
+ firstParameter = false;
+ if (info.getNonceCount() > 0) {
+ StringBuilder result = new StringBuilder(Integer
+ .toHexString(info.getNonceCount()));
+ while (result.length() < 8) {
+ result.insert(0, '0');
+ }
+ hb.appendParameter("nc", result.toString());
+ }
+ }
+ if (info.getResponseDigest() != null
+ && info.getResponseDigest().length() > 0) {
+ hb.setFirstParameter(firstParameter);
+ hb.appendQuotedParameter("rspauth", info.getResponseDigest());
+ firstParameter = false;
+ }
+ if (info.getClientNonce() != null
+ && info.getClientNonce().length() > 0) {
+ hb.setFirstParameter(firstParameter);
+ hb.appendParameter("cnonce", info.getClientNonce());
+ firstParameter = false;
+ }
+ }
+
+ return hb.toString();
+ }
+
/**
* Formats a challenge request as a HTTP header value. The header is
* {@link HttpConstants#HEADER_WWW_AUTHENTICATE}.
@@ -238,41 +293,38 @@ public static String formatResponse(ChallengeResponse challenge,
public static AuthenticationInfo parseAuthenticationInfo(String header) {
AuthenticationInfo result = null;
HeaderReader hr = new HeaderReader(header);
-
try {
- Parameter param;
- param = hr.readParameter();
-
- while (param != null) {
-
- param = hr.readParameter();
- }
-
String nextNonce = null;
- int nonceCount = 0;
- String cnonce = null;
String qop = null;
String responseAuth = null;
+ String cnonce = null;
+ int nonceCount = 0;
- String[] authFields = header.split(",");
- for (String field : authFields) {
- String[] nameValuePair = field.trim().split("=");
- if (nameValuePair[0].equals("nextnonce")) {
- nextNonce = nameValuePair[1];
- } else if (nameValuePair[0].equals("nc")) {
- nonceCount = Integer.parseInt(nameValuePair[1], 16);
- } else if (nameValuePair[0].equals("cnonce")) {
- cnonce = nameValuePair[1];
- if (cnonce.charAt(0) == '"') {
- cnonce = cnonce.substring(1, cnonce.length() - 1);
+ Parameter param = hr.readParameter();
+ while (param != null) {
+ try {
+ if ("nextnonce".equals(param.getName())) {
+ nextNonce = param.getValue();
+ } else if ("qop".equals(param.getName())) {
+ qop = param.getValue();
+ } else if ("rspauth".equals(param.getName())) {
+ responseAuth = param.getValue();
+ } else if ("cnonce".equals(param.getName())) {
+ cnonce = param.getValue();
+ } else if ("nc".equals(param.getName())) {
+ nonceCount = Integer.parseInt(param.getValue(), 16);
}
- } else if (nameValuePair[0].equals("qop")) {
- qop = nameValuePair[1];
- } else if (nameValuePair[0].equals("responseAuth")) {
- responseAuth = nameValuePair[1];
+
+ param = hr.readParameter();
+ } catch (Exception e) {
+ Context
+ .getCurrentLogger()
+ .log(
+ Level.WARNING,
+ "Unable to parse the authentication info header parameter",
+ e);
}
}
-
result = new AuthenticationInfo(nextNonce, nonceCount, cnonce, qop,
responseAuth);
} catch (IOException e) {
|
72d9872fffa2b8f6d534612436b1613ed062e026
|
ReactiveX-RxJava
|
updated a test and added another one, trying to get- the expected behavior right--
|
p
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/main/java/rx/operators/OperationCombineLatest.java b/rxjava-core/src/main/java/rx/operators/OperationCombineLatest.java
index 725d7781ae..53a11e0b24 100644
--- a/rxjava-core/src/main/java/rx/operators/OperationCombineLatest.java
+++ b/rxjava-core/src/main/java/rx/operators/OperationCombineLatest.java
@@ -304,7 +304,7 @@ public void testCombineLatestDifferentLengthObservableSequences1() {
/* we should have been called 4 times on the Observer */
InOrder inOrder = inOrder(w);
- inOrder.verify(w).onNext("1a2a3a");
+ inOrder.verify(w).onNext("1a2b3a");
inOrder.verify(w).onNext("1a2b3b");
inOrder.verify(w).onNext("1a2b3c");
inOrder.verify(w).onNext("1a2b3d");
@@ -348,6 +348,45 @@ public void testCombineLatestDifferentLengthObservableSequences2() {
}
+ @SuppressWarnings("unchecked")
+ /* mock calls don't do generics */
+ @Test
+ public void testCombineLatestWithInterleavingSequences() {
+ Observer<String> w = mock(Observer.class);
+
+ TestObservable w1 = new TestObservable();
+ TestObservable w2 = new TestObservable();
+ TestObservable w3 = new TestObservable();
+
+ Observable<String> combineLatestW = Observable.create(combineLatest(w1, w2, w3, getConcat3StringsCombineLatestFunction()));
+ combineLatestW.subscribe(w);
+
+ /* simulate sending data */
+ w1.Observer.onNext("1a");
+ w2.Observer.onNext("2a");
+ w2.Observer.onNext("2b");
+ w3.Observer.onNext("3a");
+
+ w1.Observer.onNext("1b");
+ w2.Observer.onNext("2c");
+ w2.Observer.onNext("2d");
+ w3.Observer.onNext("3b");
+
+ w1.Observer.onCompleted();
+ w2.Observer.onCompleted();
+ w3.Observer.onCompleted();
+
+ /* we should have been called 5 times on the Observer */
+ InOrder inOrder = inOrder(w);
+ inOrder.verify(w).onNext("1a2b3a");
+ inOrder.verify(w).onNext("1b2b3a");
+ inOrder.verify(w).onNext("1b2c3a");
+ inOrder.verify(w).onNext("1b2d3a");
+ inOrder.verify(w).onNext("1b2d3b");
+
+ inOrder.verify(w, times(1)).onCompleted();
+ }
+
/**
* Testing internal private logic due to the complexity so I want to use TDD to test as a I build it rather than relying purely on the overall functionality expected by the public methods.
*/
|
ccd24b5c3ce471428531e12737591b94c91db8bf
|
ReactiveX-RxJava
|
Add doOnSubscribe for Single--
|
a
|
https://github.com/ReactiveX/RxJava
|
diff --git a/src/main/java/rx/Single.java b/src/main/java/rx/Single.java
index 20b983c063..a8a10bafb9 100644
--- a/src/main/java/rx/Single.java
+++ b/src/main/java/rx/Single.java
@@ -2250,6 +2250,28 @@ public void onNext(T t) {
return lift(new OperatorDoOnEach<T>(observer));
}
+ /**
+ * Modifies the source {@code Single} so that it invokes the given action when it is subscribed from
+ * its subscribers. Each subscription will result in an invocation of the given action except when the
+ * source {@code Single} is reference counted, in which case the source {@code Single} will invoke
+ * the given action for the first subscription.
+ * <p>
+ * <img width="640" height="390" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/doOnSubscribe.png" alt="">
+ * <dl>
+ * <dt><b>Scheduler:</b></dt>
+ * <dd>{@code doOnSubscribe} does not operate by default on a particular {@link Scheduler}.</dd>
+ * </dl>
+ *
+ * @param subscribe
+ * the action that gets called when an observer subscribes to this {@code Single}
+ * @return the source {@code Single} modified so as to call this Action when appropriate
+ * @see <a href="http://reactivex.io/documentation/operators/do.html">ReactiveX operators documentation: Do</a>
+ */
+ @Experimental
+ public final Single<T> doOnSubscribe(final Action0 subscribe) {
+ return lift(new OperatorDoOnSubscribe<T>(subscribe));
+ }
+
/**
* Returns an Single that emits the items emitted by the source Single shifted forward in time by a
* specified delay. Error notifications from the source Single are not delayed.
diff --git a/src/test/java/rx/SingleTest.java b/src/test/java/rx/SingleTest.java
index 3ce86e9772..17794e4dbb 100644
--- a/src/test/java/rx/SingleTest.java
+++ b/src/test/java/rx/SingleTest.java
@@ -878,6 +878,43 @@ public void doOnSuccessShouldNotSwallowExceptionThrownByAction() {
verify(action).call(eq("value"));
}
+ @Test
+ public void doOnSubscribeShouldInvokeAction() {
+ Action0 action = mock(Action0.class);
+ Single<Integer> single = Single.just(1).doOnSubscribe(action);
+
+ verifyZeroInteractions(action);
+
+ single.subscribe();
+ single.subscribe();
+
+ verify(action, times(2)).call();
+ }
+
+ @Test
+ public void doOnSubscribeShouldInvokeActionBeforeSubscriberSubscribes() {
+ final List<String> callSequence = new ArrayList<String>(2);
+
+ Single<Integer> single = Single.create(new OnSubscribe<Integer>() {
+ @Override
+ public void call(SingleSubscriber<? super Integer> singleSubscriber) {
+ callSequence.add("onSubscribe");
+ singleSubscriber.onSuccess(1);
+ }
+ }).doOnSubscribe(new Action0() {
+ @Override
+ public void call() {
+ callSequence.add("doOnSubscribe");
+ }
+ });
+
+ single.subscribe();
+
+ assertEquals(2, callSequence.size());
+ assertEquals("doOnSubscribe", callSequence.get(0));
+ assertEquals("onSubscribe", callSequence.get(1));
+ }
+
@Test
public void delayWithSchedulerShouldDelayCompletion() {
TestScheduler scheduler = new TestScheduler();
|
e8f9097127843a3e56cd594da91c12430c178525
|
kotlin
|
do not mark error type if the expression is- resolved to namespace (EXPRESSION_EXPECTED_NAMESPACE_FOUND error)--
|
c
|
https://github.com/JetBrains/kotlin
|
diff --git a/compiler/frontend/src/org/jetbrains/jet/checkers/DebugInfoUtil.java b/compiler/frontend/src/org/jetbrains/jet/checkers/DebugInfoUtil.java
index 0d62832e48485..572aaf4cd72e7 100644
--- a/compiler/frontend/src/org/jetbrains/jet/checkers/DebugInfoUtil.java
+++ b/compiler/frontend/src/org/jetbrains/jet/checkers/DebugInfoUtil.java
@@ -70,6 +70,9 @@ else if (factory == Errors.SUPER_IS_NOT_AN_EXPRESSION
JetSuperExpression superExpression = (JetSuperExpression) diagnostic.getPsiElement();
markedWithErrorElements.put(superExpression.getInstanceReference(), factory);
}
+ else if (factory == Errors.EXPRESSION_EXPECTED_NAMESPACE_FOUND) {
+ markedWithErrorElements.put((JetSimpleNameExpression) diagnostic.getPsiElement(), factory);
+ }
}
root.acceptChildren(new JetTreeVisitorVoid() {
diff --git a/compiler/testData/diagnostics/tests/NamespaceAsExpression.kt b/compiler/testData/diagnostics/tests/NamespaceAsExpression.kt
index f26e6530f3dd1..9d2ca455811c4 100644
--- a/compiler/testData/diagnostics/tests/NamespaceAsExpression.kt
+++ b/compiler/testData/diagnostics/tests/NamespaceAsExpression.kt
@@ -4,5 +4,5 @@ package root.a
// FILE: b.kt
package root
-val x = <!EXPRESSION_EXPECTED_NAMESPACE_FOUND, DEBUG_INFO_ERROR_ELEMENT!>a<!>
+val x = <!EXPRESSION_EXPECTED_NAMESPACE_FOUND!>a<!>
val y2 = <!NAMESPACE_IS_NOT_AN_EXPRESSION!>package<!>
\ No newline at end of file
diff --git a/compiler/testData/diagnostics/tests/NamespaceInExpressionPosition.kt b/compiler/testData/diagnostics/tests/NamespaceInExpressionPosition.kt
index 3db9bca39918c..740ed2c0b3a4d 100644
--- a/compiler/testData/diagnostics/tests/NamespaceInExpressionPosition.kt
+++ b/compiler/testData/diagnostics/tests/NamespaceInExpressionPosition.kt
@@ -2,10 +2,10 @@ package foo
class X {}
-val s = <!EXPRESSION_EXPECTED_NAMESPACE_FOUND, DEBUG_INFO_ERROR_ELEMENT!>java<!>
+val s = <!EXPRESSION_EXPECTED_NAMESPACE_FOUND!>java<!>
val ss = <!NO_CLASS_OBJECT!>System<!>
val sss = <!NO_CLASS_OBJECT!>X<!>
-val xs = java.<!EXPRESSION_EXPECTED_NAMESPACE_FOUND, DEBUG_INFO_ERROR_ELEMENT!>lang<!>
+val xs = java.<!EXPRESSION_EXPECTED_NAMESPACE_FOUND!>lang<!>
val xss = java.lang.<!NO_CLASS_OBJECT!>System<!>
val xsss = foo.<!NO_CLASS_OBJECT!>X<!>
-val xssss = <!EXPRESSION_EXPECTED_NAMESPACE_FOUND, DEBUG_INFO_ERROR_ELEMENT!>foo<!>
\ No newline at end of file
+val xssss = <!EXPRESSION_EXPECTED_NAMESPACE_FOUND!>foo<!>
\ No newline at end of file
|
91749f9c93f0589d0e93e215e068d6b039c2d26a
|
hbase
|
HBASE-10449 Wrong execution pool configuration in- HConnectionManager--git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1563878 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hbase
|
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java
index 79ad7817833a..c467af298a0c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java
@@ -766,22 +766,27 @@ private ExecutorService getBatchPool() {
synchronized (this) {
if (batchPool == null) {
int maxThreads = conf.getInt("hbase.hconnection.threads.max", 256);
- int coreThreads = conf.getInt("hbase.hconnection.threads.core", 0);
+ int coreThreads = conf.getInt("hbase.hconnection.threads.core", 256);
if (maxThreads == 0) {
maxThreads = Runtime.getRuntime().availableProcessors() * 8;
}
- long keepAliveTime = conf.getLong("hbase.hconnection.threads.keepalivetime", 10);
+ if (coreThreads == 0) {
+ coreThreads = Runtime.getRuntime().availableProcessors() * 8;
+ }
+ long keepAliveTime = conf.getLong("hbase.hconnection.threads.keepalivetime", 60);
LinkedBlockingQueue<Runnable> workQueue =
new LinkedBlockingQueue<Runnable>(maxThreads *
conf.getInt(HConstants.HBASE_CLIENT_MAX_TOTAL_TASKS,
HConstants.DEFAULT_HBASE_CLIENT_MAX_TOTAL_TASKS));
- this.batchPool = new ThreadPoolExecutor(
+ ThreadPoolExecutor tpe = new ThreadPoolExecutor(
coreThreads,
maxThreads,
keepAliveTime,
TimeUnit.SECONDS,
workQueue,
Threads.newDaemonThreadFactory(toString() + "-shared-"));
+ tpe.allowCoreThreadTimeOut(true);
+ this.batchPool = tpe;
}
this.cleanupPool = true;
}
|
73ea4cb8ebbefe934d9f088e99903b15772fa445
|
orientdb
|
Implemented self-repair in case the security is- broken. This new method in the ODatabaseListener interface will allow to- handle corruptions in better way.--Console now displays a message and after a user input fix it. Cool!-
|
a
|
https://github.com/orientechnologies/orientdb
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/db/ODatabaseListener.java b/core/src/main/java/com/orientechnologies/orient/core/db/ODatabaseListener.java
index 5a420c50066..a8490605974 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/db/ODatabaseListener.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/db/ODatabaseListener.java
@@ -40,4 +40,15 @@ public interface ODatabaseListener {
public void onAfterTxCommit(final ODatabase iDatabase);
public void onClose(final ODatabase iDatabase);
+
+ /**
+ * Callback to decide if repair the database upon corruption.
+ *
+ * @param iDatabase
+ * Target database
+ * @param iReason
+ * Reason of corruption
+ * @return true if repair must be done, otherwise false
+ */
+ public boolean onCorruptionRepairDatabase(final ODatabase iDatabase, final String iReason);
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/db/record/ODatabaseRecordAbstract.java b/core/src/main/java/com/orientechnologies/orient/core/db/record/ODatabaseRecordAbstract.java
index 37d180e65b8..895012330bf 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/db/record/ODatabaseRecordAbstract.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/db/record/ODatabaseRecordAbstract.java
@@ -31,6 +31,7 @@
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.db.ODatabase;
import com.orientechnologies.orient.core.db.ODatabaseComplex;
+import com.orientechnologies.orient.core.db.ODatabaseListener;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.ODatabaseWrapperAbstract;
import com.orientechnologies.orient.core.db.raw.ODatabaseRaw;
@@ -108,6 +109,18 @@ public <DB extends ODatabase> DB open(final String iUserName, final String iUser
if (getStorage() instanceof OStorageEmbedded) {
user = getMetadata().getSecurity().authenticate(iUserName, iUserPassword);
+ final Set<ORole> roles = user.getRoles();
+ if (roles == null || roles.isEmpty() || roles.iterator().next() == null) {
+ // SEEMS CORRUPTED: INSTALL DEFAULT ROLE
+ for (ODatabaseListener l : underlying.getListeners()) {
+ if (l.onCorruptionRepairDatabase(this, "security metadata are corrupted: current user '" + user.getName()
+ + "' has no roles defined")) {
+ user = null;
+ user = metadata.getSecurity().repair();
+ break;
+ }
+ }
+ }
registerHook(new OUserTrigger());
registerHook(new OClassIndexManager());
} else
diff --git a/core/src/main/java/com/orientechnologies/orient/core/index/OIndexMVRBTreeAbstract.java b/core/src/main/java/com/orientechnologies/orient/core/index/OIndexMVRBTreeAbstract.java
index e80e4654b67..10207f34f7e 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/index/OIndexMVRBTreeAbstract.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/index/OIndexMVRBTreeAbstract.java
@@ -698,6 +698,10 @@ public void onBeforeTxBegin(final ODatabase iDatabase) {
public void onBeforeTxRollback(final ODatabase iDatabase) {
}
+ public boolean onCorruptionRepairDatabase(final ODatabase iDatabase, final String iReason) {
+ return false;
+ }
+
public void onAfterTxRollback(final ODatabase iDatabase) {
acquireExclusiveLock();
diff --git a/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurity.java b/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurity.java
index bec469d398c..4bb12c949d4 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurity.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurity.java
@@ -46,5 +46,7 @@ public interface OSecurity {
public List<ODocument> getRoles();
+ public OUser repair();
+
public void close();
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurityNull.java b/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurityNull.java
index 7e084c73d7f..cd3aee0abe9 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurityNull.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurityNull.java
@@ -68,4 +68,8 @@ public OUser authenticate(String iUsername, String iUserPassword) {
public void close() {
}
+
+ public OUser repair() {
+ return null;
+ }
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurityProxy.java b/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurityProxy.java
index bfc5c9212c6..f9c14268cf2 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurityProxy.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurityProxy.java
@@ -28,8 +28,8 @@
* @author Luca Garulli
*
*/
-public class OSecurityProxy extends OProxedResource<OSecurityShared> implements OSecurity {
- public OSecurityProxy(final OSecurityShared iDelegate, final ODatabaseRecord iDatabase) {
+public class OSecurityProxy extends OProxedResource<OSecurity> implements OSecurity {
+ public OSecurityProxy(final OSecurity iDelegate, final ODatabaseRecord iDatabase) {
super(iDelegate, iDatabase);
}
@@ -80,4 +80,8 @@ public List<ODocument> getRoles() {
public String toString() {
return delegate.toString();
}
+
+ public OUser repair() {
+ return delegate.repair();
+ }
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurityShared.java b/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurityShared.java
index 9469ead306a..c028166aef9 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurityShared.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurityShared.java
@@ -168,25 +168,10 @@ public OUser create() {
acquireExclusiveLock();
try {
- final ODatabaseRecord database = getDatabase();
-
- if (!database.getMetadata().getSchema().getClasses().isEmpty())
+ if (!getDatabase().getMetadata().getSchema().getClasses().isEmpty())
throw new OSecurityException("Default users and roles already installed");
- // CREATE ROLE AND USER SCHEMA CLASSES
- final OClass roleClass = database.getMetadata().getSchema().createClass("ORole");
- roleClass.createProperty("name", OType.STRING).setMandatory(true).setNotNull(true);
- roleClass.createProperty("mode", OType.BYTE);
- roleClass.createProperty("rules", OType.EMBEDDEDMAP, OType.BYTE);
-
- final OClass userClass = database.getMetadata().getSchema().createClass("OUser");
- userClass.createProperty("name", OType.STRING).setMandatory(true).setNotNull(true);
- userClass.createProperty("password", OType.STRING).setMandatory(true).setNotNull(true);
- userClass.createProperty("roles", OType.LINKSET, roleClass);
-
- // CREATE ROLES AND USERS
- final ORole adminRole = createRole(ORole.ADMIN, ORole.ALLOW_MODES.ALLOW_ALL_BUT);
- final OUser adminUser = createUser(OUser.ADMIN, OUser.ADMIN, new String[] { adminRole.getName() });
+ final OUser adminUser = createMetadata();
final ORole readerRole = createRole("reader", ORole.ALLOW_MODES.DENY_ALL_BUT);
readerRole.addRule(ODatabaseSecurityResources.DATABASE, ORole.PERMISSION_READ);
@@ -222,6 +207,57 @@ public OUser create() {
}
}
+ /**
+ * Repairs the security structure if broken by creating the ADMIN role and user with default password.
+ *
+ * @return
+ */
+ public OUser repair() {
+ acquireExclusiveLock();
+ try {
+
+ return createMetadata();
+
+ } finally {
+ releaseExclusiveLock();
+ }
+ }
+
+ protected OUser createMetadata() {
+ final ODatabaseRecord database = getDatabase();
+
+ OClass roleClass = database.getMetadata().getSchema().getClass("ORole");
+ if (roleClass == null)
+ roleClass = database.getMetadata().getSchema().createClass("ORole");
+ if (!roleClass.existsProperty("name"))
+ roleClass.createProperty("name", OType.STRING).setMandatory(true).setNotNull(true);
+ if (!roleClass.existsProperty("mode"))
+ roleClass.createProperty("mode", OType.BYTE);
+ if (!roleClass.existsProperty("rules"))
+ roleClass.createProperty("rules", OType.EMBEDDEDMAP, OType.BYTE);
+
+ OClass userClass = database.getMetadata().getSchema().getClass("OUser");
+ if (userClass == null)
+ userClass = database.getMetadata().getSchema().createClass("OUser");
+ if (!userClass.existsProperty("name"))
+ userClass.createProperty("name", OType.STRING).setMandatory(true).setNotNull(true);
+ if (!userClass.existsProperty("password"))
+ userClass.createProperty("password", OType.STRING).setMandatory(true).setNotNull(true);
+ if (!userClass.existsProperty("roles"))
+ userClass.createProperty("roles", OType.LINKSET, roleClass);
+
+ // CREATE ROLES AND USERS
+ ORole adminRole = getRole(ORole.ADMIN);
+ if (adminRole == null)
+ adminRole = createRole(ORole.ADMIN, ORole.ALLOW_MODES.ALLOW_ALL_BUT);
+
+ OUser adminUser = getUser(OUser.ADMIN);
+ if (adminUser == null)
+ adminUser = createUser(OUser.ADMIN, OUser.ADMIN, new String[] { adminRole.getName() });
+
+ return adminUser;
+ }
+
public void close() {
}
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/DbListenerTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/DbListenerTest.java
index d40f050f987..297035367a8 100644
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/DbListenerTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/DbListenerTest.java
@@ -51,6 +51,7 @@ public class DbListenerTest {
protected int onCreate = 0;
protected int onDelete = 0;
protected int onOpen = 0;
+ protected int onCorruption = 0;
protected int onRecordPulled = 0;
protected int onClusterConfigurationChange = 0;
@@ -92,6 +93,11 @@ public void onDelete(ODatabase iDatabase) {
public void onOpen(ODatabase iDatabase) {
onOpen++;
}
+
+ public boolean onCorruptionRepairDatabase(ODatabase iDatabase, final String iReason) {
+ onCorruption++;
+ return true;
+ }
}
@Parameters(value = "url")
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/TransactionAtomicTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/TransactionAtomicTest.java
index 186e7488884..72accc88b11 100644
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/TransactionAtomicTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/TransactionAtomicTest.java
@@ -126,6 +126,10 @@ public void onDelete(ODatabase iDatabase) {
public void onOpen(ODatabase iDatabase) {
}
+
+ public boolean onCorruptionRepairDatabase(ODatabase iDatabase, final String iReason) {
+ return true;
+ }
});
db.commit();
diff --git a/tools/src/main/java/com/orientechnologies/orient/console/OConsoleDatabaseApp.java b/tools/src/main/java/com/orientechnologies/orient/console/OConsoleDatabaseApp.java
index c8c614531fd..2b07332f920 100644
--- a/tools/src/main/java/com/orientechnologies/orient/console/OConsoleDatabaseApp.java
+++ b/tools/src/main/java/com/orientechnologies/orient/console/OConsoleDatabaseApp.java
@@ -27,6 +27,7 @@
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
+import java.util.Scanner;
import java.util.Set;
import com.orientechnologies.common.console.TTYConsoleReader;
@@ -169,6 +170,8 @@ public void connect(
currentDatabase = new ODatabaseDocumentTx(iURL);
if (currentDatabase == null)
throw new OException("Database " + iURL + " not found");
+
+ currentDatabase.registerListener(new OConsoleDatabaseListener(this));
currentDatabase.open(iUserName, iUserPassword);
currentDatabaseName = currentDatabase.getName();
@@ -1430,7 +1433,15 @@ private void dumpRecordDetails() {
out.println();
}
- public void onMessage(String iText) {
+ public String ask(final String iText) {
+ out.print(iText);
+ final Scanner scanner = new Scanner(in);
+ final String answer = scanner.nextLine();
+ scanner.close();
+ return answer;
+ }
+
+ public void onMessage(final String iText) {
out.print(iText);
}
diff --git a/tools/src/main/java/com/orientechnologies/orient/console/OConsoleDatabaseListener.java b/tools/src/main/java/com/orientechnologies/orient/console/OConsoleDatabaseListener.java
new file mode 100644
index 00000000000..64e1fbcf6a4
--- /dev/null
+++ b/tools/src/main/java/com/orientechnologies/orient/console/OConsoleDatabaseListener.java
@@ -0,0 +1,45 @@
+package com.orientechnologies.orient.console;
+
+import com.orientechnologies.orient.core.db.ODatabase;
+import com.orientechnologies.orient.core.db.ODatabaseListener;
+
+public class OConsoleDatabaseListener implements ODatabaseListener {
+ OConsoleDatabaseApp console;
+
+ public OConsoleDatabaseListener(OConsoleDatabaseApp console) {
+ this.console = console;
+ }
+
+ public void onCreate(ODatabase iDatabase) {
+ }
+
+ public void onDelete(ODatabase iDatabase) {
+ }
+
+ public void onOpen(ODatabase iDatabase) {
+ }
+
+ public void onBeforeTxBegin(ODatabase iDatabase) {
+ }
+
+ public void onBeforeTxRollback(ODatabase iDatabase) {
+ }
+
+ public void onAfterTxRollback(ODatabase iDatabase) {
+ }
+
+ public void onBeforeTxCommit(ODatabase iDatabase) {
+ }
+
+ public void onAfterTxCommit(ODatabase iDatabase) {
+ }
+
+ public void onClose(ODatabase iDatabase) {
+ }
+
+ public boolean onCorruptionRepairDatabase(ODatabase iDatabase, final String iReason) {
+ final String answer = console
+ .ask("\nDatabase seems corrupted. The cause is " + iReason + ".\nDo you want to repair it (Y/n)? ");
+ return answer.length() == 0 || answer.equalsIgnoreCase("Y") || answer.equalsIgnoreCase("Yes");
+ }
+}
|
701a907010e10b5214b16008b96992a42d1e82ae
|
restlet-framework-java
|
- Some unit test were catching exceptions,- preventing JUnit from failing the tests.--
|
c
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/plugins/internal/com.noelios.restlet.test/src/com/noelios/restlet/test/FileClientTestCase.java b/plugins/internal/com.noelios.restlet.test/src/com/noelios/restlet/test/FileClientTestCase.java
index c843bd6eea..4d551f3ca2 100644
--- a/plugins/internal/com.noelios.restlet.test/src/com/noelios/restlet/test/FileClientTestCase.java
+++ b/plugins/internal/com.noelios.restlet.test/src/com/noelios/restlet/test/FileClientTestCase.java
@@ -1,7 +1,7 @@
-
package com.noelios.restlet.test;
import java.io.File;
+import java.io.IOException;
import junit.framework.TestCase;
@@ -21,30 +21,24 @@
public class FileClientTestCase extends TestCase
{
- public void testFileClient()
+ public void testFileClient() throws IOException
{
- try
- {
- String text = "Test content\r\nLine 2\r\nLine2";
- Client fc = new DefaultClient(Protocols.FILE);
- FileReference fr = new FileReference(File.createTempFile("Restlet", ".txt"));
-
- // Write the text to temporary file
- Call call = fc.put(fr.toString(), new StringRepresentation(text));
- assertTrue(call.getStatus().equals(Statuses.SUCCESS_OK));
-
- // Get the text and compare to the original
- call = fc.get(fr.toString());
- assertTrue(call.getStatus().equals(Statuses.SUCCESS_OK));
-
- // Delete the file
- call = fc.delete(fr.toString());
- assertTrue(call.getStatus().equals(Statuses.SUCCESS_NO_CONTENT));
- }
- catch (Exception e)
- {
- e.printStackTrace();
- }
+ String text = "Test content\r\nLine 2\r\nLine2";
+ Client fc = new DefaultClient(Protocols.FILE);
+ FileReference fr = new FileReference(File.createTempFile("Restlet",
+ ".txt"));
+
+ // Write the text to temporary file
+ Call call = fc.put(fr.toString(), new StringRepresentation(text));
+ assertTrue(call.getStatus().equals(Statuses.SUCCESS_OK));
+
+ // Get the text and compare to the original
+ call = fc.get(fr.toString());
+ assertTrue(call.getStatus().equals(Statuses.SUCCESS_OK));
+
+ // Delete the file
+ call = fc.delete(fr.toString());
+ assertTrue(call.getStatus().equals(Statuses.SUCCESS_NO_CONTENT));
}
-
+
}
diff --git a/plugins/internal/com.noelios.restlet.test/src/com/noelios/restlet/test/RedirectTestCase.java b/plugins/internal/com.noelios.restlet.test/src/com/noelios/restlet/test/RedirectTestCase.java
index 210641d67b..78c842c218 100644
--- a/plugins/internal/com.noelios.restlet.test/src/com/noelios/restlet/test/RedirectTestCase.java
+++ b/plugins/internal/com.noelios.restlet.test/src/com/noelios/restlet/test/RedirectTestCase.java
@@ -22,8 +22,6 @@
package com.noelios.restlet.test;
-import java.io.IOException;
-
import junit.framework.TestCase;
import org.restlet.AbstractRestlet;
@@ -46,97 +44,76 @@
*/
public class RedirectTestCase extends TestCase
{
- /**
- * Tests the cookies parsing.
- */
- public void testRedirect() throws IOException
- {
- try
- {
- // Create a new Restlet container
- RestletContainer myContainer = new RestletContainer();
-
- // Create the client connectors
- myContainer.getClients().put("TestClient", Protocols.HTTP);
- myContainer.getClients().put("ProxyClient", Protocols.HTTP);
-
- // Create the proxy Restlet
- String target = "http://localhost:9090${path}#[if query]?${query}#[end]";
- RedirectRestlet proxy = new RedirectRestlet(myContainer, target, RedirectRestlet.MODE_CONNECTOR);
- proxy.setConnectorName("ProxyClient");
-
- // Create a new Restlet that will display some path information.
- Restlet trace = new AbstractRestlet(myContainer)
- {
- public void handle(Call call)
- {
- // Print the requested URI path
- String output = "Resource URI: " + call.getResourceRef() + '\n' +
- "Context path: " + call.getContextPath() + '\n' +
- "Resource path: " + call.getResourcePath() + '\n' +
- "Query string: " + call.getResourceRef().getQuery() + '\n' +
- "Method name: " + call.getMethod() + '\n';
- call.setOutput(new StringRepresentation(output, MediaTypes.TEXT_PLAIN));
- }
- };
-
- // Create the server connectors
- myContainer.getServers().put("ProxyServer", new DefaultServer(Protocols.HTTP, proxy, 8080));
- myContainer.getServers().put("OriginServer", new DefaultServer(Protocols.HTTP, trace, 9090));
-
- // Now, let's start the container!
- myContainer.start();
-
- // Tests
- String uri = "http://localhost:8080/?foo=bar";
- testCall(myContainer, Methods.GET, uri);
- testCall(myContainer, Methods.POST, uri);
- testCall(myContainer, Methods.PUT, uri);
- testCall(myContainer, Methods.DELETE, uri);
-
- uri = "http://localhost:8080/abcd/efgh/ijkl?foo=bar&foo=beer";
- testCall(myContainer, Methods.GET, uri);
- testCall(myContainer, Methods.POST, uri);
- testCall(myContainer, Methods.PUT, uri);
- testCall(myContainer, Methods.DELETE, uri);
-
- uri = "http://localhost:8080/v1/client/kwse/CnJlNUQV9%252BNNqbUf7Lhs2BYEK2Y%253D/user/johnm/uVGYTDK4kK4zsu96VHGeTCzfwso%253D/";
- testCall(myContainer, Methods.GET, uri);
-
- // Stop the container
- myContainer.stop();
- }
- catch(Exception e)
- {
- e.printStackTrace();
- }
- }
-
- private void testCall(RestletContainer myContainer, Method method, String uri)
- {
- try
- {
- Call call = new DefaultCall();
- call.setMethod(method);
- call.setResourceRef(uri);
- myContainer.callClient("TestClient", call);
- call.getOutput().write(System.out);
- }
- catch(IOException e)
- {
- e.printStackTrace();
- }
- }
-
- public static void main(String[] args)
- {
- try
- {
- new RedirectTestCase().testRedirect();
- }
- catch(IOException e)
- {
- e.printStackTrace();
- }
- }
+ /**
+ * Tests the cookies parsing.
+ */
+ public void testRedirect() throws Exception
+ {
+ // Create a new Restlet container
+ RestletContainer myContainer = new RestletContainer();
+
+ // Create the client connectors
+ myContainer.getClients().put("TestClient", Protocols.HTTP);
+ myContainer.getClients().put("ProxyClient", Protocols.HTTP);
+
+ // Create the proxy Restlet
+ String target = "http://localhost:9090${path}#[if query]?${query}#[end]";
+ RedirectRestlet proxy = new RedirectRestlet(myContainer, target,
+ RedirectRestlet.MODE_CONNECTOR);
+ proxy.setConnectorName("ProxyClient");
+
+ // Create a new Restlet that will display some path information.
+ Restlet trace = new AbstractRestlet(myContainer)
+ {
+ public void handle(Call call)
+ {
+ // Print the requested URI path
+ String output = "Resource URI: " + call.getResourceRef() + '\n'
+ + "Context path: " + call.getContextPath() + '\n'
+ + "Resource path: " + call.getResourcePath() + '\n'
+ + "Query string: " + call.getResourceRef().getQuery() + '\n'
+ + "Method name: " + call.getMethod() + '\n';
+ call.setOutput(new StringRepresentation(output,
+ MediaTypes.TEXT_PLAIN));
+ }
+ };
+
+ // Create the server connectors
+ myContainer.getServers().put("ProxyServer",
+ new DefaultServer(Protocols.HTTP, proxy, 8080));
+ myContainer.getServers().put("OriginServer",
+ new DefaultServer(Protocols.HTTP, trace, 9090));
+
+ // Now, let's start the container!
+ myContainer.start();
+
+ // Tests
+ String uri = "http://localhost:8080/?foo=bar";
+ testCall(myContainer, Methods.GET, uri);
+ testCall(myContainer, Methods.POST, uri);
+ testCall(myContainer, Methods.PUT, uri);
+ testCall(myContainer, Methods.DELETE, uri);
+
+ uri = "http://localhost:8080/abcd/efgh/ijkl?foo=bar&foo=beer";
+ testCall(myContainer, Methods.GET, uri);
+ testCall(myContainer, Methods.POST, uri);
+ testCall(myContainer, Methods.PUT, uri);
+ testCall(myContainer, Methods.DELETE, uri);
+
+ uri = "http://localhost:8080/v1/client/kwse/CnJlNUQV9%252BNNqbUf7Lhs2BYEK2Y%253D/user/johnm/uVGYTDK4kK4zsu96VHGeTCzfwso%253D/";
+ testCall(myContainer, Methods.GET, uri);
+
+ // Stop the container
+ myContainer.stop();
+ }
+
+ private void testCall(RestletContainer myContainer, Method method, String uri) throws Exception
+ {
+ Call call = new DefaultCall();
+ call.setMethod(method);
+ call.setResourceRef(uri);
+ myContainer.callClient("TestClient", call);
+ call.getOutput().write(System.out);
+ }
+
}
diff --git a/plugins/internal/org.restlet.test/src/org/restlet/test/RestletTestCase.java b/plugins/internal/org.restlet.test/src/org/restlet/test/RestletTestCase.java
index a0d32b350b..a13f198d05 100644
--- a/plugins/internal/org.restlet.test/src/org/restlet/test/RestletTestCase.java
+++ b/plugins/internal/org.restlet.test/src/org/restlet/test/RestletTestCase.java
@@ -33,6 +33,5 @@
*/
public abstract class RestletTestCase extends TestCase
{
-
// Currently emtpy.
}
|
e24b71e70035f9a9baf7ec19c279311eceec31a9
|
spring-framework
|
Shutdown Reactor env when relay handler is- stopped--The Reactor Environment (that's used by the TcpClient) manages a-number of threads. To ensure that these threads are cleaned up-Environment.shutdown() must be called when the Environment is no-longer needed.-
|
c
|
https://github.com/spring-projects/spring-framework
|
diff --git a/spring-websocket/src/main/java/org/springframework/web/messaging/stomp/support/StompRelayPubSubMessageHandler.java b/spring-websocket/src/main/java/org/springframework/web/messaging/stomp/support/StompRelayPubSubMessageHandler.java
index 2e1d31d42947..ef2d9eaaea48 100644
--- a/spring-websocket/src/main/java/org/springframework/web/messaging/stomp/support/StompRelayPubSubMessageHandler.java
+++ b/spring-websocket/src/main/java/org/springframework/web/messaging/stomp/support/StompRelayPubSubMessageHandler.java
@@ -73,6 +73,8 @@ public class StompRelayPubSubMessageHandler extends AbstractPubSubMessageHandler
private MessageConverter payloadConverter;
+ private Environment environment;
+
private TcpClient<String, String> tcpClient;
private final Map<String, RelaySession> relaySessions = new ConcurrentHashMap<String, RelaySession>();
@@ -181,9 +183,9 @@ public boolean isRunning() {
@Override
public void start() {
synchronized (this.lifecycleMonitor) {
-
+ this.environment = new Environment();
this.tcpClient = new TcpClient.Spec<String, String>(NettyTcpClient.class)
- .using(new Environment())
+ .using(this.environment)
.codec(new DelimitedCodec<String, String>((byte) 0, true, StandardCodecs.STRING_CODEC))
.connect(this.relayHost, this.relayPort)
.get();
@@ -214,6 +216,7 @@ public void stop() {
this.running = false;
try {
this.tcpClient.close().await(5000, TimeUnit.MILLISECONDS);
+ this.environment.shutdown();
}
catch (InterruptedException e) {
// ignore
|
7709c68f6312703b60b40f9ded1bd6121daa1d58
|
elasticsearch
|
optimize boolean queries when possible--
|
p
|
https://github.com/elastic/elasticsearch
|
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/json/BoolJsonQueryParser.java b/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/json/BoolJsonQueryParser.java
index f28794a67d898..965a6899b3f04 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/json/BoolJsonQueryParser.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/json/BoolJsonQueryParser.java
@@ -111,6 +111,6 @@ public class BoolJsonQueryParser extends AbstractIndexComponent implements JsonQ
if (minimumNumberShouldMatch != -1) {
query.setMinimumNumberShouldMatch(minimumNumberShouldMatch);
}
- return fixNegativeQueryIfNeeded(query);
+ return optimizeQuery(fixNegativeQueryIfNeeded(query));
}
}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/json/FieldJsonQueryParser.java b/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/json/FieldJsonQueryParser.java
index 9de02f0f8a12f..1979f9376cc1c 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/json/FieldJsonQueryParser.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/json/FieldJsonQueryParser.java
@@ -159,7 +159,7 @@ public class FieldJsonQueryParser extends AbstractIndexComponent implements Json
try {
Query query = queryParser.parse(queryString);
query.setBoost(boost);
- return fixNegativeQueryIfNeeded(query);
+ return optimizeQuery(fixNegativeQueryIfNeeded(query));
} catch (ParseException e) {
throw new QueryParsingException(index, "Failed to parse query [" + queryString + "]", e);
}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/json/QueryStringJsonQueryParser.java b/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/json/QueryStringJsonQueryParser.java
index e92059422a288..b65d299dc7cc3 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/json/QueryStringJsonQueryParser.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/json/QueryStringJsonQueryParser.java
@@ -156,7 +156,7 @@ public class QueryStringJsonQueryParser extends AbstractIndexComponent implement
try {
Query query = queryParser.parse(queryString);
query.setBoost(boost);
- return fixNegativeQueryIfNeeded(query);
+ return optimizeQuery(fixNegativeQueryIfNeeded(query));
} catch (ParseException e) {
throw new QueryParsingException(index, "Failed to parse query [" + queryString + "]", e);
}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/support/QueryParsers.java b/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/support/QueryParsers.java
index b05428b3b4bec..4f0a0691558e3 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/support/QueryParsers.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/support/QueryParsers.java
@@ -37,6 +37,35 @@ private QueryParsers() {
}
+ /**
+ * Optimizes the given query and returns the optimized version of it.
+ */
+ public static Query optimizeQuery(Query q) {
+ if (q instanceof BooleanQuery) {
+ return optimizeBooleanQuery((BooleanQuery) q);
+ }
+ return q;
+ }
+
+ public static BooleanQuery optimizeBooleanQuery(BooleanQuery q) {
+ BooleanQuery optimized = new BooleanQuery(q.isCoordDisabled());
+ optimized.setMinimumNumberShouldMatch(q.getMinimumNumberShouldMatch());
+ optimizeBooleanQuery(optimized, q);
+ return optimized;
+ }
+
+ public static void optimizeBooleanQuery(BooleanQuery optimized, BooleanQuery q) {
+ for (BooleanClause clause : q.clauses()) {
+ Query cq = clause.getQuery();
+ cq.setBoost(cq.getBoost() * q.getBoost());
+ if (cq instanceof BooleanQuery && !clause.isRequired() && !clause.isProhibited()) {
+ optimizeBooleanQuery(optimized, (BooleanQuery) cq);
+ } else {
+ optimized.add(clause);
+ }
+ }
+ }
+
public static boolean isNegativeQuery(Query q) {
if (!(q instanceof BooleanQuery)) {
return false;
|
2b5e3f5586ad0c71dcf404a166e18529a960eb00
|
elasticsearch
|
Fixed resolving closest nested object when- sorting on a field inside nested object--
|
c
|
https://github.com/elastic/elasticsearch
|
diff --git a/src/main/java/org/elasticsearch/search/sort/SortParseElement.java b/src/main/java/org/elasticsearch/search/sort/SortParseElement.java
index c89069ca0443a..67301e1ac26dc 100644
--- a/src/main/java/org/elasticsearch/search/sort/SortParseElement.java
+++ b/src/main/java/org/elasticsearch/search/sort/SortParseElement.java
@@ -242,18 +242,24 @@ private static ObjectMapper resolveClosestNestedObjectMapper(String fieldName, S
int indexOf = fieldName.lastIndexOf('.');
if (indexOf == -1) {
return null;
- }
+ } else {
+ do {
+ String objectPath = fieldName.substring(0, indexOf);
+ ObjectMappers objectMappers = context.mapperService().objectMapper(objectPath);
+ if (objectMappers == null) {
+ return null;
+ }
- String objectPath = fieldName.substring(0, indexOf);
- ObjectMappers objectMappers = context.mapperService().objectMapper(objectPath);
- if (objectMappers == null) {
- return null;
- }
+ if (objectMappers.hasNested()) {
+ for (ObjectMapper objectMapper : objectMappers) {
+ if (objectMapper.nested().isNested()) {
+ return objectMapper;
+ }
+ }
+ }
- for (ObjectMapper objectMapper : objectMappers) {
- if (objectMapper.nested().isNested()) {
- return objectMapper;
- }
+ indexOf = objectPath.lastIndexOf('.');
+ } while (indexOf != -1);
}
return null;
diff --git a/src/test/java/org/elasticsearch/test/integration/nested/SimpleNestedTests.java b/src/test/java/org/elasticsearch/test/integration/nested/SimpleNestedTests.java
index f7782facbd3a5..246ebcd4a4065 100644
--- a/src/test/java/org/elasticsearch/test/integration/nested/SimpleNestedTests.java
+++ b/src/test/java/org/elasticsearch/test/integration/nested/SimpleNestedTests.java
@@ -714,6 +714,9 @@ public void testSortNestedWithNestedFilter() throws Exception {
.startObject("child")
.field("filter", true)
.field("child_values", 1l)
+ .startObject("child_obj")
+ .field("value", 1l)
+ .endObject()
.endObject()
.startObject("child")
.field("filter", false)
@@ -742,6 +745,9 @@ public void testSortNestedWithNestedFilter() throws Exception {
.startObject("child")
.field("filter", true)
.field("child_values", 2l)
+ .startObject("child_obj")
+ .field("value", 2l)
+ .endObject()
.endObject()
.startObject("child")
.field("filter", false)
@@ -770,6 +776,9 @@ public void testSortNestedWithNestedFilter() throws Exception {
.startObject("child")
.field("filter", true)
.field("child_values", 3l)
+ .startObject("child_obj")
+ .field("value", 3l)
+ .endObject()
.endObject()
.startObject("child")
.field("filter", false)
@@ -884,6 +893,24 @@ public void testSortNestedWithNestedFilter() throws Exception {
// assertThat(searchResponse.getHits().getHits()[1].sortValues()[0].toString(), equalTo("-2"));
// assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("1"));
// assertThat(searchResponse.getHits().getHits()[2].sortValues()[0].toString(), equalTo("-1"));
+
+ // Check if closest nested type is resolved
+ searchResponse = client.prepareSearch()
+ .setQuery(matchAllQuery())
+ .addSort(
+ SortBuilders.fieldSort("parent.child.child_obj.value")
+ .setNestedFilter(FilterBuilders.termFilter("parent.child.filter", true))
+ .order(SortOrder.ASC)
+ )
+ .execute().actionGet();
+ assertThat(searchResponse.getHits().totalHits(), equalTo(3l));
+ assertThat(searchResponse.getHits().getHits().length, equalTo(3));
+ assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1"));
+ assertThat(searchResponse.getHits().getHits()[0].sortValues()[0].toString(), equalTo("1"));
+ assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2"));
+ assertThat(searchResponse.getHits().getHits()[1].sortValues()[0].toString(), equalTo("2"));
+ assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("3"));
+ assertThat(searchResponse.getHits().getHits()[2].sortValues()[0].toString(), equalTo("3"));
}
}
\ No newline at end of file
|
a6fd48ba12e0f82b3ea937227845a63e5c1f8bf7
|
hbase
|
HBASE-11499- AsyncProcess.buildDetailedErrorMessage concatenates strings using + in a loop- (Mike Drob)--
|
p
|
https://github.com/apache/hbase
|
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
index c184147d59dd..d1bcc0b2a77b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
@@ -1407,23 +1407,24 @@ private void decActionCounter(int index) {
}
private String buildDetailedErrorMsg(String string, int index) {
- String error = string + "; called for " + index +
- ", actionsInProgress " + actionsInProgress.get() + "; replica gets: ";
+ StringBuilder error = new StringBuilder(128);
+ error.append(string).append("; called for ").append(index).append(", actionsInProgress ")
+ .append(actionsInProgress.get()).append("; replica gets: ");
if (replicaGetIndices != null) {
for (int i = 0; i < replicaGetIndices.length; ++i) {
- error += replicaGetIndices[i] + ", ";
+ error.append(replicaGetIndices[i]).append(", ");
}
} else {
- error += (hasAnyReplicaGets ? "all" : "none");
+ error.append(hasAnyReplicaGets ? "all" : "none");
}
- error += "; results ";
+ error.append("; results ");
if (results != null) {
for (int i = 0; i < results.length; ++i) {
Object o = results[i];
- error += ((o == null) ? "null" : o.toString()) + ", ";
+ error.append(((o == null) ? "null" : o.toString())).append(", ");
}
}
- return error;
+ return error.toString();
}
@Override
|
afb42145aeb0d228a638628582631975db4bb473
|
drools
|
-Few fixes to manners--git-svn-id: https://svn.jboss.org/repos/labs/trunk/labs/jbossrules@2061 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-
|
c
|
https://github.com/kiegroup/drools
|
diff --git a/drools-core/src/test/java/org/drools/examples/manners/Chosen.java b/drools-core/src/test/java/org/drools/examples/manners/Chosen.java
index 9ea43fae8b6..cf75f2ae2a9 100644
--- a/drools-core/src/test/java/org/drools/examples/manners/Chosen.java
+++ b/drools-core/src/test/java/org/drools/examples/manners/Chosen.java
@@ -76,6 +76,6 @@ public Hobby getHobby() {
}
public String toString() {
- return "{guest name=" + this.guestName + ",hobbies=" + this.hobby + "}";
+ return "{Chosen id=" + this.id + ", name=" + this.guestName + ", hobbies=" + this.hobby + "}";
}
}
\ No newline at end of file
diff --git a/drools-core/src/test/java/org/drools/examples/manners/MannersTest.java b/drools-core/src/test/java/org/drools/examples/manners/MannersTest.java
index eeec4dbb19b..d41aae4b5f3 100644
--- a/drools-core/src/test/java/org/drools/examples/manners/MannersTest.java
+++ b/drools-core/src/test/java/org/drools/examples/manners/MannersTest.java
@@ -109,9 +109,17 @@ protected void setUp() throws Exception {
this.booleanNotEqualEvaluator = EvaluatorFactory.getInstance().getEvaluator( Evaluator.BOOLEAN_TYPE,
Evaluator.NOT_EQUAL );
- }
-
- public void test1() throws DuplicateRuleNameException, InvalidRuleException, IntrospectionException, RuleIntegrationException, RuleSetIntegrationException, InvalidPatternException, FactException, IOException, InterruptedException {
+ }
+
+ public void test1() throws DuplicateRuleNameException,
+ InvalidRuleException,
+ IntrospectionException,
+ RuleIntegrationException,
+ RuleSetIntegrationException,
+ InvalidPatternException,
+ FactException,
+ IOException,
+ InterruptedException {
RuleSet ruleSet = new RuleSet( "Miss Manners" );
ruleSet.addRule( getAssignFirstSeatRule() );
ruleSet.addRule( getMakePath() );
@@ -119,59 +127,58 @@ public void test1() throws DuplicateRuleNameException, InvalidRuleException, Int
ruleSet.addRule( getPathDone() );
ruleSet.addRule( getAreWeDone() );
ruleSet.addRule( getContinueProcessing() );
-// ruleSet.addRule( getAllDone() );
-
+ // ruleSet.addRule( getAllDone() );
+
final RuleBaseImpl ruleBase = new RuleBaseImpl();
ruleBase.addRuleSet( ruleSet );
-
-// final ReteooJungViewer viewer = new ReteooJungViewer(ruleBase);
-//
-// javax.swing.SwingUtilities.invokeLater(new Runnable() {
-// public void run() {
-// viewer.showGUI();
-// }
-// });
-
-
+
+ // final ReteooJungViewer viewer = new ReteooJungViewer(ruleBase);
+ //
+ // javax.swing.SwingUtilities.invokeLater(new Runnable() {
+ // public void run() {
+ // viewer.showGUI();
+ // }
+ // });
+
WorkingMemory workingMemory = ruleBase.newWorkingMemory();
-
+
InputStream is = getClass().getResourceAsStream( "/manners16.dat" );
- List list = getInputObjects(is);
- for (Iterator it = list.iterator(); it.hasNext(); ) {
- FactHandle handle = workingMemory.assertObject( it.next() );
+ List list = getInputObjects( is );
+ for ( Iterator it = list.iterator(); it.hasNext(); ) {
+ FactHandle handle = workingMemory.assertObject( it.next() );
}
-
- workingMemory.assertObject( new Count(0) );
-
+
+ workingMemory.assertObject( new Count( 1 ) );
+
workingMemory.fireAllRules();
-
-// while (viewer.isRunning()) {
-// Thread.sleep( 1000 );
-// }
-
+
+ // while (viewer.isRunning()) {
+ // Thread.sleep( 1000 );
+ // }
+
}
/**
* <pre>
- * rule assignFirstSeat() {
- * Context context;
- * Guest guest;
- * Count count;
- * when {
- * context : Context( state == Context.START_UP )
- * guest : Guest()
- * count : Count()
- * } then {
- * String guestName = guest.getName();
- * drools.assert( new Seating( count.getValue(), 1, true, 1, guestName, 1, guestName) );
- * drools.assert( new Path( count.getValue(), 1, guestName ) );
- * count.setCount( count.getValue() + 1 );
- *
- * System.out.println( "seat 1 " + guest.getName() + " );
- *
- * context.setPath( Context.ASSIGN_SEATS );
- * }
- * }
+ * rule assignFirstSeat() {
+ * Context context;
+ * Guest guest;
+ * Count count;
+ * when {
+ * context : Context( state == Context.START_UP )
+ * guest : Guest()
+ * count : Count()
+ * } then {
+ * String guestName = guest.getName();
+ * drools.assert( new Seating( count.getValue(), 1, true, 1, guestName, 1, guestName) );
+ * drools.assert( new Path( count.getValue(), 1, guestName ) );
+ * count.setCount( count.getValue() + 1 );
+ *
+ * System.out.println( "seat 1 " + guest.getName() + " );
+ *
+ * context.setPath( Context.ASSIGN_SEATS );
+ * }
+ * }
* </pre>
*
*
@@ -236,16 +243,21 @@ public void invoke(Activation activation) throws ConsequenceException {
String guestName = guest.getName();
- drools.assertObject( new Seating( count.getValue(),
- 0,
- true,
- 1,
- guestName,
- 1,
- guestName ) );
- drools.assertObject( new Path( count.getValue(),
+ Seating seating = new Seating( count.getValue(),
+ 0,
+ true,
+ 1,
+ guestName,
1,
- guestName ) );
+ guestName );
+
+ drools.assertObject( seating );
+
+ Path path = new Path( count.getValue(),
+ 0,
+ guestName );
+
+ drools.assertObject( path );
count.setValue( count.getValue() + 1 );
drools.modifyObject( tuple.getFactHandleForDeclaration( countDeclaration ),
@@ -254,9 +266,10 @@ public void invoke(Activation activation) throws ConsequenceException {
context.setState( Context.ASSIGN_SEATS );
drools.modifyObject( tuple.getFactHandleForDeclaration( contextDeclaration ),
context );
- System.out.println( "assigned first seat : " + guest );
+ System.out.println( "assign first seat : " + seating + " : " + path );
- } catch ( Exception e ) {
+ }
+ catch ( Exception e ) {
throw new ConsequenceException( e );
}
}
@@ -270,21 +283,21 @@ public void invoke(Activation activation) throws ConsequenceException {
/**
* <pre>
- * rule makePath() {
- * Context context;
- * int seatingId, seatingPid, pathSeat;
- * String pathGuestName;
- *
- * when {
- * context : Context( state == Context.MAKE_PATH )
- * Seating( seatingId:id, seatingPid:pid, pathDone == false )
- * Path( id == seatingPid, pathGuestName:guest, pathSeat:seat )
- * (not Path( id == seatingId, guestName == pathGuestName )
- * } else {
- * drools.assert( new Path( seatingId, pathSeat, pathGuestName ) );
- *
- * }
- * }
+ * rule makePath() {
+ * Context context;
+ * int seatingId, seatingPid, pathSeat;
+ * String pathGuestName;
+ *
+ * when {
+ * context : Context( state == Context.MAKE_PATH )
+ * Seating( seatingId:id, seatingPid:pid, pathDone == false )
+ * Path( id == seatingPid, pathGuestName:guest, pathSeat:seat )
+ * (not Path( id == seatingId, guestName == pathGuestName )
+ * } else {
+ * drools.assert( new Path( seatingId, pathSeat, pathGuestName ) );
+ *
+ * }
+ * }
* </pre>
*
* @return
@@ -292,7 +305,7 @@ public void invoke(Activation activation) throws ConsequenceException {
* @throws InvalidRuleException
*/
private Rule getMakePath() throws IntrospectionException,
- InvalidRuleException {
+ InvalidRuleException {
final Rule rule = new Rule( "makePath" );
// -----------
@@ -385,17 +398,18 @@ public void invoke(Activation activation) throws ConsequenceException {
tuple );
int id = ((Integer) tuple.get( seatingIdDeclaration )).intValue();
- String guestName = (String) tuple.get( pathGuestNameDeclaration );
int seat = ((Integer) tuple.get( pathSeatDeclaration )).intValue();
+ String guestName = (String) tuple.get( pathGuestNameDeclaration );
Path path = new Path( id,
seat,
guestName );
-
+
drools.assertObject( path );
-
+
System.out.println( "make path : " + path );
- } catch ( Exception e ) {
+ }
+ catch ( Exception e ) {
throw new ConsequenceException( e );
}
}
@@ -409,37 +423,37 @@ public void invoke(Activation activation) throws ConsequenceException {
/**
* <pre>
- * rule findSeating() {
- * Context context;
- * int seatingId, seatingPid;
- * String seatingRightGuestName, leftGuestName;
- * Sex rightGuestSex;
- * Hobby rightGuestHobby;
- * Count count;
- *
- * when {
- * context : Context( state == Context.ASSIGN_SEATS )
- * Seating( seatingId:id, seatingPid:pid, pathDone == true
- * seatingRightSeat:rightSeat seatingRightGuestName:rightGuestName )
- * Guest( name == seatingRightGuestName, rightGuestSex:sex, rightGuestHobby:hobby )
- * Guest( leftGuestName:name , sex != rightGuestSex, hobby == rightGuestHobby )
- *
- * count : Count()
- *
- * not ( Path( id == seatingId, guestName == leftGuestName) )
- * not ( Chosen( id == seatingId, guestName == leftGuestName, hobby == rightGuestHobby) )
- * } then {
- * int newSeat = rightSeat + 1;
- * drools.assert( new Seating( coung.getValue(), rightSeat, rightSeatName, leftGuestName, newSeat, countValue, id, false );
- * drools.assert( new Path( countValue, leftGuestName, newSeat );
- * drools.assert( new Chosen( id, leftGuestName, rightGuestHobby ) );
- *
- * System.out.println( "seat " + rightSeat + " " + rightSeatName + " " + leftGuestName );
- *
- * count.setCount( countValue + 1 );
- * context.setPath( Context.MAKE_PATH );
- * }
- * }
+ * rule findSeating() {
+ * Context context;
+ * int seatingId, seatingPid;
+ * String seatingRightGuestName, leftGuestName;
+ * Sex rightGuestSex;
+ * Hobby rightGuestHobby;
+ * Count count;
+ *
+ * when {
+ * context : Context( state == Context.ASSIGN_SEATS )
+ * Seating( seatingId:id, seatingPid:pid, pathDone == true
+ * seatingRightSeat:rightSeat seatingRightGuestName:rightGuestName )
+ * Guest( name == seatingRightGuestName, rightGuestSex:sex, rightGuestHobby:hobby )
+ * Guest( leftGuestName:name , sex != rightGuestSex, hobby == rightGuestHobby )
+ *
+ * count : Count()
+ *
+ * not ( Path( id == seatingId, guestName == leftGuestName) )
+ * not ( Chosen( id == seatingId, guestName == leftGuestName, hobby == rightGuestHobby) )
+ * } then {
+ * int newSeat = rightSeat + 1;
+ * drools.assert( new Seating( coung.getValue(), rightSeat, rightSeatName, leftGuestName, newSeat, countValue, id, false );
+ * drools.assert( new Path( countValue, leftGuestName, newSeat );
+ * drools.assert( new Chosen( id, leftGuestName, rightGuestHobby ) );
+ *
+ * System.out.println( "seat " + rightSeat + " " + rightSeatName + " " + leftGuestName );
+ *
+ * count.setCount( countValue + 1 );
+ * context.setPath( Context.MAKE_PATH );
+ * }
+ * }
* </pre>
*
* @return
@@ -447,7 +461,7 @@ public void invoke(Activation activation) throws ConsequenceException {
* @throws InvalidRuleException
*/
private Rule getFindSeating() throws IntrospectionException,
- InvalidRuleException {
+ InvalidRuleException {
final Rule rule = new Rule( "findSeating" );
// ---------------
@@ -468,7 +482,7 @@ private Rule getFindSeating() throws IntrospectionException,
// -------------------------------
// Seating( seatingId:id, seatingPid:pid, pathDone == true
- // seatingRightSeat:rightSeat seatingRightGuestName:rightGuestName )
+ // seatingRightSeat:rightSeat seatingRightGuestName:rightGuestName )
// -------------------------------
Column seatingColumn = new Column( 1,
seatingType );
@@ -497,7 +511,8 @@ private Rule getFindSeating() throws IntrospectionException,
final Declaration seatingRightGuestNameDeclaration = rule.getDeclaration( "seatingRightGuestName" );
final Declaration seatingRightSeatDeclaration = rule.getDeclaration( "seatingRightSeat" );
// --------------
- // Guest( name == seatingRightGuestName, rightGuestSex:sex, rightGuestHobby:hobby )
+ // Guest( name == seatingRightGuestName, rightGuestSex:sex,
+ // rightGuestHobby:hobby )
// ---------------
Column rightGuestColumn = new Column( 2,
guestType );
@@ -521,7 +536,8 @@ private Rule getFindSeating() throws IntrospectionException,
final Declaration rightGuestHobbyDeclaration = rule.getDeclaration( "rightGuestHobby" );
// ----------------
- // Guest( leftGuestName:name , sex != rightGuestSex, hobby == rightGuestHobby )
+ // Guest( leftGuestName:name , sex != rightGuestSex, hobby ==
+ // rightGuestHobby )
// ----------------
Column leftGuestColumn = new Column( 3,
guestType );
@@ -556,7 +572,7 @@ private Rule getFindSeating() throws IntrospectionException,
// --------------
// not ( Path( id == seatingId, guestName == leftGuestName) )
// --------------
- Column notPathColumn = new Column( 3,
+ Column notPathColumn = new Column( 5,
pathType );
notPathColumn.addConstraint( getBoundVariableConstraint( notPathColumn,
@@ -572,9 +588,10 @@ private Rule getFindSeating() throws IntrospectionException,
notPath.addChild( notPathColumn );
rule.addPattern( notPath );
// ------------
- // not ( Chosen( id == seatingId, guestName == leftGuestName, hobby == rightGuestHobby ) )
+ // not ( Chosen( id == seatingId, guestName == leftGuestName, hobby ==
+ // rightGuestHobby ) )
// ------------
- Column notChosenColumn = new Column( 5,
+ Column notChosenColumn = new Column( 6,
chosenType );
notChosenColumn.addConstraint( getBoundVariableConstraint( notChosenColumn,
@@ -631,18 +648,22 @@ public void invoke(Activation activation) throws ConsequenceException {
seatId,
false,
seatingRightSeat,
- leftGuestName,
+ rightGuestName ,
seatingRightSeat + 1,
- rightGuestName );
+ leftGuestName );
drools.assertObject( seating );
- drools.assertObject( new Path( count.getValue(),
- seatingRightSeat + 1,
- leftGuestName ) );
+ Path path = new Path( count.getValue(),
+ seatingRightSeat + 1,
+ leftGuestName );
+
+ drools.assertObject( path );
+
+ Chosen chosen = new Chosen( seatId,
+ leftGuestName,
+ rightGuestHobby );
- drools.assertObject( new Chosen( seatId,
- leftGuestName,
- rightGuestHobby ) );
+ drools.assertObject( chosen );
count.setValue( count.getValue() + 1 );
drools.modifyObject( tuple.getFactHandleForDeclaration( countDeclaration ),
@@ -652,9 +673,10 @@ public void invoke(Activation activation) throws ConsequenceException {
drools.modifyObject( tuple.getFactHandleForDeclaration( contextDeclaration ),
context );
- System.out.println( "assign seating : " + seating );
-
- } catch ( Exception e ) {
+ System.out.println( "find seating : " + seating + " : " + path + " : " + chosen );
+
+ }
+ catch ( Exception e ) {
throw new ConsequenceException( e );
}
}
@@ -668,25 +690,18 @@ public void invoke(Activation activation) throws ConsequenceException {
/**
*
- * rule pathDone() {
- * Context context;
- * Seating seating;
- * when {
- * context : Context( state == Context.MAKE_PATH )
- * seating : Seating( pathDone == false )
- * } then {
- * seating.setPathDone( true );
- * context.setName( Context.CHECK_DONE );
- * }
- * }
- *
+ * rule pathDone() { Context context; Seating seating; when { context :
+ * Context( state == Context.MAKE_PATH ) seating : Seating( pathDone ==
+ * false ) } then { seating.setPathDone( true ); context.setName(
+ * Context.CHECK_DONE ); } }
+ *
*
* @return
* @throws IntrospectionException
* @throws InvalidRuleException
*/
private Rule getPathDone() throws IntrospectionException,
- InvalidRuleException {
+ InvalidRuleException {
final Rule rule = new Rule( "pathDone" );
// -----------
@@ -721,8 +736,8 @@ private Rule getPathDone() throws IntrospectionException,
final Declaration seatingDeclaration = rule.getDeclaration( "seating" );
// ------------
- // context.setName( Context.CHECK_DONE );
- // seating.setPathDone( true );
+ // context.setName( Context.CHECK_DONE );
+ // seating.setPathDone( true );
// ------------
Consequence consequence = new Consequence() {
@@ -735,16 +750,17 @@ public void invoke(Activation activation) throws ConsequenceException {
Context context = (Context) tuple.get( contextDeclaration );
Seating seating = (Seating) tuple.get( seatingDeclaration );
-
+
seating.setPathDone( true );
drools.modifyObject( tuple.getFactHandleForDeclaration( seatingDeclaration ),
- seating );
-
+ seating );
+
context.setState( Context.CHECK_DONE );
drools.modifyObject( tuple.getFactHandleForDeclaration( contextDeclaration ),
- context );
- System.out.println("path done" + seating);
- } catch ( Exception e ) {
+ context );
+ System.out.println( "path done" + seating );
+ }
+ catch ( Exception e ) {
throw new ConsequenceException( e );
}
}
@@ -758,25 +774,18 @@ public void invoke(Activation activation) throws ConsequenceException {
/**
*
- * rule areWeDone() {
- * Context context;
- * LastSeat lastSear;
- * when {
- * context : Context( state == Context.CHECK_DONE )
- * LastSeat( lastSeat: seat )
- * Seating( rightSeat == lastSeat )
- * } then {
- * context.setState( Context.PRINT_RESULTS );
- * }
- * }
- *
+ * rule areWeDone() { Context context; LastSeat lastSear; when { context :
+ * Context( state == Context.CHECK_DONE ) LastSeat( lastSeat: seat )
+ * Seating( rightSeat == lastSeat ) } then { context.setState(
+ * Context.PRINT_RESULTS ); } }
+ *
*
* @return
* @throws IntrospectionException
* @throws InvalidRuleException
*/
private Rule getAreWeDone() throws IntrospectionException,
- InvalidRuleException {
+ InvalidRuleException {
final Rule rule = new Rule( "areWeDone" );
// -----------
@@ -797,30 +806,30 @@ private Rule getAreWeDone() throws IntrospectionException,
// LastSeat( lastSeat: seat )
// ---------------
Column lastSeatColumn = new Column( 1,
- lastSeatType,
- null );
+ lastSeatType,
+ null );
lastSeatColumn.addConstraint( getFieldBinding( lastSeatColumn,
- "seat",
- "lastSeat" ) );
+ "seat",
+ "lastSeat" ) );
rule.addPattern( lastSeatColumn );
final Declaration lastSeatDeclaration = rule.getDeclaration( "lastSeat" );
// -------------
- // Seating( rightSeat == lastSeat )
+ // Seating( rightSeat == lastSeat )
// -------------
Column seatingColumn = new Column( 2,
seatingType,
null );
-
+
seatingColumn.addConstraint( getBoundVariableConstraint( seatingColumn,
"rightSeat",
lastSeatDeclaration,
- integerEqualEvaluator ) );
-
+ integerEqualEvaluator ) );
+
rule.addPattern( seatingColumn );
-
+
// ------------
- // context.setName( Context.PRINT_RESULTS );
+ // context.setName( Context.PRINT_RESULTS );
// ------------
Consequence consequence = new Consequence() {
@@ -833,12 +842,13 @@ public void invoke(Activation activation) throws ConsequenceException {
Context context = (Context) tuple.get( contextDeclaration );
context.setState( Context.PRINT_RESULTS );
-
+
drools.modifyObject( tuple.getFactHandleForDeclaration( contextDeclaration ),
- context );
-
+ context );
+
System.out.println( "are we done yet" );
- } catch ( Exception e ) {
+ }
+ catch ( Exception e ) {
throw new ConsequenceException( e );
}
}
@@ -848,25 +858,19 @@ public void invoke(Activation activation) throws ConsequenceException {
rule.setConsequence( consequence );
return rule;
- }
-
+ }
+
/**
*
- * rule continue() {
- * Context context;
- * when {
- * context : Context( state == Context.CHECK_DONE )
- * } then {
- * context.setState( Context.ASSIGN_SEATS );
- * }
- * }
+ * rule continue() { Context context; when { context : Context( state ==
+ * Context.CHECK_DONE ) } then { context.setState( Context.ASSIGN_SEATS ); } }
*
* @return
* @throws IntrospectionException
* @throws InvalidRuleException
*/
private Rule getContinueProcessing() throws IntrospectionException,
- InvalidRuleException {
+ InvalidRuleException {
final Rule rule = new Rule( "continueProcessng" );
// -----------
@@ -883,9 +887,9 @@ private Rule getContinueProcessing() throws IntrospectionException,
rule.addPattern( contextColumn );
final Declaration contextDeclaration = rule.getDeclaration( "context" );
-
+
// ------------
- // context.setName( Context.ASSIGN_SEATS );
+ // context.setName( Context.ASSIGN_SEATS );
// ------------
Consequence consequence = new Consequence() {
@@ -898,12 +902,13 @@ public void invoke(Activation activation) throws ConsequenceException {
Context context = (Context) tuple.get( contextDeclaration );
context.setState( Context.ASSIGN_SEATS );
-
+
drools.modifyObject( tuple.getFactHandleForDeclaration( contextDeclaration ),
- context );
-
- System.out.println("continue processing");
- } catch ( Exception e ) {
+ context );
+
+ System.out.println( "continue processing" );
+ }
+ catch ( Exception e ) {
throw new ConsequenceException( e );
}
}
@@ -913,26 +918,21 @@ public void invoke(Activation activation) throws ConsequenceException {
rule.setConsequence( consequence );
return rule;
- }
+ }
/**
*
- * rule all_done() {
- * Context context;
- * when {
- * context : Context( state == Context.PRINT_RESULTS )
- * } then {
- *
- * }
- * }
- *
+ * rule all_done() { Context context; when { context : Context( state ==
+ * Context.PRINT_RESULTS ) } then {
+ * } }
+ *
*
* @return
* @throws IntrospectionException
* @throws InvalidRuleException
*/
private Rule getAllDone() throws IntrospectionException,
- InvalidRuleException {
+ InvalidRuleException {
final Rule rule = new Rule( "alldone" );
// -----------
@@ -948,7 +948,7 @@ private Rule getAllDone() throws IntrospectionException,
rule.addPattern( contextColumn );
final Declaration contextDeclaration = rule.getDeclaration( "context" );
-
+
// ------------
//
// ------------
@@ -956,8 +956,9 @@ private Rule getAllDone() throws IntrospectionException,
public void invoke(Activation activation) throws ConsequenceException {
try {
-
- } catch ( Exception e ) {
+ System.out.println( "all done" );
+ }
+ catch ( Exception e ) {
throw new ConsequenceException( e );
}
}
@@ -967,77 +968,66 @@ public void invoke(Activation activation) throws ConsequenceException {
rule.setConsequence( consequence );
return rule;
- }
+ }
/**
* Convert the facts from the <code>InputStream</code> to a list of
* objects.
*/
- private List getInputObjects(InputStream inputStream) throws IOException
- {
- List list = new ArrayList( );
+ private List getInputObjects(InputStream inputStream) throws IOException {
+ List list = new ArrayList();
BufferedReader br = new BufferedReader( new InputStreamReader( inputStream ) );
String line;
- while ( (line = br.readLine( )) != null )
- {
- if ( line.trim( ).length( ) == 0 || line.trim( ).startsWith( ";" ) )
- {
+ while ( (line = br.readLine()) != null ) {
+ if ( line.trim().length() == 0 || line.trim().startsWith( ";" ) ) {
continue;
}
StringTokenizer st = new StringTokenizer( line,
"() " );
- String type = st.nextToken( );
+ String type = st.nextToken();
- if ( "guest".equals( type ) )
- {
- if ( !"name".equals( st.nextToken( ) ) )
- {
+ if ( "guest".equals( type ) ) {
+ if ( !"name".equals( st.nextToken() ) ) {
throw new IOException( "expected 'name' in: " + line );
}
- String name = st.nextToken( );
- if ( !"sex".equals( st.nextToken( ) ) )
- {
+ String name = st.nextToken();
+ if ( !"sex".equals( st.nextToken() ) ) {
throw new IOException( "expected 'sex' in: " + line );
}
- String sex = st.nextToken( );
- if ( !"hobby".equals( st.nextToken( ) ) )
- {
+ String sex = st.nextToken();
+ if ( !"hobby".equals( st.nextToken() ) ) {
throw new IOException( "expected 'hobby' in: " + line );
}
- String hobby = st.nextToken( );
+ String hobby = st.nextToken();
Guest guest = new Guest( name,
- Sex.resolve(sex),
- Hobby.resolve(hobby));
+ Sex.resolve( sex ),
+ Hobby.resolve( hobby ) );
- list.add( guest );
+ list.add( guest );
}
- if ( "last_seat".equals( type ) )
- {
- if ( !"seat".equals( st.nextToken( ) ) )
- {
+ if ( "last_seat".equals( type ) ) {
+ if ( !"seat".equals( st.nextToken() ) ) {
throw new IOException( "expected 'seat' in: " + line );
}
- list.add( new LastSeat( new Integer( st.nextToken( ) ).intValue( ) ) );
+ list.add( new LastSeat( new Integer( st.nextToken() ).intValue() ) );
}
- if ( "context".equals( type ) )
- {
- if ( !"state".equals( st.nextToken( ) ) )
- {
+ if ( "context".equals( type ) ) {
+ if ( !"state".equals( st.nextToken() ) ) {
throw new IOException( "expected 'state' in: " + line );
}
- list.add( new Context( st.nextToken( ) ) );
+ list.add( new Context( st.nextToken() ) );
}
}
- inputStream.close( );
+ inputStream.close();
return list;
- }
-
+ }
+
private InputStream generateData() {
final String LINE_SEPARATOR = System.getProperty( "line.separator" );
|
5ae1c8a24284d1bbc96c8410c81600a5e1a7a9f5
|
spring-framework
|
Clean up spring-webmvc-portlet tests warnings--Clean up compiler warnings in the tests of spring-webmvc-portlet. This-commit adds type parameters to all the types (mostly `List` and `Map`).--After this commit the only warnings in spring-web left are the-subclasses of `MyCommandProvidingFormController`.-
|
p
|
https://github.com/spring-projects/spring-framework
|
diff --git a/spring-webmvc-portlet/src/test/java/org/springframework/context/BeanThatListens.java b/spring-webmvc-portlet/src/test/java/org/springframework/context/BeanThatListens.java
index ab40da4b8969..d6f4b6ab9121 100644
--- a/spring-webmvc-portlet/src/test/java/org/springframework/context/BeanThatListens.java
+++ b/spring-webmvc-portlet/src/test/java/org/springframework/context/BeanThatListens.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2012 the original author or authors.
+ * Copyright 2002-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -24,7 +24,7 @@
* @author Thomas Risberg
* @author Juergen Hoeller
*/
-public class BeanThatListens implements ApplicationListener {
+public class BeanThatListens implements ApplicationListener<ApplicationEvent> {
private BeanThatBroadcasts beanThatBroadcasts;
@@ -36,7 +36,7 @@ public BeanThatListens() {
public BeanThatListens(BeanThatBroadcasts beanThatBroadcasts) {
this.beanThatBroadcasts = beanThatBroadcasts;
- Map beans = beanThatBroadcasts.applicationContext.getBeansOfType(BeanThatListens.class);
+ Map<String, BeanThatListens> beans = beanThatBroadcasts.applicationContext.getBeansOfType(BeanThatListens.class);
if (!beans.isEmpty()) {
throw new IllegalStateException("Shouldn't have found any BeanThatListens instances");
}
diff --git a/spring-webmvc-portlet/src/test/java/org/springframework/context/TestListener.java b/spring-webmvc-portlet/src/test/java/org/springframework/context/TestListener.java
index 739222190fa9..7762131ab418 100644
--- a/spring-webmvc-portlet/src/test/java/org/springframework/context/TestListener.java
+++ b/spring-webmvc-portlet/src/test/java/org/springframework/context/TestListener.java
@@ -1,3 +1,19 @@
+/*
+ * Copyright 2002-2014 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package org.springframework.context;
import org.springframework.context.ApplicationEvent;
@@ -9,7 +25,7 @@
* @author Rod Johnson
* @since January 21, 2001
*/
-public class TestListener implements ApplicationListener {
+public class TestListener implements ApplicationListener<ApplicationEvent> {
private int eventCount;
diff --git a/spring-webmvc-portlet/src/test/java/org/springframework/mock/web/portlet/MockPortletRequest.java b/spring-webmvc-portlet/src/test/java/org/springframework/mock/web/portlet/MockPortletRequest.java
index 2cbefb79c1a2..08278d07d521 100644
--- a/spring-webmvc-portlet/src/test/java/org/springframework/mock/web/portlet/MockPortletRequest.java
+++ b/spring-webmvc-portlet/src/test/java/org/springframework/mock/web/portlet/MockPortletRequest.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2012 the original author or authors.
+ * Copyright 2002-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -271,8 +271,8 @@ public void addProperty(String key, String value) {
@Override
public String getProperty(String key) {
Assert.notNull(key, "Property key must not be null");
- List list = this.properties.get(key);
- return (list != null && list.size() > 0 ? (String) list.get(0) : null);
+ List<String> list = this.properties.get(key);
+ return (list != null && list.size() > 0 ? list.get(0) : null);
}
@Override
diff --git a/spring-webmvc-portlet/src/test/java/org/springframework/web/portlet/ComplexPortletApplicationContext.java b/spring-webmvc-portlet/src/test/java/org/springframework/web/portlet/ComplexPortletApplicationContext.java
index b1b1459ee75e..564a2d5722db 100644
--- a/spring-webmvc-portlet/src/test/java/org/springframework/web/portlet/ComplexPortletApplicationContext.java
+++ b/spring-webmvc-portlet/src/test/java/org/springframework/web/portlet/ComplexPortletApplicationContext.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2013 the original author or authors.
+ * Copyright 2002-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -39,6 +39,7 @@
import org.springframework.beans.BeansException;
import org.springframework.beans.MutablePropertyValues;
+import org.springframework.beans.factory.config.BeanReference;
import org.springframework.beans.factory.config.ConstructorArgumentValues;
import org.springframework.beans.factory.config.RuntimeBeanReference;
import org.springframework.beans.factory.support.ManagedList;
@@ -118,14 +119,14 @@ public void refresh() throws BeansException {
ParameterMappingInterceptor parameterMappingInterceptor = new ParameterMappingInterceptor();
parameterMappingInterceptor.setParameterName("interceptingParam");
- List interceptors = new ArrayList();
+ List<HandlerInterceptor> interceptors = new ArrayList<HandlerInterceptor>(4);
interceptors.add(parameterMappingInterceptor);
interceptors.add(userRoleInterceptor);
interceptors.add(new MyHandlerInterceptor1());
interceptors.add(new MyHandlerInterceptor2());
MutablePropertyValues pvs = new MutablePropertyValues();
- Map portletModeMap = new ManagedMap();
+ Map<String, BeanReference> portletModeMap = new ManagedMap<String, BeanReference>();
portletModeMap.put("view", new RuntimeBeanReference("viewController"));
portletModeMap.put("edit", new RuntimeBeanReference("editController"));
pvs.add("portletModeMap", portletModeMap);
@@ -133,7 +134,7 @@ public void refresh() throws BeansException {
registerSingleton("handlerMapping3", PortletModeHandlerMapping.class, pvs);
pvs = new MutablePropertyValues();
- Map parameterMap = new ManagedMap();
+ Map<String, BeanReference> parameterMap = new ManagedMap<String, BeanReference>();
parameterMap.put("test1", new RuntimeBeanReference("testController1"));
parameterMap.put("test2", new RuntimeBeanReference("testController2"));
parameterMap.put("requestLocaleChecker", new RuntimeBeanReference("requestLocaleCheckingController"));
@@ -148,10 +149,10 @@ public void refresh() throws BeansException {
registerSingleton("handlerMapping2", ParameterHandlerMapping.class, pvs);
pvs = new MutablePropertyValues();
- Map innerMap = new ManagedMap();
+ Map<String, Object> innerMap = new ManagedMap<String, Object>();
innerMap.put("help1", new RuntimeBeanReference("helpController1"));
innerMap.put("help2", new RuntimeBeanReference("helpController2"));
- Map outerMap = new ManagedMap();
+ Map<String, Object> outerMap = new ManagedMap<String, Object>();
outerMap.put("help", innerMap);
pvs.add("portletModeParameterMap", outerMap);
pvs.add("order", "1");
@@ -171,7 +172,7 @@ public void refresh() throws BeansException {
pvs.add("exceptionMappings",
"java.lang.Exception=failed-exception\n" +
"java.lang.RuntimeException=failed-runtime");
- List mappedHandlers = new ManagedList();
+ List<BeanReference> mappedHandlers = new ManagedList<BeanReference>();
mappedHandlers.add(new RuntimeBeanReference("exceptionThrowingHandler1"));
pvs.add("mappedHandlers", mappedHandlers);
pvs.add("defaultErrorView", "failed-default-0");
@@ -533,7 +534,7 @@ public void cleanupMultipart(MultipartActionRequest request) {
}
- public static class TestApplicationListener implements ApplicationListener {
+ public static class TestApplicationListener implements ApplicationListener<ApplicationEvent> {
public int counter = 0;
diff --git a/spring-webmvc-portlet/src/test/java/org/springframework/web/portlet/DispatcherPortletTests.java b/spring-webmvc-portlet/src/test/java/org/springframework/web/portlet/DispatcherPortletTests.java
index 16ee96b7fd89..d99733d26c75 100644
--- a/spring-webmvc-portlet/src/test/java/org/springframework/web/portlet/DispatcherPortletTests.java
+++ b/spring-webmvc-portlet/src/test/java/org/springframework/web/portlet/DispatcherPortletTests.java
@@ -1,18 +1,18 @@
/*
-* Copyright 2002-2013 the original author or authors.
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-* http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
+ * Copyright 2002-2014 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package org.springframework.web.portlet;
@@ -125,7 +125,7 @@ public void testPortletModeParameterMappingInvalidHelpRenderRequest() throws Exc
request.setPortletMode(PortletMode.HELP);
request.setParameter("action", "help3");
complexDispatcherPortlet.doDispatch(request, response);
- Map model = (Map) request.getAttribute(ViewRendererServlet.MODEL_ATTRIBUTE);
+ Map<?, ?> model = (Map<?, ?>) request.getAttribute(ViewRendererServlet.MODEL_ATTRIBUTE);
assertTrue(model.get("exception").getClass().equals(NoHandlerFoundException.class));
InternalResourceView view = (InternalResourceView) request.getAttribute(ViewRendererServlet.VIEW_ATTRIBUTE);
assertEquals("failed-unavailable", view.getBeanName());
@@ -164,7 +164,7 @@ public void testPortletModeMappingValidViewRenderRequest() throws Exception {
request.setParameter("action", "not mapped");
request.setParameter("myParam", "not mapped");
complexDispatcherPortlet.doDispatch(request, response);
- Map model = (Map) request.getAttribute(ViewRendererServlet.MODEL_ATTRIBUTE);
+ Map<?, ?> model = (Map<?, ?>) request.getAttribute(ViewRendererServlet.MODEL_ATTRIBUTE);
assertEquals("view was here", model.get("result"));
InternalResourceView view = (InternalResourceView) request.getAttribute(ViewRendererServlet.VIEW_ATTRIBUTE);
assertEquals("someViewName", view.getBeanName());
@@ -178,7 +178,7 @@ public void testPortletModeMappingViewRenderRequestWithUnauthorizedUserRole() th
request.setParameter("action", "not mapped");
request.setParameter("myParam", "not mapped");
complexDispatcherPortlet.doDispatch(request, response);
- Map model = (Map) request.getAttribute(ViewRendererServlet.MODEL_ATTRIBUTE);
+ Map<?, ?> model = (Map<?, ?>) request.getAttribute(ViewRendererServlet.MODEL_ATTRIBUTE);
Exception exception = (Exception) model.get("exception");
assertNotNull(exception);
assertTrue(exception.getClass().equals(PortletSecurityException.class));
@@ -222,7 +222,7 @@ public void testUnknownHandlerRenderRequest() throws Exception {
MockRenderResponse response = new MockRenderResponse();
request.setParameter("myParam", "unknown");
complexDispatcherPortlet.doDispatch(request, response);
- Map model = (Map) request.getAttribute(ViewRendererServlet.MODEL_ATTRIBUTE);
+ Map<?, ?> model = (Map<?, ?>) request.getAttribute(ViewRendererServlet.MODEL_ATTRIBUTE);
Exception exception = (Exception)model.get("exception");
assertTrue(exception.getClass().equals(PortletException.class));
assertTrue(exception.getMessage().indexOf("No adapter for handler") != -1);
@@ -255,7 +255,7 @@ public void testNoDetectAllHandlerMappingsWithParameterRenderRequest() throws Ex
MockRenderResponse response = new MockRenderResponse();
request.setParameter("myParam", "test1");
complexDispatcherPortlet.doDispatch(request, response);
- Map model = (Map) request.getAttribute(ViewRendererServlet.MODEL_ATTRIBUTE);
+ Map<?, ?> model = (Map<?, ?>) request.getAttribute(ViewRendererServlet.MODEL_ATTRIBUTE);
Exception exception = (Exception) model.get("exception");
assertTrue(exception.getClass().equals(NoHandlerFoundException.class));
InternalResourceView view = (InternalResourceView) request.getAttribute(ViewRendererServlet.VIEW_ATTRIBUTE);
@@ -333,7 +333,7 @@ public void testIncorrectLocaleInRequest() throws Exception {
request.setParameter("myParam", "requestLocaleChecker");
request.addPreferredLocale(Locale.ENGLISH);
complexDispatcherPortlet.doDispatch(request, response);
- Map model = (Map) request.getAttribute(ViewRendererServlet.MODEL_ATTRIBUTE);
+ Map<?, ?> model = (Map<?, ?>) request.getAttribute(ViewRendererServlet.MODEL_ATTRIBUTE);
Exception exception = (Exception) model.get("exception");
assertTrue(exception.getClass().equals(PortletException.class));
assertEquals("Incorrect Locale in RenderRequest", exception.getMessage());
@@ -356,7 +356,7 @@ public void testIncorrectLocaleInLocalContextHolder() throws Exception {
request.setParameter("myParam", "contextLocaleChecker");
request.addPreferredLocale(Locale.ENGLISH);
complexDispatcherPortlet.doDispatch(request, response);
- Map model = (Map) request.getAttribute(ViewRendererServlet.MODEL_ATTRIBUTE);
+ Map<?, ?> model = (Map<?, ?>) request.getAttribute(ViewRendererServlet.MODEL_ATTRIBUTE);
Exception exception = (Exception) model.get("exception");
assertTrue(exception.getClass().equals(PortletException.class));
assertEquals("Incorrect Locale in LocaleContextHolder", exception.getMessage());
@@ -401,7 +401,7 @@ public void testHandlerInterceptorNotClearingModelAndView() throws Exception {
request.addUserRole("role1");
request.addParameter("noView", "false");
complexDispatcherPortlet.doDispatch(request, response);
- Map model = (Map) request.getAttribute(ViewRendererServlet.MODEL_ATTRIBUTE);
+ Map<?, ?> model = (Map<?, ?>) request.getAttribute(ViewRendererServlet.MODEL_ATTRIBUTE);
assertEquals("view was here", model.get("result"));
InternalResourceView view = (InternalResourceView) request.getAttribute(ViewRendererServlet.VIEW_ATTRIBUTE);
assertEquals("someViewName", view.getBeanName());
@@ -414,7 +414,7 @@ public void testHandlerInterceptorClearingModelAndView() throws Exception {
request.addUserRole("role1");
request.addParameter("noView", "true");
complexDispatcherPortlet.doDispatch(request, response);
- Map model = (Map) request.getAttribute(ViewRendererServlet.MODEL_ATTRIBUTE);
+ Map<?, ?> model = (Map<?, ?>) request.getAttribute(ViewRendererServlet.MODEL_ATTRIBUTE);
assertNull(model);
InternalResourceView view = (InternalResourceView) request.getAttribute(ViewRendererServlet.VIEW_ATTRIBUTE);
assertNull(view);
diff --git a/spring-webmvc-portlet/src/test/java/org/springframework/web/portlet/bind/PortletRequestDataBinderTests.java b/spring-webmvc-portlet/src/test/java/org/springframework/web/portlet/bind/PortletRequestDataBinderTests.java
index 3bf246248935..7bb97cda6c06 100644
--- a/spring-webmvc-portlet/src/test/java/org/springframework/web/portlet/bind/PortletRequestDataBinderTests.java
+++ b/spring-webmvc-portlet/src/test/java/org/springframework/web/portlet/bind/PortletRequestDataBinderTests.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2013 the original author or authors.
+ * Copyright 2002-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -166,7 +166,7 @@ public void testBindingMap() {
public void testBindingSet() {
TestBean bean = new TestBean();
- Set set = new LinkedHashSet<>(2);
+ Set<TestBean> set = new LinkedHashSet<TestBean>(2);
set.add(new TestBean("test1"));
set.add(new TestBean("test2"));
bean.setSomeSet(set);
@@ -181,7 +181,7 @@ public void testBindingSet() {
assertNotNull(bean.getSomeSet());
assertEquals(2, bean.getSomeSet().size());
- Iterator iter = bean.getSomeSet().iterator();
+ Iterator<?> iter = bean.getSomeSet().iterator();
TestBean bean1 = (TestBean) iter.next();
assertEquals("test1", bean1.getName());
diff --git a/spring-webmvc-portlet/src/test/java/org/springframework/web/portlet/context/PortletWebRequestTests.java b/spring-webmvc-portlet/src/test/java/org/springframework/web/portlet/context/PortletWebRequestTests.java
index e52c722b2c34..da32fcfdc7af 100644
--- a/spring-webmvc-portlet/src/test/java/org/springframework/web/portlet/context/PortletWebRequestTests.java
+++ b/spring-webmvc-portlet/src/test/java/org/springframework/web/portlet/context/PortletWebRequestTests.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2012 the original author or authors.
+ * Copyright 2002-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -56,13 +56,13 @@ public void testParameters() {
assertEquals("value2", request.getParameterValues("param2")[0]);
assertEquals("value2a", request.getParameterValues("param2")[1]);
- Map paramMap = request.getParameterMap();
+ Map<String, String[]> paramMap = request.getParameterMap();
assertEquals(2, paramMap.size());
- assertEquals(1, ((String[]) paramMap.get("param1")).length);
- assertEquals("value1", ((String[]) paramMap.get("param1"))[0]);
- assertEquals(2, ((String[]) paramMap.get("param2")).length);
- assertEquals("value2", ((String[]) paramMap.get("param2"))[0]);
- assertEquals("value2a", ((String[]) paramMap.get("param2"))[1]);
+ assertEquals(1, paramMap.get("param1").length);
+ assertEquals("value1", paramMap.get("param1")[0]);
+ assertEquals(2, paramMap.get("param2").length);
+ assertEquals("value2", paramMap.get("param2")[0]);
+ assertEquals("value2a", paramMap.get("param2")[1]);
}
@Test
diff --git a/spring-webmvc-portlet/src/test/java/org/springframework/web/portlet/mvc/annotation/Portlet20AnnotationControllerTests.java b/spring-webmvc-portlet/src/test/java/org/springframework/web/portlet/mvc/annotation/Portlet20AnnotationControllerTests.java
index 600dc7e8fa60..755738871e03 100644
--- a/spring-webmvc-portlet/src/test/java/org/springframework/web/portlet/mvc/annotation/Portlet20AnnotationControllerTests.java
+++ b/spring-webmvc-portlet/src/test/java/org/springframework/web/portlet/mvc/annotation/Portlet20AnnotationControllerTests.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2013 the original author or authors.
+ * Copyright 2002-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -23,6 +23,7 @@
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
+
import javax.portlet.ActionRequest;
import javax.portlet.ActionResponse;
import javax.portlet.EventResponse;
@@ -173,7 +174,7 @@ public void adaptedHandleMethods4() throws Exception {
doTestAdaptedHandleMethods(MyAdaptedController4.class);
}
- private void doTestAdaptedHandleMethods(final Class controllerClass) throws Exception {
+ private void doTestAdaptedHandleMethods(final Class<?> controllerClass) throws Exception {
DispatcherPortlet portlet = new DispatcherPortlet() {
@Override
protected ApplicationContext createPortletApplicationContext(ApplicationContext parent) throws BeansException {
@@ -1233,7 +1234,7 @@ public void myDefaultResource(Writer writer) throws IOException {
private static class TestView {
- public void render(String viewName, Map model, PortletRequest request, MimeResponse response) throws Exception {
+ public void render(String viewName, Map<String, Object> model, PortletRequest request, MimeResponse response) throws Exception {
TestBean tb = (TestBean) model.get("testBean");
if (tb == null) {
tb = (TestBean) model.get("myCommand");
@@ -1248,9 +1249,9 @@ public void render(String viewName, Map model, PortletRequest request, MimeRespo
if (errors.hasFieldErrors("date")) {
throw new IllegalStateException();
}
- List<TestBean> testBeans = (List<TestBean>) model.get("testBeanList");
+ List<?> testBeans = (List<?>) model.get("testBeanList");
response.getWriter().write(viewName + "-" + tb.getName() + "-" + errors.getFieldError("age").getCode() +
- "-" + testBeans.get(0).getName() + "-" + model.get("myKey"));
+ "-" + ((TestBean) testBeans.get(0)).getName() + "-" + model.get("myKey"));
}
}
diff --git a/spring-webmvc-portlet/src/test/java/org/springframework/web/portlet/mvc/annotation/PortletAnnotationControllerTests.java b/spring-webmvc-portlet/src/test/java/org/springframework/web/portlet/mvc/annotation/PortletAnnotationControllerTests.java
index 7a58b4a1c45d..4ab35eae05d9 100644
--- a/spring-webmvc-portlet/src/test/java/org/springframework/web/portlet/mvc/annotation/PortletAnnotationControllerTests.java
+++ b/spring-webmvc-portlet/src/test/java/org/springframework/web/portlet/mvc/annotation/PortletAnnotationControllerTests.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2013 the original author or authors.
+ * Copyright 2002-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -114,7 +114,7 @@ public void testAdaptedHandleMethods3() throws Exception {
doTestAdaptedHandleMethods(MyAdaptedController3.class);
}
- public void doTestAdaptedHandleMethods(final Class controllerClass) throws Exception {
+ public void doTestAdaptedHandleMethods(final Class<?> controllerClass) throws Exception {
DispatcherPortlet portlet = new DispatcherPortlet() {
@Override
protected ApplicationContext createPortletApplicationContext(ApplicationContext parent) throws BeansException {
@@ -796,7 +796,7 @@ public void myHandle(RenderResponse response) throws IOException {
private static class TestView {
- public void render(String viewName, Map model, PortletRequest request, MimeResponse response) throws Exception {
+ public void render(String viewName, Map<String, Object> model, PortletRequest request, MimeResponse response) throws Exception {
TestBean tb = (TestBean) model.get("testBean");
if (tb == null) {
tb = (TestBean) model.get("myCommand");
@@ -811,9 +811,9 @@ public void render(String viewName, Map model, PortletRequest request, MimeRespo
if (errors.hasFieldErrors("date")) {
throw new IllegalStateException();
}
- List<TestBean> testBeans = (List<TestBean>) model.get("testBeanList");
+ List<?> testBeans = (List<?>) model.get("testBeanList");
response.getWriter().write(viewName + "-" + tb.getName() + "-" + errors.getFieldError("age").getCode() +
- "-" + testBeans.get(0).getName() + "-" + model.get("myKey"));
+ "-" + ((TestBean) testBeans.get(0)).getName() + "-" + model.get("myKey"));
}
}
@@ -830,7 +830,7 @@ public static class MyModelAndViewResolver implements ModelAndViewResolver {
@Override
public org.springframework.web.servlet.ModelAndView resolveModelAndView(Method handlerMethod,
- Class handlerType,
+ Class<?> handlerType,
Object returnValue,
ExtendedModelMap implicitModel,
NativeWebRequest webRequest) {
diff --git a/spring-webmvc-portlet/src/test/java/org/springframework/web/portlet/util/PortletUtilsTests.java b/spring-webmvc-portlet/src/test/java/org/springframework/web/portlet/util/PortletUtilsTests.java
index f7192f7accd0..ba0d45e9719e 100644
--- a/spring-webmvc-portlet/src/test/java/org/springframework/web/portlet/util/PortletUtilsTests.java
+++ b/spring-webmvc-portlet/src/test/java/org/springframework/web/portlet/util/PortletUtilsTests.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2013 the original author or authors.
+ * Copyright 2002-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -245,8 +245,7 @@ public void testClearAllRenderParameters() throws Exception {
public void testClearAllRenderParametersDoesNotPropagateExceptionIfRedirectAlreadySentAtTimeOfCall() throws Exception {
MockActionResponse response = new MockActionResponse() {
@Override
- @SuppressWarnings("unchecked")
- public void setRenderParameters(Map parameters) {
+ public void setRenderParameters(Map<String, String[]> parameters) {
throw new IllegalStateException();
}
};
@@ -302,7 +301,6 @@ public void testExposeRequestAttributesWithNullAttributesMap() throws Exception
PortletUtils.exposeRequestAttributes(new MockPortletRequest(), null);
}
- @SuppressWarnings("unchecked")
@Test
public void testExposeRequestAttributesSunnyDay() throws Exception {
MockPortletRequest request = new MockPortletRequest();
@@ -314,7 +312,6 @@ public void testExposeRequestAttributesSunnyDay() throws Exception {
assertEquals("Roy Fokker", request.getAttribute("mentor"));
}
- @SuppressWarnings("unchecked")
@Test
public void testExposeRequestAttributesWithEmptyAttributesMapIsAnIdempotentOperation() throws Exception {
MockPortletRequest request = new MockPortletRequest();
|
ff788e8f9da140e9a2cc08d4615fb878dbdb1c7d
|
camel
|
CAMEL-2636 Fixed the issue of IOException: Bad- file descriptor--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@934852 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/camel
|
diff --git a/camel-core/src/main/java/org/apache/camel/converter/stream/CachedOutputStream.java b/camel-core/src/main/java/org/apache/camel/converter/stream/CachedOutputStream.java
index b1765f0fe824a..804730f8efd35 100644
--- a/camel-core/src/main/java/org/apache/camel/converter/stream/CachedOutputStream.java
+++ b/camel-core/src/main/java/org/apache/camel/converter/stream/CachedOutputStream.java
@@ -68,37 +68,7 @@ public CachedOutputStream(Exchange exchange) {
if (dir != null) {
this.outputDir = exchange.getContext().getTypeConverter().convertTo(File.class, dir);
}
-
- // add on completion so we can cleanup after the exchange is done such as deleting temporary files
- exchange.addOnCompletion(new SynchronizationAdapter() {
- @Override
- public void onDone(Exchange exchange) {
- try {
- // close the stream and FileInputStreamCache
- close();
- for (FileInputStreamCache cache : fileInputStreamCaches) {
- cache.close();
- }
- // cleanup temporary file
- if (tempFile != null) {
- boolean deleted = tempFile.delete();
- if (!deleted) {
- LOG.warn("Cannot delete temporary cache file: " + tempFile);
- } else if (LOG.isTraceEnabled()) {
- LOG.trace("Deleted temporary cache file: " + tempFile);
- }
- tempFile = null;
- }
- } catch (Exception e) {
- LOG.warn("Error deleting temporary cache file: " + tempFile, e);
- }
- }
-
- @Override
- public String toString() {
- return "OnCompletion[CachedOutputStream]";
- }
- });
+
}
public void flush() throws IOException {
@@ -107,6 +77,20 @@ public void flush() throws IOException {
public void close() throws IOException {
currentStream.close();
+ try {
+ // cleanup temporary file
+ if (tempFile != null) {
+ boolean deleted = tempFile.delete();
+ if (!deleted) {
+ LOG.warn("Cannot delete temporary cache file: " + tempFile);
+ } else if (LOG.isTraceEnabled()) {
+ LOG.trace("Deleted temporary cache file: " + tempFile);
+ }
+ tempFile = null;
+ }
+ } catch (Exception e) {
+ LOG.warn("Error deleting temporary cache file: " + tempFile, e);
+ }
}
public boolean equals(Object obj) {
diff --git a/camel-core/src/main/java/org/apache/camel/converter/stream/FileInputStreamCache.java b/camel-core/src/main/java/org/apache/camel/converter/stream/FileInputStreamCache.java
index 8341ea2f04af5..ec002f2c087f7 100644
--- a/camel-core/src/main/java/org/apache/camel/converter/stream/FileInputStreamCache.java
+++ b/camel-core/src/main/java/org/apache/camel/converter/stream/FileInputStreamCache.java
@@ -41,19 +41,24 @@ public FileInputStreamCache(File file, CachedOutputStream cos) throws FileNotFou
@Override
public void close() {
try {
- getInputStream().close();
+ if (isSteamOpened()) {
+ getInputStream().close();
+ }
+ // when close the FileInputStreamCache we should also close the cachedOutputStream
if (cachedOutputStream != null) {
cachedOutputStream.close();
}
} catch (Exception e) {
throw new RuntimeCamelException(e);
- }
+ }
}
@Override
public void reset() {
try {
- getInputStream().close();
+ if (isSteamOpened()) {
+ getInputStream().close();
+ }
// reset by creating a new stream based on the file
stream = new FileInputStream(file);
} catch (Exception e) {
@@ -78,5 +83,13 @@ public int read() throws IOException {
protected InputStream getInputStream() {
return stream;
}
+
+ private boolean isSteamOpened() {
+ if (stream != null && stream instanceof FileInputStream) {
+ return ((FileInputStream) stream).getChannel().isOpen();
+ } else {
+ return stream != null;
+ }
+ }
}
diff --git a/camel-core/src/test/java/org/apache/camel/converter/stream/CachedOutputStreamTest.java b/camel-core/src/test/java/org/apache/camel/converter/stream/CachedOutputStreamTest.java
index ee84671e9c91c..6049c55c93602 100644
--- a/camel-core/src/test/java/org/apache/camel/converter/stream/CachedOutputStreamTest.java
+++ b/camel-core/src/test/java/org/apache/camel/converter/stream/CachedOutputStreamTest.java
@@ -78,8 +78,6 @@ public void testCacheStreamToFileAndCloseStream() throws IOException {
((InputStream)cache).close();
assertEquals("Cached a wrong file", temp, TEST_STRING);
- exchange.getUnitOfWork().done(exchange);
-
try {
cache.reset();
// The stream is closed, so the temp file is gone.
@@ -110,7 +108,6 @@ public void testCacheStreamToFileAndNotCloseStream() throws IOException {
temp = toString((InputStream)cache);
assertEquals("Cached a wrong file", temp, TEST_STRING);
- exchange.getUnitOfWork().done(exchange);
((InputStream)cache).close();
files = file.list();
@@ -131,8 +128,6 @@ public void testCacheStreamToMemory() throws IOException {
assertTrue("Should get the InputStreamCache", cache instanceof InputStreamCache);
String temp = IOConverter.toString((InputStream)cache, null);
assertEquals("Cached a wrong file", temp, TEST_STRING);
-
- exchange.getUnitOfWork().done(exchange);
}
public void testCacheStreamToMemoryAsDiskIsdisabled() throws IOException {
diff --git a/tests/camel-itest/src/test/java/org/apache/camel/itest/issues/JettyHttpFileCacheTest.java b/tests/camel-itest/src/test/java/org/apache/camel/itest/issues/JettyHttpFileCacheTest.java
new file mode 100644
index 0000000000000..cf45479375bb6
--- /dev/null
+++ b/tests/camel-itest/src/test/java/org/apache/camel/itest/issues/JettyHttpFileCacheTest.java
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.itest.issues;
+
+import java.io.File;
+
+import org.apache.camel.Exchange;
+import org.apache.camel.Processor;
+import org.apache.camel.builder.RouteBuilder;
+import org.apache.camel.component.mock.MockEndpoint;
+import org.apache.camel.converter.stream.CachedOutputStream;
+import org.apache.camel.impl.DefaultExchange;
+import org.apache.camel.impl.DefaultUnitOfWork;
+import org.apache.camel.spi.UnitOfWork;
+import org.apache.camel.test.junit4.CamelTestSupport;
+import org.junit.Before;
+import org.junit.Test;
+
+public class JettyHttpFileCacheTest extends CamelTestSupport {
+ private static final String TEST_STRING = "This is a test string and it has enough"
+ + " aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa ";
+
+ @Before
+ public void setUp() throws Exception {
+ super.setUp();
+
+ context.getProperties().put(CachedOutputStream.TEMP_DIR, "./target/cachedir");
+ context.getProperties().put(CachedOutputStream.THRESHOLD, "16");
+ deleteDirectory("./target/cachedir");
+ createDirectory("./target/cachedir");
+ }
+
+ @Test
+ public void testGetWithRelativePath() throws Exception {
+
+ String response = template.requestBody("http://localhost:8201/clipboard/download/file", " ", String.class);
+ assertEquals("should get the right response", TEST_STRING, response);
+
+ File file = new File("./target/cachedir");
+ String[] files = file.list();
+ assertTrue("There should not have any temp file", files.length == 0);
+
+ }
+
+ @Override
+ protected RouteBuilder createRouteBuilder() throws Exception {
+ return new RouteBuilder() {
+ @Override
+ public void configure() throws Exception {
+
+
+ from("jetty:http://localhost:8201/clipboard/download?chunked=true&matchOnUriPrefix=true")
+ .to("http://localhost:9101?bridgeEndpoint=true");
+
+ from("jetty:http://localhost:9101?chunked=true&matchOnUriPrefix=true")
+ .process(new Processor() {
+
+ public void process(Exchange exchange) throws Exception {
+ exchange.getOut().setBody(TEST_STRING);
+ }
+
+ });
+
+
+ }
+ };
+ }
+
+}
|
eb5df87c7ec4d7f62873dcf29108ddc2abcd13ca
|
ReactiveX-RxJava
|
avoiding some synchronization on combineLatest--
|
p
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/main/java/rx/operators/OperationCombineLatest.java b/rxjava-core/src/main/java/rx/operators/OperationCombineLatest.java
index 382f8ba8aa..2d77c3d3ec 100644
--- a/rxjava-core/src/main/java/rx/operators/OperationCombineLatest.java
+++ b/rxjava-core/src/main/java/rx/operators/OperationCombineLatest.java
@@ -26,6 +26,7 @@
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
import org.junit.Test;
import org.mockito.InOrder;
@@ -125,17 +126,13 @@ private static class Aggregator<R> implements Func1<Observer<R>, Subscription> {
private final FuncN<R> combineLatestFunction;
private final AtomicBoolean running = new AtomicBoolean(true);
-
- // used as an internal lock for handling the latest values and the completed state of each observer
+
+ // Stores how many observers have already completed
+ private final AtomicInteger numCompleted = new AtomicInteger(0);
+
+ // Used as an internal lock for handling the latest values of each observer
private final Object lockObject = new Object();
- /**
- * Store when an observer completes.
- * <p>
- * Note that access to this set MUST BE SYNCHRONIZED via 'lockObject' above.
- * */
- private final Set<CombineObserver<R, ?>> completed = new HashSet<CombineObserver<R, ?>>();
-
/**
* The latest value from each observer
* <p>
@@ -175,17 +172,14 @@ <T> void addObserver(CombineObserver<R, T> w) {
* @param w The observer that has completed.
*/
<T> void complete(CombineObserver<R, T> w) {
- synchronized(lockObject) {
- // store that this CombineLatestObserver is completed
- completed.add(w);
- // if all CombineObservers are completed, we mark the whole thing as completed
- if (completed.size() == observers.size()) {
- if (running.get()) {
- // mark ourselves as done
- observer.onCompleted();
- // just to ensure we stop processing in case we receive more onNext/complete/error calls after this
- running.set(false);
- }
+ int completed = numCompleted.incrementAndGet();
+ // if all CombineObservers are completed, we mark the whole thing as completed
+ if (completed == observers.size()) {
+ if (running.get()) {
+ // mark ourselves as done
+ observer.onCompleted();
+ // just to ensure we stop processing in case we receive more onNext/complete/error calls after this
+ running.set(false);
}
}
}
@@ -228,14 +222,12 @@ <T> void next(CombineObserver<R, T> w, T arg) {
// remember that this observer now has a latest value set
hasLatestValue.add(w);
- // if all observers in the 'observers' list have a value, invoke the combineLatestFunction
- for (CombineObserver<R, ?> rw : observers) {
- if (!hasLatestValue.contains(rw)) {
- // we don't have a value yet for each observer to combine, so we don't have a combined value yet either
- return;
- }
+ if (hasLatestValue.size() < observers.size()) {
+ // we don't have a value yet for each observer to combine, so we don't have a combined value yet either
+ return;
}
- // if we get to here this means all the queues have data
+
+ // if we get to here this means all the observers have a latest value
int i = 0;
for (CombineObserver<R, ?> _w : observers) {
argsToCombineLatest[i++] = latestValue.get(_w);
|
59cedea0109946484c70601164689d499a8612b0
|
elasticsearch
|
Fix parsing of file based template loading--We support three different settings in templates--* "settings" : { "index" : { "number_of_shards" : 12 } }-* "settings" : { "index.number_of_shards" : 12 }-* "settings" : { "number_of_shards" : 12 }--The latter one was not supported by the fix in -4235--This commit fixes this issue and uses randomized testing to test any of the three cases above when running integration tests.--Closes -4411-
|
c
|
https://github.com/elastic/elasticsearch
|
diff --git a/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java b/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java
index 6a15044657a73..3ab16f53d2e49 100644
--- a/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java
+++ b/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java
@@ -303,7 +303,15 @@ public static IndexTemplateMetaData fromXContent(XContentParser parser) throws I
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if ("settings".equals(currentFieldName)) {
- builder.settings(ImmutableSettings.settingsBuilder().put(SettingsLoader.Helper.loadNestedFromMap(parser.mapOrdered())));
+ ImmutableSettings.Builder templateSettingsBuilder = ImmutableSettings.settingsBuilder();
+ for (Map.Entry<String, String> entry : SettingsLoader.Helper.loadNestedFromMap(parser.mapOrdered()).entrySet()) {
+ if (!entry.getKey().startsWith("index.")) {
+ templateSettingsBuilder.put("index." + entry.getKey(), entry.getValue());
+ } else {
+ templateSettingsBuilder.put(entry.getKey(), entry.getValue());
+ }
+ }
+ builder.settings(templateSettingsBuilder.build());
} else if ("mappings".equals(currentFieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
diff --git a/src/test/java/org/elasticsearch/indices/template/IndexTemplateFileLoadingTests.java b/src/test/java/org/elasticsearch/indices/template/IndexTemplateFileLoadingTests.java
index 606d78d166665..de451120370cf 100644
--- a/src/test/java/org/elasticsearch/indices/template/IndexTemplateFileLoadingTests.java
+++ b/src/test/java/org/elasticsearch/indices/template/IndexTemplateFileLoadingTests.java
@@ -38,7 +38,7 @@
/**
*
*/
-@ClusterScope(scope= Scope.SUITE, numNodes=1)
+@ClusterScope(scope=Scope.SUITE, numNodes=1)
public class IndexTemplateFileLoadingTests extends ElasticsearchIntegrationTest {
@Rule
@@ -57,7 +57,8 @@ protected Settings nodeSettings(int nodeOrdinal) {
templatesDir.mkdir();
File dst = new File(templatesDir, "template.json");
- String template = Streams.copyToStringFromClasspath("/org/elasticsearch/indices/template/template.json");
+ // random template, one uses the 'setting.index.number_of_shards', the other 'settings.number_of_shards'
+ String template = Streams.copyToStringFromClasspath("/org/elasticsearch/indices/template/template" + randomInt(1) + ".json");
Files.write(template, dst, Charsets.UTF_8);
} catch (Exception e) {
throw new RuntimeException(e);
diff --git a/src/test/java/org/elasticsearch/indices/template/template.json b/src/test/java/org/elasticsearch/indices/template/template0.json
similarity index 100%
rename from src/test/java/org/elasticsearch/indices/template/template.json
rename to src/test/java/org/elasticsearch/indices/template/template0.json
diff --git a/src/test/java/org/elasticsearch/indices/template/template1.json b/src/test/java/org/elasticsearch/indices/template/template1.json
new file mode 100644
index 0000000000000..f91866865e7f5
--- /dev/null
+++ b/src/test/java/org/elasticsearch/indices/template/template1.json
@@ -0,0 +1,7 @@
+{
+ "template" : "foo*",
+ "settings" : {
+ "number_of_shards": 10,
+ "number_of_replicas": 0
+ }
+}
\ No newline at end of file
diff --git a/src/test/java/org/elasticsearch/indices/template/template2.json b/src/test/java/org/elasticsearch/indices/template/template2.json
new file mode 100644
index 0000000000000..c48169f15a519
--- /dev/null
+++ b/src/test/java/org/elasticsearch/indices/template/template2.json
@@ -0,0 +1,9 @@
+{
+ "template" : "foo*",
+ "settings" : {
+ "index" : {
+ "number_of_shards": 10,
+ "number_of_replicas": 0
+ }
+ }
+}
\ No newline at end of file
|
6bacf96acc651018a8cb4a0036e26dde975b7c65
|
ReactiveX-RxJava
|
Cleanup Javadocs--
|
p
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/main/java/rx/Observable.java b/rxjava-core/src/main/java/rx/Observable.java
index dad2ca7286..39058cdfbc 100644
--- a/rxjava-core/src/main/java/rx/Observable.java
+++ b/rxjava-core/src/main/java/rx/Observable.java
@@ -127,7 +127,7 @@ public static interface OnSubscribeFunc<T> extends Function {
/**
* Observable with Function to execute when subscribed to.
* <p>
- * NOTE: Use {@link #create(Func1)} to create an Observable instead of this method unless you
+ * NOTE: Use {@link #create(OnSubscribeFunc)} to create an Observable instead of this constructor unless you
* specifically have a need for inheritance.
*
* @param onSubscribe
diff --git a/rxjava-core/src/main/java/rx/observables/BlockingObservable.java b/rxjava-core/src/main/java/rx/observables/BlockingObservable.java
index 419a7e0c9d..aaf5b2adf8 100644
--- a/rxjava-core/src/main/java/rx/observables/BlockingObservable.java
+++ b/rxjava-core/src/main/java/rx/observables/BlockingObservable.java
@@ -109,7 +109,7 @@ private Subscription protectivelyWrapAndSubscribe(Observer<? super T> observer)
* <p>
* NOTE: This will block even if the Observable is asynchronous.
* <p>
- * This is similar to {@link #subscribe(Observer)}, but it blocks. Because it blocks it does
+ * This is similar to {@link Observable#subscribe(Observer)}, but it blocks. Because it blocks it does
* not need the {@link Observer#onCompleted()} or {@link Observer#onError(Throwable)} methods.
* <p>
* <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/B.forEach.png">
|
9eb049791037ecb65da18b562ff57804fbd6e942
|
ReactiveX-RxJava
|
Performance refactoring: OperatorSubscribeFunction--- migrated Func1 to OperatorSubscribeFunction for internal operator implementations-- do not wrap with AtomicObserver when it's a trusted operator--https://github.com/Netflix/RxJava/issues/104-
|
c
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/main/java/rx/observables/Observable.java b/rxjava-core/src/main/java/rx/observables/Observable.java
index adcef9f5a9..79ad9fa84d 100644
--- a/rxjava-core/src/main/java/rx/observables/Observable.java
+++ b/rxjava-core/src/main/java/rx/observables/Observable.java
@@ -46,6 +46,7 @@
import rx.observables.operations.OperationToObservableList;
import rx.observables.operations.OperationToObservableSortedList;
import rx.observables.operations.OperationZip;
+import rx.observables.operations.OperatorSubscribeFunction;
import rx.util.AtomicObservableSubscription;
import rx.util.AtomicObserver;
import rx.util.functions.Action0;
@@ -74,7 +75,7 @@ public class Observable<T> {
private final Func1<Observer<T>, Subscription> onSubscribe;
- public Observable(Func1<Observer<T>, Subscription> onSubscribe) {
+ protected Observable(Func1<Observer<T>, Subscription> onSubscribe) {
this.onSubscribe = onSubscribe;
}
@@ -104,16 +105,23 @@ public Observable(Func1<Observer<T>, Subscription> onSubscribe) {
* to stop receiving notifications before the provider has finished sending them
*/
public Subscription subscribe(Observer<T> observer) {
- /*
- * Wrap the observer and subscription in Atomic* wrappers to:
- *
- * - ensure correct behavior of onNext, onCompleted and onError.
- * - allow the Observer to have access to the subscription in asynchronous execution for checking if unsubscribed occurred without onComplete/onError.
- * - handle both synchronous and asynchronous subscribe() execution flows
- */
- final AtomicObservableSubscription subscription = new AtomicObservableSubscription();
- final Observer<T> atomicObserver = new AtomicObserver<T>(observer, subscription);
- return subscription.wrap(onSubscribe.call(atomicObserver));
+ if (onSubscribe instanceof OperatorSubscribeFunction) {
+ /*
+ * This means it's a 'trusted' operator so we won't wrap it.
+ */
+ return onSubscribe.call(observer);
+ } else {
+ /*
+ * Wrap the observer and subscription in Atomic* wrappers to:
+ *
+ * - ensure correct behavior of onNext, onCompleted and onError.
+ * - allow the Observer to have access to the subscription in asynchronous execution for checking if unsubscribed occurred without onComplete/onError.
+ * - handle both synchronous and asynchronous subscribe() execution flows
+ */
+ final AtomicObservableSubscription subscription = new AtomicObservableSubscription();
+ final Observer<T> atomicObserver = new AtomicObserver<T>(observer, subscription);
+ return subscription.wrap(onSubscribe.call(atomicObserver));
+ }
};
@SuppressWarnings({ "rawtypes", "unchecked" })
diff --git a/rxjava-core/src/main/java/rx/observables/operations/OperationCombineLatest.java b/rxjava-core/src/main/java/rx/observables/operations/OperationCombineLatest.java
index c1444e7e9b..d689a07bc0 100644
--- a/rxjava-core/src/main/java/rx/observables/operations/OperationCombineLatest.java
+++ b/rxjava-core/src/main/java/rx/observables/operations/OperationCombineLatest.java
@@ -103,7 +103,7 @@ public void onNext(Object args) {
*
* @param <R>
*/
- private static class Aggregator<R> implements Func1<Observer<R>, Subscription> {
+ private static class Aggregator<R> implements OperatorSubscribeFunction<R> {
private final FuncN<R> combineLatestFunction;
private Observer<R> Observer;
diff --git a/rxjava-core/src/main/java/rx/observables/operations/OperationFilter.java b/rxjava-core/src/main/java/rx/observables/operations/OperationFilter.java
index b26ef84b24..119635cc2d 100644
--- a/rxjava-core/src/main/java/rx/observables/operations/OperationFilter.java
+++ b/rxjava-core/src/main/java/rx/observables/operations/OperationFilter.java
@@ -24,6 +24,7 @@
import rx.observables.Observable;
import rx.observables.Observer;
import rx.observables.Subscription;
+import rx.util.AtomicObservableSubscription;
import rx.util.functions.Func1;
public final class OperationFilter<T> {
@@ -32,10 +33,11 @@ public static <T> Func1<Observer<T>, Subscription> filter(Observable<T> that, Fu
return new Filter<T>(that, predicate);
}
- private static class Filter<T> implements Func1<Observer<T>, Subscription> {
+ private static class Filter<T> implements OperatorSubscribeFunction<T> {
private final Observable<T> that;
private final Func1<T, Boolean> predicate;
+ private final AtomicObservableSubscription subscription = new AtomicObservableSubscription();
public Filter(Observable<T> that, Func1<T, Boolean> predicate) {
this.that = that;
@@ -43,7 +45,7 @@ public Filter(Observable<T> that, Func1<T, Boolean> predicate) {
}
public Subscription call(final Observer<T> observer) {
- return that.subscribe(new Observer<T>() {
+ return subscription.wrap(that.subscribe(new Observer<T>() {
public void onNext(T value) {
try {
if ((boolean) predicate.call(value)) {
@@ -51,7 +53,8 @@ public void onNext(T value) {
}
} catch (Exception ex) {
observer.onError(ex);
- // TODO is there a way to tell 'that' to unsubscribe if we have an error?
+ // this will work if the sequence is asynchronous, it will have no effect on a synchronous observable
+ subscription.unsubscribe();
}
}
@@ -62,7 +65,7 @@ public void onError(Exception ex) {
public void onCompleted() {
observer.onCompleted();
}
- });
+ }));
}
}
diff --git a/rxjava-core/src/main/java/rx/observables/operations/OperationLast.java b/rxjava-core/src/main/java/rx/observables/operations/OperationLast.java
index 4bdd125f1d..465ee7b391 100644
--- a/rxjava-core/src/main/java/rx/observables/operations/OperationLast.java
+++ b/rxjava-core/src/main/java/rx/observables/operations/OperationLast.java
@@ -40,7 +40,7 @@ public static <T> Func1<Observer<T>, Subscription> last(Observable<T> observable
return new Last<T>(observable);
}
- private static class Last<T> implements Func1<Observer<T>, Subscription> {
+ private static class Last<T> implements OperatorSubscribeFunction<T> {
private final AtomicReference<T> lastValue = new AtomicReference<T>();
private final Observable<T> that;
diff --git a/rxjava-core/src/main/java/rx/observables/operations/OperationMap.java b/rxjava-core/src/main/java/rx/observables/operations/OperationMap.java
index c1a689814c..0f19468b9a 100644
--- a/rxjava-core/src/main/java/rx/observables/operations/OperationMap.java
+++ b/rxjava-core/src/main/java/rx/observables/operations/OperationMap.java
@@ -79,7 +79,7 @@ public static <T, R> Func1<Observer<R>, Subscription> mapMany(Observable<T> sequ
* @param <R>
* the type of the output sequence.
*/
- private static class MapObservable<T, R> implements Func1<Observer<R>, Subscription> {
+ private static class MapObservable<T, R> implements OperatorSubscribeFunction<R> {
public MapObservable(Observable<T> sequence, Func1<T, R> func) {
this.sequence = sequence;
this.func = func;
diff --git a/rxjava-core/src/main/java/rx/observables/operations/OperationMaterialize.java b/rxjava-core/src/main/java/rx/observables/operations/OperationMaterialize.java
index 8c81d46ba5..f941c31edc 100644
--- a/rxjava-core/src/main/java/rx/observables/operations/OperationMaterialize.java
+++ b/rxjava-core/src/main/java/rx/observables/operations/OperationMaterialize.java
@@ -49,7 +49,7 @@ public static <T> Func1<Observer<Notification<T>>, Subscription> materialize(fin
return new MaterializeObservable<T>(sequence);
}
- private static class MaterializeObservable<T> implements Func1<Observer<Notification<T>>, Subscription> {
+ private static class MaterializeObservable<T> implements OperatorSubscribeFunction<Notification<T>> {
private final Observable<T> sequence;
diff --git a/rxjava-core/src/main/java/rx/observables/operations/OperationMerge.java b/rxjava-core/src/main/java/rx/observables/operations/OperationMerge.java
index 21d3791950..10f701abb2 100644
--- a/rxjava-core/src/main/java/rx/observables/operations/OperationMerge.java
+++ b/rxjava-core/src/main/java/rx/observables/operations/OperationMerge.java
@@ -46,7 +46,7 @@ public final class OperationMerge {
*/
public static <T> Func1<Observer<T>, Subscription> merge(final Observable<Observable<T>> o) {
// wrap in a Func so that if a chain is built up, then asynchronously subscribed to twice we will have 2 instances of Take<T> rather than 1 handing both, which is not thread-safe.
- return new Func1<Observer<T>, Subscription>() {
+ return new OperatorSubscribeFunction<T>() {
@Override
public Subscription call(Observer<T> observer) {
@@ -56,7 +56,7 @@ public Subscription call(Observer<T> observer) {
}
public static <T> Func1<Observer<T>, Subscription> merge(final Observable<T>... sequences) {
- return merge(Observable.create(new Func1<Observer<Observable<T>>, Subscription>() {
+ return merge(Observable.create(new OperatorSubscribeFunction<Observable<T>>() {
private volatile boolean unsubscribed = false;
@Override
@@ -85,7 +85,7 @@ public void unsubscribe() {
}
public static <T> Func1<Observer<T>, Subscription> merge(final List<Observable<T>> sequences) {
- return merge(Observable.create(new Func1<Observer<Observable<T>>, Subscription>() {
+ return merge(Observable.create(new OperatorSubscribeFunction<Observable<T>>() {
private volatile boolean unsubscribed = false;
@@ -126,7 +126,7 @@ public void unsubscribe() {
*
* @param <T>
*/
- private static final class MergeObservable<T> implements Func1<Observer<T>, Subscription> {
+ private static final class MergeObservable<T> implements OperatorSubscribeFunction<T> {
private final Observable<Observable<T>> sequences;
private final MergeSubscription ourSubscription = new MergeSubscription();
private AtomicBoolean stopped = new AtomicBoolean(false);
diff --git a/rxjava-core/src/main/java/rx/observables/operations/OperationMergeDelayError.java b/rxjava-core/src/main/java/rx/observables/operations/OperationMergeDelayError.java
index cc7d15c80a..11d0ddd662 100644
--- a/rxjava-core/src/main/java/rx/observables/operations/OperationMergeDelayError.java
+++ b/rxjava-core/src/main/java/rx/observables/operations/OperationMergeDelayError.java
@@ -58,7 +58,7 @@ public final class OperationMergeDelayError {
*/
public static <T> Func1<Observer<T>, Subscription> mergeDelayError(final Observable<Observable<T>> sequences) {
// wrap in a Func so that if a chain is built up, then asynchronously subscribed to twice we will have 2 instances of Take<T> rather than 1 handing both, which is not thread-safe.
- return new Func1<Observer<T>, Subscription>() {
+ return new OperatorSubscribeFunction<T>() {
@Override
public Subscription call(Observer<T> observer) {
@@ -68,7 +68,7 @@ public Subscription call(Observer<T> observer) {
}
public static <T> Func1<Observer<T>, Subscription> mergeDelayError(final Observable<T>... sequences) {
- return mergeDelayError(Observable.create(new Func1<Observer<Observable<T>>, Subscription>() {
+ return mergeDelayError(Observable.create(new OperatorSubscribeFunction<Observable<T>>() {
private volatile boolean unsubscribed = false;
@Override
@@ -97,7 +97,7 @@ public void unsubscribe() {
}
public static <T> Func1<Observer<T>, Subscription> mergeDelayError(final List<Observable<T>> sequences) {
- return mergeDelayError(Observable.create(new Func1<Observer<Observable<T>>, Subscription>() {
+ return mergeDelayError(Observable.create(new OperatorSubscribeFunction<Observable<T>>() {
private volatile boolean unsubscribed = false;
@@ -138,7 +138,7 @@ public void unsubscribe() {
*
* @param <T>
*/
- private static final class MergeDelayErrorObservable<T> implements Func1<Observer<T>, Subscription> {
+ private static final class MergeDelayErrorObservable<T> implements OperatorSubscribeFunction<T> {
private final Observable<Observable<T>> sequences;
private final MergeSubscription ourSubscription = new MergeSubscription();
private AtomicBoolean stopped = new AtomicBoolean(false);
@@ -848,7 +848,6 @@ private static class CaptureObserver implements Observer<String> {
@Override
public void onCompleted() {
- // TODO Auto-generated method stub
}
@@ -859,7 +858,6 @@ public void onError(Exception e) {
@Override
public void onNext(String args) {
- // TODO Auto-generated method stub
}
diff --git a/rxjava-core/src/main/java/rx/observables/operations/OperationOnErrorResumeNextViaFunction.java b/rxjava-core/src/main/java/rx/observables/operations/OperationOnErrorResumeNextViaFunction.java
index 06d6bbdf05..d041b9b0b7 100644
--- a/rxjava-core/src/main/java/rx/observables/operations/OperationOnErrorResumeNextViaFunction.java
+++ b/rxjava-core/src/main/java/rx/observables/operations/OperationOnErrorResumeNextViaFunction.java
@@ -38,7 +38,7 @@ public static <T> Func1<Observer<T>, Subscription> onErrorResumeNextViaFunction(
return new OnErrorResumeNextViaFunction<T>(originalSequence, resumeFunction);
}
- private static class OnErrorResumeNextViaFunction<T> implements Func1<Observer<T>, Subscription> {
+ private static class OnErrorResumeNextViaFunction<T> implements OperatorSubscribeFunction<T> {
private final Func1<Exception, Observable<T>> resumeFunction;
private final Observable<T> originalSequence;
diff --git a/rxjava-core/src/main/java/rx/observables/operations/OperationOnErrorResumeNextViaObservable.java b/rxjava-core/src/main/java/rx/observables/operations/OperationOnErrorResumeNextViaObservable.java
index c7da6bdee2..33fe9ee227 100644
--- a/rxjava-core/src/main/java/rx/observables/operations/OperationOnErrorResumeNextViaObservable.java
+++ b/rxjava-core/src/main/java/rx/observables/operations/OperationOnErrorResumeNextViaObservable.java
@@ -36,7 +36,7 @@ public static <T> Func1<Observer<T>, Subscription> onErrorResumeNextViaObservabl
return new OnErrorResumeNextViaObservable<T>(originalSequence, resumeSequence);
}
- private static class OnErrorResumeNextViaObservable<T> implements Func1<Observer<T>, Subscription> {
+ private static class OnErrorResumeNextViaObservable<T> implements OperatorSubscribeFunction<T> {
private final Observable<T> resumeSequence;
private final Observable<T> originalSequence;
diff --git a/rxjava-core/src/main/java/rx/observables/operations/OperationOnErrorReturn.java b/rxjava-core/src/main/java/rx/observables/operations/OperationOnErrorReturn.java
index 7f76433c10..8b8a988c3b 100644
--- a/rxjava-core/src/main/java/rx/observables/operations/OperationOnErrorReturn.java
+++ b/rxjava-core/src/main/java/rx/observables/operations/OperationOnErrorReturn.java
@@ -41,7 +41,7 @@ public static <T> Func1<Observer<T>, Subscription> onErrorReturn(Observable<T> o
return new OnErrorReturn<T>(originalSequence, resumeFunction);
}
- private static class OnErrorReturn<T> implements Func1<Observer<T>, Subscription> {
+ private static class OnErrorReturn<T> implements OperatorSubscribeFunction<T> {
private final Func1<Exception, T> resumeFunction;
private final Observable<T> originalSequence;
diff --git a/rxjava-core/src/main/java/rx/observables/operations/OperationScan.java b/rxjava-core/src/main/java/rx/observables/operations/OperationScan.java
index e7c09b0f83..7752bb2549 100644
--- a/rxjava-core/src/main/java/rx/observables/operations/OperationScan.java
+++ b/rxjava-core/src/main/java/rx/observables/operations/OperationScan.java
@@ -25,6 +25,7 @@
import rx.observables.Observable;
import rx.observables.Observer;
import rx.observables.Subscription;
+import rx.util.AtomicObservableSubscription;
import rx.util.functions.Func1;
import rx.util.functions.Func2;
@@ -61,10 +62,11 @@ public static <T> Func1<Observer<T>, Subscription> scan(Observable<T> sequence,
return new Accumulator<T>(sequence, null, accumulator);
}
- private static class Accumulator<T> implements Func1<Observer<T>, Subscription> {
+ private static class Accumulator<T> implements OperatorSubscribeFunction<T> {
private final Observable<T> sequence;
private final T initialValue;
private Func2<T, T, T> accumlatorFunction;
+ private final AtomicObservableSubscription subscription = new AtomicObservableSubscription();
private Accumulator(Observable<T> sequence, T initialValue, Func2<T, T, T> accumulator) {
this.sequence = sequence;
@@ -74,7 +76,7 @@ private Accumulator(Observable<T> sequence, T initialValue, Func2<T, T, T> accum
public Subscription call(final Observer<T> observer) {
- return sequence.subscribe(new Observer<T>() {
+ return subscription.wrap(sequence.subscribe(new Observer<T>() {
private T acc = initialValue;
private boolean hasSentInitialValue = false;
@@ -108,7 +110,8 @@ public synchronized void onNext(T value) {
observer.onNext(acc);
} catch (Exception ex) {
observer.onError(ex);
- // TODO is there a correct way to unsubscribe from the sequence?
+ // this will work if the sequence is asynchronous, it will have no effect on a synchronous observable
+ subscription.unsubscribe();
}
}
@@ -124,7 +127,7 @@ public synchronized void onCompleted() {
}
observer.onCompleted();
}
- });
+ }));
}
}
diff --git a/rxjava-core/src/main/java/rx/observables/operations/OperationSkip.java b/rxjava-core/src/main/java/rx/observables/operations/OperationSkip.java
index 35c470af1e..15d4e76cf0 100644
--- a/rxjava-core/src/main/java/rx/observables/operations/OperationSkip.java
+++ b/rxjava-core/src/main/java/rx/observables/operations/OperationSkip.java
@@ -62,7 +62,7 @@ public Subscription call(Observer<T> observer) {
*
* @param <T>
*/
- private static class Skip<T> implements Func1<Observer<T>, Subscription> {
+ private static class Skip<T> implements OperatorSubscribeFunction<T> {
private final int num;
private final Observable<T> items;
diff --git a/rxjava-core/src/main/java/rx/observables/operations/OperationSynchronize.java b/rxjava-core/src/main/java/rx/observables/operations/OperationSynchronize.java
index 6258c7703f..b6d3ad36c3 100644
--- a/rxjava-core/src/main/java/rx/observables/operations/OperationSynchronize.java
+++ b/rxjava-core/src/main/java/rx/observables/operations/OperationSynchronize.java
@@ -58,7 +58,7 @@ public static <T> Func1<Observer<T>, Subscription> synchronize(Observable<T> obs
return new Synchronize<T>(observable);
}
- private static class Synchronize<T> implements Func1<Observer<T>, Subscription> {
+ private static class Synchronize<T> implements OperatorSubscribeFunction<T> {
public Synchronize(Observable<T> innerObservable) {
this.innerObservable = innerObservable;
diff --git a/rxjava-core/src/main/java/rx/observables/operations/OperationTake.java b/rxjava-core/src/main/java/rx/observables/operations/OperationTake.java
index 676887d8fb..7d6bed7b71 100644
--- a/rxjava-core/src/main/java/rx/observables/operations/OperationTake.java
+++ b/rxjava-core/src/main/java/rx/observables/operations/OperationTake.java
@@ -26,6 +26,7 @@
import rx.observables.Observable;
import rx.observables.Observer;
import rx.observables.Subscription;
+import rx.util.AtomicObservableSubscription;
import rx.util.functions.Func1;
/**
@@ -44,7 +45,7 @@ public final class OperationTake {
*/
public static <T> Func1<Observer<T>, Subscription> take(final Observable<T> items, final int num) {
// wrap in a Watchbable so that if a chain is built up, then asynchronously subscribed to twice we will have 2 instances of Take<T> rather than 1 handing both, which is not thread-safe.
- return new Func1<Observer<T>, Subscription>() {
+ return new OperatorSubscribeFunction<T>() {
@Override
public Subscription call(Observer<T> observer) {
@@ -65,9 +66,10 @@ public Subscription call(Observer<T> observer) {
*
* @param <T>
*/
- private static class Take<T> implements Func1<Observer<T>, Subscription> {
+ private static class Take<T> implements OperatorSubscribeFunction<T> {
private final int num;
private final Observable<T> items;
+ private final AtomicObservableSubscription subscription = new AtomicObservableSubscription();
Take(final Observable<T> items, final int num) {
this.num = num;
@@ -75,7 +77,7 @@ private static class Take<T> implements Func1<Observer<T>, Subscription> {
}
public Subscription call(Observer<T> observer) {
- return items.subscribe(new ItemObserver(observer));
+ return subscription.wrap(items.subscribe(new ItemObserver(observer)));
}
/**
@@ -105,8 +107,8 @@ public void onNext(T args) {
if (counter.getAndIncrement() < num) {
observer.onNext(args);
} else {
- observer.onCompleted();
- // TODO do we need to unsubscribe here?
+ // this will work if the sequence is asynchronous, it will have no effect on a synchronous observable
+ subscription.unsubscribe();
}
}
@@ -168,8 +170,7 @@ public void testUnsubscribeAfterTake() {
verify(aObserver, times(1)).onNext("one");
verify(aObserver, never()).onNext("two");
verify(aObserver, never()).onNext("three");
- // TODO commented this out for now as it's broken and I'm questioning whether it needs to be
- // verify(s, times(1)).unsubscribe();
+ verify(s, times(1)).unsubscribe();
}
private static class TestObservable extends Observable<String> {
diff --git a/rxjava-core/src/main/java/rx/observables/operations/OperationToObservableIterable.java b/rxjava-core/src/main/java/rx/observables/operations/OperationToObservableIterable.java
index 0c58e0411c..802a25bac3 100644
--- a/rxjava-core/src/main/java/rx/observables/operations/OperationToObservableIterable.java
+++ b/rxjava-core/src/main/java/rx/observables/operations/OperationToObservableIterable.java
@@ -40,7 +40,7 @@ public static <T> Func1<Observer<T>, Subscription> toObservableIterable(Iterable
return new ToObservableIterable<T>(list);
}
- private static class ToObservableIterable<T> implements Func1<Observer<T>, Subscription> {
+ private static class ToObservableIterable<T> implements OperatorSubscribeFunction<T> {
public ToObservableIterable(Iterable<T> list) {
this.iterable = list;
}
diff --git a/rxjava-core/src/main/java/rx/observables/operations/OperationToObservableList.java b/rxjava-core/src/main/java/rx/observables/operations/OperationToObservableList.java
index 4b4ef30e44..3c5673b860 100644
--- a/rxjava-core/src/main/java/rx/observables/operations/OperationToObservableList.java
+++ b/rxjava-core/src/main/java/rx/observables/operations/OperationToObservableList.java
@@ -37,7 +37,7 @@ public static <T> Func1<Observer<List<T>>, Subscription> toObservableList(Observ
return new ToObservableList<T>(that);
}
- private static class ToObservableList<T> implements Func1<Observer<List<T>>, Subscription> {
+ private static class ToObservableList<T> implements OperatorSubscribeFunction<List<T>> {
private final Observable<T> that;
final ConcurrentLinkedQueue<T> list = new ConcurrentLinkedQueue<T>();
diff --git a/rxjava-core/src/main/java/rx/observables/operations/OperationToObservableSortedList.java b/rxjava-core/src/main/java/rx/observables/operations/OperationToObservableSortedList.java
index 7f5c99112d..de17b17345 100644
--- a/rxjava-core/src/main/java/rx/observables/operations/OperationToObservableSortedList.java
+++ b/rxjava-core/src/main/java/rx/observables/operations/OperationToObservableSortedList.java
@@ -64,7 +64,7 @@ public static <T> Func1<Observer<List<T>>, Subscription> toSortedList(Observable
return new ToObservableSortedList<T>(sequence, sortFunction);
}
- private static class ToObservableSortedList<T> implements Func1<Observer<List<T>>, Subscription> {
+ private static class ToObservableSortedList<T> implements OperatorSubscribeFunction<List<T>> {
private final Observable<T> that;
private final ConcurrentLinkedQueue<T> list = new ConcurrentLinkedQueue<T>();
diff --git a/rxjava-core/src/main/java/rx/observables/operations/OperationZip.java b/rxjava-core/src/main/java/rx/observables/operations/OperationZip.java
index 0b4fa26fba..43e816afe3 100644
--- a/rxjava-core/src/main/java/rx/observables/operations/OperationZip.java
+++ b/rxjava-core/src/main/java/rx/observables/operations/OperationZip.java
@@ -112,7 +112,7 @@ public void onNext(T args) {
* @param <R>
*/
@ThreadSafe
- private static class Aggregator<R> implements Func1<Observer<R>, Subscription> {
+ private static class Aggregator<R> implements OperatorSubscribeFunction<R> {
private volatile AtomicObserverSingleThreaded<R> observer;
private final FuncN<R> zipFunction;
diff --git a/rxjava-core/src/main/java/rx/observables/operations/OperatorSubscribeFunction.java b/rxjava-core/src/main/java/rx/observables/operations/OperatorSubscribeFunction.java
new file mode 100644
index 0000000000..c755dff760
--- /dev/null
+++ b/rxjava-core/src/main/java/rx/observables/operations/OperatorSubscribeFunction.java
@@ -0,0 +1,25 @@
+package rx.observables.operations;
+
+import rx.observables.Observable;
+import rx.observables.Observer;
+import rx.observables.Subscription;
+import rx.util.AtomicObserver;
+import rx.util.functions.Func1;
+
+/**
+ * A "marker" interface used for internal operators implementing the "subscribe" function and turned into Observables using {@link Observable#create(Func1)}.
+ * <p>
+ * This marker is used by it to treat these implementations as "trusted".
+ * <p>
+ * NOTE: If you use this interface you are declaring that your implementation:
+ * <ul>
+ * <li>is thread-safe</li>
+ * <li>doesn't need additional wrapping by {@link AtomicObserver}</li>
+ * <li>obeys the contract of onNext, onError, onComplete</li>
+ * </ul>
+ *
+ * @param <T>
+ */
+public interface OperatorSubscribeFunction<T> extends Func1<Observer<T>, Subscription> {
+
+}
diff --git a/rxjava-core/src/main/java/rx/util/functions/Functions.java b/rxjava-core/src/main/java/rx/util/functions/Functions.java
index 518283c87a..6b9237cdf1 100644
--- a/rxjava-core/src/main/java/rx/util/functions/Functions.java
+++ b/rxjava-core/src/main/java/rx/util/functions/Functions.java
@@ -91,12 +91,6 @@ public static FuncN from(final Object function) {
} else {
/* not an Rx Function so try language adaptors */
- /*
- * TODO the following code needs to be evaluated for performance
- *
- * The c.isInstance and keySet() functions may be areas of concern with as often as this will be executed
- */
-
// check for language adaptor
for (final Class c : languageAdaptors.keySet()) {
if (c.isInstance(function)) {
|
c907ce325e89b25d0ff29288f018adda35bbdeed
|
elasticsearch
|
[Test] make recovery slow down in- rerouteRecoveryTest aware of index size--
|
p
|
https://github.com/elastic/elasticsearch
|
diff --git a/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryTests.java b/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryTests.java
index c18e20fcd7598..3084a6a58d6b1 100644
--- a/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryTests.java
+++ b/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryTests.java
@@ -28,6 +28,7 @@
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand;
import org.elasticsearch.common.settings.ImmutableSettings;
+import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.recovery.RecoveryState.Stage;
import org.elasticsearch.indices.recovery.RecoveryState.Type;
@@ -100,18 +101,23 @@ private void assertOnGoingRecoveryState(RecoveryState state, int shardId, Type t
assertThat(state.getStage(), not(equalTo(Stage.DONE)));
}
- private void slowDownRecovery() {
+ private void slowDownRecovery(ByteSizeValue shardSize) {
+ long chunkSize = shardSize.bytes() / 10;
assertTrue(client().admin().cluster().prepareUpdateSettings()
.setTransientSettings(ImmutableSettings.builder()
- // let the default file chunk wait 2 seconds, not to delay the test for too long
- .put(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC, "256kb"))
+ // one chunk per sec..
+ .put(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC, chunkSize)
+ .put(RecoverySettings.INDICES_RECOVERY_FILE_CHUNK_SIZE, chunkSize)
+ )
.get().isAcknowledged());
}
private void restoreRecoverySpeed() {
assertTrue(client().admin().cluster().prepareUpdateSettings()
.setTransientSettings(ImmutableSettings.builder()
- .put(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC, "20mb"))
+ .put(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC, "20mb")
+ .put(RecoverySettings.INDICES_RECOVERY_FILE_CHUNK_SIZE, "512kb")
+ )
.get().isAcknowledged());
}
@@ -208,7 +214,7 @@ public void rerouteRecoveryTest() throws Exception {
String nodeA = internalCluster().startNode(settingsBuilder().put("gateway.type", "local"));
logger.info("--> create index on node: {}", nodeA);
- createAndPopulateIndex(INDEX_NAME, 1, SHARD_COUNT, REPLICA_COUNT);
+ ByteSizeValue shardSize = createAndPopulateIndex(INDEX_NAME, 1, SHARD_COUNT, REPLICA_COUNT).getShards()[0].getStats().getStore().size();
logger.info("--> start node B");
String nodeB = internalCluster().startNode(settingsBuilder().put("gateway.type", "local"));
@@ -216,7 +222,7 @@ public void rerouteRecoveryTest() throws Exception {
ensureGreen();
logger.info("--> slowing down recoveries");
- slowDownRecovery();
+ slowDownRecovery(shardSize);
logger.info("--> move shard from: {} to: {}", nodeA, nodeB);
client().admin().cluster().prepareReroute()
@@ -263,7 +269,7 @@ public void rerouteRecoveryTest() throws Exception {
assertFalse(client().admin().cluster().prepareHealth().setWaitForNodes("3").get().isTimedOut());
logger.info("--> slowing down recoveries");
- slowDownRecovery();
+ slowDownRecovery(shardSize);
logger.info("--> move replica shard from: {} to: {}", nodeA, nodeC);
client().admin().cluster().prepareReroute()
|
98249507cff3e363ecb4c5f06f14f0bb96da1ad5
|
elasticsearch
|
Add missing index name to indexing slow log--This was lost in refactoring even on the 2.x branch. The slow-log-is not per index not per shard anymore such that we don't add the-shard ID as the logger prefix. This commit adds back the index-name as part of the logging message not as a prefix on the logger-for better testabilitly.--Closes -17025-
|
c
|
https://github.com/elastic/elasticsearch
|
diff --git a/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java b/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java
index 5452daa7f077e..75d3d60daad9d 100644
--- a/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java
+++ b/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java
@@ -36,6 +36,7 @@
/**
*/
public final class IndexingSlowLog implements IndexingOperationListener {
+ private final Index index;
private boolean reformat;
private long indexWarnThreshold;
private long indexInfoThreshold;
@@ -85,6 +86,7 @@ public final class IndexingSlowLog implements IndexingOperationListener {
IndexingSlowLog(IndexSettings indexSettings, ESLogger indexLogger, ESLogger deleteLogger) {
this.indexLogger = indexLogger;
this.deleteLogger = deleteLogger;
+ this.index = indexSettings.getIndex();
indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING, this::setReformat);
this.reformat = indexSettings.getValue(INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING);
@@ -141,13 +143,13 @@ public void postIndex(Engine.Index index) {
private void postIndexing(ParsedDocument doc, long tookInNanos) {
if (indexWarnThreshold >= 0 && tookInNanos > indexWarnThreshold) {
- indexLogger.warn("{}", new SlowLogParsedDocumentPrinter(doc, tookInNanos, reformat, maxSourceCharsToLog));
+ indexLogger.warn("{}", new SlowLogParsedDocumentPrinter(index, doc, tookInNanos, reformat, maxSourceCharsToLog));
} else if (indexInfoThreshold >= 0 && tookInNanos > indexInfoThreshold) {
- indexLogger.info("{}", new SlowLogParsedDocumentPrinter(doc, tookInNanos, reformat, maxSourceCharsToLog));
+ indexLogger.info("{}", new SlowLogParsedDocumentPrinter(index, doc, tookInNanos, reformat, maxSourceCharsToLog));
} else if (indexDebugThreshold >= 0 && tookInNanos > indexDebugThreshold) {
- indexLogger.debug("{}", new SlowLogParsedDocumentPrinter(doc, tookInNanos, reformat, maxSourceCharsToLog));
+ indexLogger.debug("{}", new SlowLogParsedDocumentPrinter(index, doc, tookInNanos, reformat, maxSourceCharsToLog));
} else if (indexTraceThreshold >= 0 && tookInNanos > indexTraceThreshold) {
- indexLogger.trace("{}", new SlowLogParsedDocumentPrinter(doc, tookInNanos, reformat, maxSourceCharsToLog));
+ indexLogger.trace("{}", new SlowLogParsedDocumentPrinter(index, doc, tookInNanos, reformat, maxSourceCharsToLog));
}
}
@@ -156,9 +158,11 @@ static final class SlowLogParsedDocumentPrinter {
private final long tookInNanos;
private final boolean reformat;
private final int maxSourceCharsToLog;
+ private final Index index;
- SlowLogParsedDocumentPrinter(ParsedDocument doc, long tookInNanos, boolean reformat, int maxSourceCharsToLog) {
+ SlowLogParsedDocumentPrinter(Index index, ParsedDocument doc, long tookInNanos, boolean reformat, int maxSourceCharsToLog) {
this.doc = doc;
+ this.index = index;
this.tookInNanos = tookInNanos;
this.reformat = reformat;
this.maxSourceCharsToLog = maxSourceCharsToLog;
@@ -167,6 +171,7 @@ static final class SlowLogParsedDocumentPrinter {
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
+ sb.append(index).append(" ");
sb.append("took[").append(TimeValue.timeValueNanos(tookInNanos)).append("], took_millis[").append(TimeUnit.NANOSECONDS.toMillis(tookInNanos)).append("], ");
sb.append("type[").append(doc.type()).append("], ");
sb.append("id[").append(doc.id()).append("], ");
diff --git a/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java b/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java
index e367636651125..9e05122322a90 100644
--- a/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java
+++ b/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java
@@ -36,24 +36,30 @@
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.startsWith;
public class IndexingSlowLogTests extends ESTestCase {
public void testSlowLogParsedDocumentPrinterSourceToLog() throws IOException {
BytesReference source = JsonXContent.contentBuilder().startObject().field("foo", "bar").endObject().bytes();
ParsedDocument pd = new ParsedDocument(new StringField("uid", "test:id", Store.YES), new LegacyIntField("version", 1, Store.YES), "id",
"test", null, 0, -1, null, source, null);
-
+ Index index = new Index("foo", "123");
// Turning off document logging doesn't log source[]
- SlowLogParsedDocumentPrinter p = new SlowLogParsedDocumentPrinter(pd, 10, true, 0);
+ SlowLogParsedDocumentPrinter p = new SlowLogParsedDocumentPrinter(index, pd, 10, true, 0);
assertThat(p.toString(), not(containsString("source[")));
// Turning on document logging logs the whole thing
- p = new SlowLogParsedDocumentPrinter(pd, 10, true, Integer.MAX_VALUE);
+ p = new SlowLogParsedDocumentPrinter(index, pd, 10, true, Integer.MAX_VALUE);
assertThat(p.toString(), containsString("source[{\"foo\":\"bar\"}]"));
// And you can truncate the source
- p = new SlowLogParsedDocumentPrinter(pd, 10, true, 3);
+ p = new SlowLogParsedDocumentPrinter(index, pd, 10, true, 3);
+ assertThat(p.toString(), containsString("source[{\"f]"));
+
+ // And you can truncate the source
+ p = new SlowLogParsedDocumentPrinter(index, pd, 10, true, 3);
assertThat(p.toString(), containsString("source[{\"f]"));
+ assertThat(p.toString(), startsWith("[foo/123] took"));
}
public void testReformatSetting() {
|
e5ec49b123c4070355182f04397d8c8c347c9aff
|
hbase
|
HBASE-10818. Add integration test for bulkload- with replicas (Nick Dimiduk and Devaraj Das)--
|
p
|
https://github.com/apache/hbase
|
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java
index ca3a8f0e36e3..8ea27bfd5536 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java
@@ -32,7 +32,6 @@
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.ipc.RpcClient;
import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;
-import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.ToolRunner;
@@ -222,42 +221,6 @@ protected Set<String> getColumnFamilies() {
return null;
}
- /**
- * Modify a table, synchronous. Waiting logic similar to that of {@code admin.rb#alter_status}.
- */
- private static void modifyTableSync(HBaseAdmin admin, HTableDescriptor desc) throws Exception {
- admin.modifyTable(desc.getTableName(), desc);
- Pair<Integer, Integer> status = new Pair<Integer, Integer>() {{
- setFirst(0);
- setSecond(0);
- }};
- for (int i = 0; status.getFirst() != 0 && i < 500; i++) { // wait up to 500 seconds
- status = admin.getAlterStatus(desc.getTableName());
- if (status.getSecond() != 0) {
- LOG.debug(status.getSecond() - status.getFirst() + "/" + status.getSecond()
- + " regions updated.");
- Thread.sleep(1 * 1000l);
- } else {
- LOG.debug("All regions updated.");
- }
- }
- if (status.getSecond() != 0) {
- throw new Exception("Failed to update replica count after 500 seconds.");
- }
- }
-
- /**
- * Set the number of Region replicas.
- */
- private static void setReplicas(HBaseAdmin admin, TableName table, int replicaCount)
- throws Exception {
- admin.disableTable(table);
- HTableDescriptor desc = admin.getTableDescriptor(table);
- desc.setRegionReplication(replicaCount);
- modifyTableSync(admin, desc);
- admin.enableTable(table);
- }
-
public void test() throws Exception {
int maxIters = 3;
String mr = nomapred ? "--nomapred" : "";
@@ -294,7 +257,7 @@ public void test() throws Exception {
// disable monkey, enable region replicas, enable monkey
cleanUpMonkey("Altering table.");
LOG.debug("Altering " + tableName + " replica count to " + replicaCount);
- setReplicas(util.getHBaseAdmin(), tableName, replicaCount);
+ util.setReplicas(util.getHBaseAdmin(), tableName, replicaCount);
setUpMonkey();
startMonkey();
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java
index dd4415bf5258..4112014bd2c9 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java
@@ -18,8 +18,6 @@
*/
package org.apache.hadoop.hbase.mapreduce;
-import static org.junit.Assert.assertEquals;
-
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
@@ -28,6 +26,7 @@
import java.util.Map;
import java.util.Random;
import java.util.Set;
+import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.lang.RandomStringUtils;
@@ -38,14 +37,25 @@
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.IntegrationTestBase;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.IntegrationTests;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Consistency;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
+import org.apache.hadoop.hbase.coprocessor.ObserverContext;
+import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.regionserver.InternalScanner;
+import org.apache.hadoop.hbase.regionserver.RegionScanner;
+import org.apache.hadoop.hbase.regionserver.StorefileRefresherChore;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.RegionSplitter;
@@ -69,6 +79,9 @@
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
/**
* Test Bulk Load and MR on a distributed cluster.
* It starts an MR job that creates linked chains
@@ -99,15 +112,17 @@
* hbase.IntegrationTestBulkLoad.tableName
* The name of the table.
*
+ * hbase.IntegrationTestBulkLoad.replicaCount
+ * How many region replicas to configure for the table under test.
*/
@Category(IntegrationTests.class)
public class IntegrationTestBulkLoad extends IntegrationTestBase {
private static final Log LOG = LogFactory.getLog(IntegrationTestBulkLoad.class);
- private static byte[] CHAIN_FAM = Bytes.toBytes("L");
- private static byte[] SORT_FAM = Bytes.toBytes("S");
- private static byte[] DATA_FAM = Bytes.toBytes("D");
+ private static final byte[] CHAIN_FAM = Bytes.toBytes("L");
+ private static final byte[] SORT_FAM = Bytes.toBytes("S");
+ private static final byte[] DATA_FAM = Bytes.toBytes("D");
private static String CHAIN_LENGTH_KEY = "hbase.IntegrationTestBulkLoad.chainLength";
private static int CHAIN_LENGTH = 500000;
@@ -123,9 +138,73 @@ public class IntegrationTestBulkLoad extends IntegrationTestBase {
private static String TABLE_NAME_KEY = "hbase.IntegrationTestBulkLoad.tableName";
private static String TABLE_NAME = "IntegrationTestBulkLoad";
+ private static String NUM_REPLICA_COUNT_KEY = "hbase.IntegrationTestBulkLoad.replicaCount";
+ private static int NUM_REPLICA_COUNT_DEFAULT = 1;
+
+ private static final String OPT_LOAD = "load";
+ private static final String OPT_CHECK = "check";
+
+ private boolean load = false;
+ private boolean check = false;
+
+ public static class SlowMeCoproScanOperations extends BaseRegionObserver {
+ static final AtomicLong sleepTime = new AtomicLong(2000);
+ Random r = new Random();
+ AtomicLong countOfNext = new AtomicLong(0);
+ AtomicLong countOfOpen = new AtomicLong(0);
+ public SlowMeCoproScanOperations() {}
+ @Override
+ public RegionScanner preScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> e,
+ final Scan scan, final RegionScanner s) throws IOException {
+ if (countOfOpen.incrementAndGet() % 4 == 0) { //slowdown openScanner randomly
+ slowdownCode(e);
+ }
+ return s;
+ }
+
+ @Override
+ public boolean preScannerNext(final ObserverContext<RegionCoprocessorEnvironment> e,
+ final InternalScanner s, final List<Result> results,
+ final int limit, final boolean hasMore) throws IOException {
+ //this will slow down a certain next operation if the conditions are met. The slowness
+ //will allow the call to go to a replica
+ if (countOfNext.incrementAndGet() % 4 == 0) {
+ slowdownCode(e);
+ }
+ return true;
+ }
+ protected void slowdownCode(final ObserverContext<RegionCoprocessorEnvironment> e) {
+ if (e.getEnvironment().getRegion().getRegionInfo().getReplicaId() == 0) {
+ try {
+ if (sleepTime.get() > 0) {
+ LOG.info("Sleeping for " + sleepTime.get() + " ms");
+ Thread.sleep(sleepTime.get());
+ }
+ } catch (InterruptedException e1) {
+ LOG.error(e1);
+ }
+ }
+ }
+ }
+
+ /**
+ * Modify table {@code getTableName()} to carry {@link SlowMeCoproScanOperations}.
+ */
+ private void installSlowingCoproc() throws IOException, InterruptedException {
+ int replicaCount = conf.getInt(NUM_REPLICA_COUNT_KEY, NUM_REPLICA_COUNT_DEFAULT);
+ if (replicaCount == NUM_REPLICA_COUNT_DEFAULT) return;
+
+ TableName t = TableName.valueOf(getTablename());
+ HBaseAdmin admin = util.getHBaseAdmin();
+ HTableDescriptor desc = admin.getTableDescriptor(t);
+ desc.addCoprocessor(SlowMeCoproScanOperations.class.getName());
+ HBaseTestingUtility.modifyTableSync(admin, desc);
+ }
+
@Test
public void testBulkLoad() throws Exception {
runLoad();
+ installSlowingCoproc();
runCheck();
}
@@ -145,7 +224,7 @@ private byte[][] getSplits(int numRegions) {
return split.split(numRegions);
}
- private void setupTable() throws IOException {
+ private void setupTable() throws IOException, InterruptedException {
if (util.getHBaseAdmin().tableExists(getTablename())) {
util.deleteTable(getTablename());
}
@@ -155,6 +234,12 @@ private void setupTable() throws IOException {
new byte[][]{CHAIN_FAM, SORT_FAM, DATA_FAM},
getSplits(16)
);
+
+ int replicaCount = conf.getInt(NUM_REPLICA_COUNT_KEY, NUM_REPLICA_COUNT_DEFAULT);
+ if (replicaCount == NUM_REPLICA_COUNT_DEFAULT) return;
+
+ TableName t = TableName.valueOf(getTablename());
+ HBaseTestingUtility.setReplicas(util.getHBaseAdmin(), t, replicaCount);
}
private void runLinkedListMRJob(int iteration) throws Exception {
@@ -556,23 +641,23 @@ private void runCheck() throws IOException, ClassNotFoundException, InterruptedE
Path p = util.getDataTestDirOnTestFS(jobName);
Job job = new Job(conf);
-
job.setJarByClass(getClass());
+ job.setJobName(jobName);
job.setPartitionerClass(NaturalKeyPartitioner.class);
job.setGroupingComparatorClass(NaturalKeyGroupingComparator.class);
job.setSortComparatorClass(CompositeKeyComparator.class);
- Scan s = new Scan();
- s.addFamily(CHAIN_FAM);
- s.addFamily(SORT_FAM);
- s.setMaxVersions(1);
- s.setCacheBlocks(false);
- s.setBatch(1000);
+ Scan scan = new Scan();
+ scan.addFamily(CHAIN_FAM);
+ scan.addFamily(SORT_FAM);
+ scan.setMaxVersions(1);
+ scan.setCacheBlocks(false);
+ scan.setBatch(1000);
TableMapReduceUtil.initTableMapperJob(
Bytes.toBytes(getTablename()),
- new Scan(),
+ scan,
LinkedListCheckingMapper.class,
LinkKey.class,
LinkChain.class,
@@ -595,6 +680,10 @@ private void runCheck() throws IOException, ClassNotFoundException, InterruptedE
public void setUpCluster() throws Exception {
util = getTestingUtil(getConf());
util.initializeCluster(1);
+ int replicaCount = getConf().getInt(NUM_REPLICA_COUNT_KEY, NUM_REPLICA_COUNT_DEFAULT);
+ if (LOG.isDebugEnabled() && replicaCount != NUM_REPLICA_COUNT_DEFAULT) {
+ LOG.debug("Region Replicas enabled: " + replicaCount);
+ }
// Scale this up on a real cluster
if (util.isDistributedCluster()) {
@@ -607,12 +696,6 @@ public void setUpCluster() throws Exception {
}
}
- private static final String OPT_LOAD = "load";
- private static final String OPT_CHECK = "check";
-
- private boolean load = false;
- private boolean check = false;
-
@Override
protected void addOptions() {
super.addOptions();
@@ -632,6 +715,7 @@ public int runTestFromCommandLine() throws Exception {
if (load) {
runLoad();
} else if (check) {
+ installSlowingCoproc();
runCheck();
} else {
testBulkLoad();
@@ -655,5 +739,4 @@ public static void main(String[] args) throws Exception {
int status = ToolRunner.run(conf, new IntegrationTestBulkLoad(), args);
System.exit(status);
}
-
-}
\ No newline at end of file
+}
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java
index 66f31552d7e9..eb3bb706b0d2 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java
@@ -234,6 +234,7 @@ public static class TimeBoundedMultiThreadedReader extends MultiThreadedReader {
protected AtomicLong timedOutReads = new AtomicLong();
protected long runTime;
protected Thread timeoutThread;
+ protected AtomicLong staleReads = new AtomicLong();
public TimeBoundedMultiThreadedReader(LoadTestDataGenerator dataGen, Configuration conf,
TableName tableName, double verifyPercent) throws IOException {
@@ -263,6 +264,7 @@ public void waitForFinish() {
@Override
protected String progressInfo() {
StringBuilder builder = new StringBuilder(super.progressInfo());
+ appendToStatus(builder, "stale_reads", staleReads.get());
appendToStatus(builder, "get_timeouts", timedOutReads.get());
return builder.toString();
}
@@ -327,6 +329,9 @@ protected void verifyResultsAndUpdateMetrics(boolean verify, Get[] gets, long el
Result[] results, HTableInterface table, boolean isNullExpected)
throws IOException {
super.verifyResultsAndUpdateMetrics(verify, gets, elapsedNano, results, table, isNullExpected);
+ for (Result r : results) {
+ if (r.isStale()) staleReads.incrementAndGet();
+ }
// we actually do not timeout and cancel the reads after timeout. We just wait for the RPC
// to complete, but if the request took longer than timeout, we treat that as error.
if (elapsedNano > timeoutNano) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java
index 7eb7871d3af6..e8e6e8bb73e9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java
@@ -65,6 +65,7 @@ public class TableRecordReaderImpl {
private TaskAttemptContext context = null;
private Method getCounter = null;
private long numRestarts = 0;
+ private long numStale = 0;
private long timestamp;
private int rowcount;
private boolean logScannerActivity = false;
@@ -203,6 +204,7 @@ public boolean nextKeyValue() throws IOException, InterruptedException {
try {
try {
value = this.scanner.next();
+ if (value != null && value.isStale()) numStale++;
if (logScannerActivity) {
rowcount ++;
if (rowcount >= logPerRowCount) {
@@ -230,6 +232,7 @@ public boolean nextKeyValue() throws IOException, InterruptedException {
scanner.next(); // skip presumed already mapped row
}
value = scanner.next();
+ if (value != null && value.isStale()) numStale++;
numRestarts++;
}
if (value != null && value.size() > 0) {
@@ -270,11 +273,11 @@ private void updateCounters() throws IOException {
ScanMetrics scanMetrics = ProtobufUtil.toScanMetrics(serializedMetrics);
- updateCounters(scanMetrics, numRestarts, getCounter, context);
+ updateCounters(scanMetrics, numRestarts, getCounter, context, numStale);
}
protected static void updateCounters(ScanMetrics scanMetrics, long numScannerRestarts,
- Method getCounter, TaskAttemptContext context) {
+ Method getCounter, TaskAttemptContext context, long numStale) {
// we can get access to counters only if hbase uses new mapreduce APIs
if (getCounter == null) {
return;
@@ -289,6 +292,8 @@ protected static void updateCounters(ScanMetrics scanMetrics, long numScannerRes
}
((Counter) getCounter.invoke(context, HBASE_COUNTER_GROUP_NAME,
"NUM_SCANNER_RESTARTS")).increment(numScannerRestarts);
+ ((Counter) getCounter.invoke(context, HBASE_COUNTER_GROUP_NAME,
+ "NUM_SCAN_RESULTS_STALE")).increment(numStale);
} catch (Exception e) {
LOG.debug("can't update counter." + StringUtils.stringifyException(e));
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.java
index f8d4d1805990..8071c5648b6c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.java
@@ -152,7 +152,7 @@ public boolean nextKeyValue() throws IOException, InterruptedException {
if (result) {
ScanMetrics scanMetrics = delegate.getScanner().getScanMetrics();
if (scanMetrics != null && context != null) {
- TableRecordReaderImpl.updateCounters(scanMetrics, 0, getCounter, context);
+ TableRecordReaderImpl.updateCounters(scanMetrics, 0, getCounter, context, 0);
}
}
return result;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index 79bda274d7e5..3b64b735dea6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -92,6 +92,7 @@
import org.apache.hadoop.hbase.util.JVMClusterUtil;
import org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread;
import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
+import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.RegionSplitter;
import org.apache.hadoop.hbase.util.RetryCounter;
import org.apache.hadoop.hbase.util.Threads;
@@ -1463,6 +1464,44 @@ public HTable createTable(byte[] tableName, byte[][] families, byte[][] splitRow
return new HTable(getConfiguration(), tableName);
}
+ /**
+ * Modify a table, synchronous. Waiting logic similar to that of {@code admin.rb#alter_status}.
+ */
+ public static void modifyTableSync(HBaseAdmin admin, HTableDescriptor desc)
+ throws IOException, InterruptedException {
+ admin.modifyTable(desc.getTableName(), desc);
+ Pair<Integer, Integer> status = new Pair<Integer, Integer>() {{
+ setFirst(0);
+ setSecond(0);
+ }};
+ for (int i = 0; status.getFirst() != 0 && i < 500; i++) { // wait up to 500 seconds
+ status = admin.getAlterStatus(desc.getTableName());
+ if (status.getSecond() != 0) {
+ LOG.debug(status.getSecond() - status.getFirst() + "/" + status.getSecond()
+ + " regions updated.");
+ Thread.sleep(1 * 1000l);
+ } else {
+ LOG.debug("All regions updated.");
+ break;
+ }
+ }
+ if (status.getSecond() != 0) {
+ throw new IOException("Failed to update replica count after 500 seconds.");
+ }
+ }
+
+ /**
+ * Set the number of Region replicas.
+ */
+ public static void setReplicas(HBaseAdmin admin, TableName table, int replicaCount)
+ throws IOException, InterruptedException {
+ admin.disableTable(table);
+ HTableDescriptor desc = admin.getTableDescriptor(table);
+ desc.setRegionReplication(replicaCount);
+ modifyTableSync(admin, desc);
+ admin.enableTable(table);
+ }
+
/**
* Drop an existing table
* @param tableName existing table
|
1fa7b71cf82cc30757ecf5d2a8e0cfba654ed469
|
hbase
|
HBASE-14807 TestWALLockup is flakey--
|
c
|
https://github.com/apache/hbase
|
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALLockup.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALLockup.java
index a7014434f62b..708801033e24 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALLockup.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALLockup.java
@@ -20,7 +20,6 @@
import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
import java.io.IOException;
import java.util.concurrent.CountDownLatch;
@@ -30,6 +29,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
@@ -103,7 +103,7 @@ String getName() {
* <p>First I need to set up some mocks for Server and RegionServerServices. I also need to
* set up a dodgy WAL that will throw an exception when we go to append to it.
*/
- @Test (timeout=30000)
+ @Test (timeout=15000)
public void testLockupWhenSyncInMiddleOfZigZagSetup() throws IOException {
// A WAL that we can have throw exceptions when a flag is set.
class DodgyFSLog extends FSHLog {
@@ -209,15 +209,21 @@ public long getLength() throws IOException {
put.addColumn(COLUMN_FAMILY_BYTES, Bytes.toBytes("1"), bytes);
WALKey key = new WALKey(region.getRegionInfo().getEncodedNameAsBytes(), htd.getTableName());
WALEdit edit = new WALEdit();
+ CellScanner CellScanner = put.cellScanner();
+ assertTrue(CellScanner.advance());
+ edit.add(CellScanner.current());
// Put something in memstore and out in the WAL. Do a big number of appends so we push
// out other side of the ringbuffer. If small numbers, stuff doesn't make it to WAL
for (int i = 0; i < 1000; i++) {
- dodgyWAL.append(htd, region.getRegionInfo(), key, edit, true);
+ region.put(put);
}
// Set it so we start throwing exceptions.
+ LOG.info("SET throwing of exception on append");
dodgyWAL.throwException = true;
- // This append provokes a WAL roll.
+ // This append provokes a WAL roll request
dodgyWAL.append(htd, region.getRegionInfo(), key, edit, true);
+ // Now wait until the dodgy WAL is latched.
+ while (dodgyWAL.latch.getCount() <= 0) Threads.sleep(1);
boolean exception = false;
try {
dodgyWAL.sync();
@@ -229,20 +235,25 @@ public long getLength() throws IOException {
// Get a memstore flush going too so we have same hung profile as up in the issue over
// in HBASE-14317. Flush hangs trying to get sequenceid because the ringbuffer is held up
// by the zigzaglatch waiting on syncs to come home.
- Thread t = new Thread ("flusher") {
+ Thread t = new Thread ("Flusher") {
public void run() {
try {
+ if (region.getMemstoreSize() <= 0) {
+ throw new IOException("memstore size=" + region.getMemstoreSize());
+ }
region.flush(false);
} catch (IOException e) {
+ // Can fail trying to flush in middle of a roll. Not a failure. Will succeed later
+ // when roll completes.
LOG.info("In flush", e);
- fail();
}
+ LOG.info("Exiting");
};
};
t.setDaemon(true);
t.start();
- // Wait till it gets into flushing. It will get stuck on getSequenceId. Then proceed.
- while (!region.writestate.flushing) Threads.sleep(1);
+ // Wait until
+ while (dodgyWAL.latch.getCount() > 0) Threads.sleep(1);
// Now assert I got a new WAL file put in place even though loads of errors above.
assertTrue(originalWAL != dodgyWAL.getCurrentFileName());
// Can I append to it?
|
b2b67bd48d228d919db25ca3afd26b70ebf58897
|
camel
|
CAMEL-6013 fixed the issue that Validator- component fails on XSD with indirect relative import--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@1438352 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/camel
|
diff --git a/camel-core/src/main/java/org/apache/camel/component/validator/DefaultLSResourceResolver.java b/camel-core/src/main/java/org/apache/camel/component/validator/DefaultLSResourceResolver.java
index bb4831950f1a7..842b20f2213b1 100644
--- a/camel-core/src/main/java/org/apache/camel/component/validator/DefaultLSResourceResolver.java
+++ b/camel-core/src/main/java/org/apache/camel/component/validator/DefaultLSResourceResolver.java
@@ -36,12 +36,21 @@ public class DefaultLSResourceResolver implements LSResourceResolver {
private final CamelContext camelContext;
private final String resourceUri;
private final String resourcePath;
+ private String relatedURI;
public DefaultLSResourceResolver(CamelContext camelContext, String resourceUri) {
this.camelContext = camelContext;
this.resourceUri = resourceUri;
this.resourcePath = FileUtil.onlyPath(resourceUri);
}
+
+ private String getUri(String systemId) {
+ if (resourcePath != null) {
+ return FileUtil.onlyPath(resourceUri) + "/" + systemId;
+ } else {
+ return systemId;
+ }
+ }
@Override
public LSInput resolveResource(String type, String namespaceURI, String publicId, String systemId, String baseURI) {
@@ -50,7 +59,13 @@ public LSInput resolveResource(String type, String namespaceURI, String publicId
throw new IllegalArgumentException(String.format("Resource: %s refers an invalid resource without SystemId."
+ " Invalid resource has type: %s, namespaceURI: %s, publicId: %s, systemId: %s, baseURI: %s", resourceUri, type, namespaceURI, publicId, systemId, baseURI));
}
- return new DefaultLSInput(publicId, systemId, baseURI);
+ // Build up the relative path for using
+ if (baseURI == null) {
+ relatedURI = getUri(systemId);
+ } else {
+ relatedURI = FileUtil.onlyPath(relatedURI) + "/" + systemId;
+ }
+ return new DefaultLSInput(publicId, systemId, baseURI, relatedURI);
}
private final class DefaultLSInput implements LSInput {
@@ -58,23 +73,25 @@ private final class DefaultLSInput implements LSInput {
private final String publicId;
private final String systemId;
private final String baseURI;
+ private final String relatedURI;
private final String uri;
+
- private DefaultLSInput(String publicId, String systemId, String baseURI) {
+ private DefaultLSInput(String publicId, String systemId, String basedURI, String relatedURI) {
this.publicId = publicId;
this.systemId = systemId;
- this.baseURI = baseURI;
+ this.baseURI = basedURI;
+ this.relatedURI = relatedURI;
this.uri = getInputUri();
}
private String getInputUri() {
// find the xsd with relative path
- if (ObjectHelper.isNotEmpty(baseURI)) {
- String inputUri = getUri(getRelativePath(baseURI));
+ if (ObjectHelper.isNotEmpty(relatedURI)) {
try {
- ResourceHelper.resolveMandatoryResourceAsInputStream(camelContext.getClassResolver(), inputUri);
- return inputUri;
+ ResourceHelper.resolveMandatoryResourceAsInputStream(camelContext.getClassResolver(), relatedURI);
+ return relatedURI;
} catch (IOException e) {
// ignore the exception
}
@@ -83,28 +100,6 @@ private String getInputUri() {
return getUri("");
}
- private String getRelativePath(String base) {
- String userDir = "";
- String answer = "";
- if (ObjectHelper.isNotEmpty(base)) {
- try {
- userDir = FileUtil.getUserDir().toURI().toASCIIString();
- } catch (Exception ex) {
- // do nothing here
- }
- // get the relative path from the userdir
- if (ObjectHelper.isNotEmpty(base) && base.startsWith("file://") && userDir.startsWith("file:")) {
- // skip the protocol part
- base = base.substring(7);
- userDir = userDir.substring(5);
- if (base.startsWith(userDir)) {
- answer = FileUtil.onlyPath(base.substring(userDir.length())) + "/";
- }
- }
- }
- return answer;
- }
-
private String getUri(String relativePath) {
if (resourcePath != null) {
return FileUtil.onlyPath(resourceUri) + "/" + relativePath + systemId;
diff --git a/tests/camel-itest/src/test/java/org/apache/camel/itest/validator/ValidatorSchemaImportTest.java b/tests/camel-itest/src/test/java/org/apache/camel/itest/validator/ValidatorSchemaImportTest.java
index 0c5dfd6aa979c..caed57ed5c9ef 100644
--- a/tests/camel-itest/src/test/java/org/apache/camel/itest/validator/ValidatorSchemaImportTest.java
+++ b/tests/camel-itest/src/test/java/org/apache/camel/itest/validator/ValidatorSchemaImportTest.java
@@ -113,6 +113,35 @@ public void configure() throws Exception {
MockEndpoint.assertIsSatisfied(validEndpoint, invalidEndpoint, finallyEndpoint);
}
+
+ /**
+ * Test for the valid schema location relative to a path other than the validating schema
+ * @throws Exception
+ */
+ @Test
+ public void testChildParentUncleSchemaImport() throws Exception {
+ context.addRoutes(new RouteBuilder() {
+ @Override
+ public void configure() throws Exception {
+ from("direct:start")
+ .doTry()
+ .to("validator:org/apache/camel/component/validator/childparentuncle/child/child.xsd")
+ .to("mock:valid")
+ .doCatch(ValidationException.class)
+ .to("mock:invalid")
+ .doFinally()
+ .to("mock:finally")
+ .end();
+ }
+ });
+ validEndpoint.expectedMessageCount(1);
+ finallyEndpoint.expectedMessageCount(1);
+
+ template.sendBody("direct:start",
+ "<childuser xmlns='http://foo.com/bar'><user><id>1</id><username>Test User</username></user></childuser>");
+
+ MockEndpoint.assertIsSatisfied(validEndpoint, invalidEndpoint, finallyEndpoint);
+ }
@Override
@Before
diff --git a/tests/camel-itest/src/test/resources/org/apache/camel/component/validator/childparentuncle/child/child.xsd b/tests/camel-itest/src/test/resources/org/apache/camel/component/validator/childparentuncle/child/child.xsd
new file mode 100644
index 0000000000000..3cf6b48cf1c21
--- /dev/null
+++ b/tests/camel-itest/src/test/resources/org/apache/camel/component/validator/childparentuncle/child/child.xsd
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<xs:schema elementFormDefault="qualified" version="1.0"
+ targetNamespace="http://foo.com/bar"
+ xmlns:tns="http://foo.com/bar"
+ xmlns:xs="http://www.w3.org/2001/XMLSchema">
+
+ <xs:include schemaLocation="../deeper/parent/parent.xsd"/>
+ <xs:element name="childuser">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element ref="tns:user" />
+ </xs:sequence>
+ </xs:complexType>
+ </xs:element>
+
+</xs:schema>
+
diff --git a/tests/camel-itest/src/test/resources/org/apache/camel/component/validator/childparentuncle/deeper/parent/parent.xsd b/tests/camel-itest/src/test/resources/org/apache/camel/component/validator/childparentuncle/deeper/parent/parent.xsd
new file mode 100644
index 0000000000000..01aec8e42ce1a
--- /dev/null
+++ b/tests/camel-itest/src/test/resources/org/apache/camel/component/validator/childparentuncle/deeper/parent/parent.xsd
@@ -0,0 +1,17 @@
+<xs:schema elementFormDefault="qualified" version="1.0"
+ targetNamespace="http://foo.com/bar"
+ xmlns:tns="http://foo.com/bar"
+ xmlns:xs="http://www.w3.org/2001/XMLSchema">
+
+<xs:include schemaLocation="../uncle/uncle.xsd"/>
+<xs:element name="parent">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element name="id" type="xs:int"/>
+ <xs:element name="username" type="xs:string"/>
+ </xs:sequence>
+ </xs:complexType>
+ </xs:element>
+
+</xs:schema>
+
diff --git a/tests/camel-itest/src/test/resources/org/apache/camel/component/validator/childparentuncle/deeper/uncle/uncle.xsd b/tests/camel-itest/src/test/resources/org/apache/camel/component/validator/childparentuncle/deeper/uncle/uncle.xsd
new file mode 100644
index 0000000000000..8f708d8ecdc9f
--- /dev/null
+++ b/tests/camel-itest/src/test/resources/org/apache/camel/component/validator/childparentuncle/deeper/uncle/uncle.xsd
@@ -0,0 +1,17 @@
+<xs:schema elementFormDefault="qualified" version="1.0"
+ targetNamespace="http://foo.com/bar"
+ xmlns:tns="http://foo.com/bar"
+ xmlns:xs="http://www.w3.org/2001/XMLSchema">
+
+
+ <xs:element name="user">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element name="id" type="xs:int"/>
+ <xs:element name="username" type="xs:string"/>
+ </xs:sequence>
+ </xs:complexType>
+ </xs:element>
+
+</xs:schema>
+
|
ab8caaab8f294a1c90109499dc57770253528446
|
camel
|
Updaed the mail component so that mail headers- are correctly copied over from message to message. This highlighted the fact- that th TO: should always be what is specified on the outbound endpoint- instead of the To: in the message. Added a test the validates headers are- preserved.--git-svn-id: https://svn.apache.org/repos/asf/activemq/camel/trunk@537937 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/camel
|
diff --git a/camel-core/src/main/java/org/apache/camel/CamelTemplate.java b/camel-core/src/main/java/org/apache/camel/CamelTemplate.java
index aa33dfae5268c..d19c27d2dc80b 100644
--- a/camel-core/src/main/java/org/apache/camel/CamelTemplate.java
+++ b/camel-core/src/main/java/org/apache/camel/CamelTemplate.java
@@ -146,6 +146,26 @@ public void process(Exchange exchange) {
});
return extractResultBody(result);
}
+
+ /**
+ * Sends the body to an endpoint with the specified headers and header values
+ *
+ * @param endpointUri the endpoint URI to send to
+ * @param body the payload send
+ * @return the result
+ */
+ public Object sendBody(String endpointUri, final Object body, final Map<String, Object> headers) {
+ E result = send(endpointUri, new Processor() {
+ public void process(Exchange exchange) {
+ Message in = exchange.getIn();
+ for (Map.Entry<String, Object> header : headers.entrySet()) {
+ in.setHeader(header.getKey(), header.getValue());
+ }
+ in.setBody(body);
+ }
+ });
+ return extractResultBody(result);
+ }
// Methods using the default endpoint
//-----------------------------------------------------------------------
diff --git a/camel-mail/src/main/java/org/apache/camel/component/mail/MailBinding.java b/camel-mail/src/main/java/org/apache/camel/component/mail/MailBinding.java
index 96ab82165d8bf..583c8534e09ad 100644
--- a/camel-mail/src/main/java/org/apache/camel/component/mail/MailBinding.java
+++ b/camel-mail/src/main/java/org/apache/camel/component/mail/MailBinding.java
@@ -17,15 +17,16 @@
*/
package org.apache.camel.component.mail;
-import org.apache.camel.Exchange;
+import java.util.Map;
+import java.util.Set;
+import javax.mail.Address;
import javax.mail.Message;
import javax.mail.MessagingException;
-import javax.mail.Address;
-import javax.mail.internet.MimeMessage;
import javax.mail.internet.InternetAddress;
-import java.util.Map;
-import java.util.Set;
+import javax.mail.internet.MimeMessage;
+
+import org.apache.camel.Exchange;
/**
* A Strategy used to convert between a Camel {@Exchange} and {@Message} to and from a
@@ -38,11 +39,11 @@ public void populateMailMessage(MailEndpoint endpoint, MimeMessage mimeMessage,
try {
appendMailHeaders(mimeMessage, exchange.getIn());
- if (empty(mimeMessage.getAllRecipients())) {
- // lets default the address to the endpoint destination
- String destination = endpoint.getConfiguration().getDestination();
+ String destination = endpoint.getConfiguration().getDestination();
+ if (destination != null ) {
mimeMessage.setRecipients(Message.RecipientType.TO, destination);
}
+
if (empty(mimeMessage.getFrom())) {
// lets default the address to the endpoint destination
String from = endpoint.getConfiguration().getFrom();
@@ -84,7 +85,23 @@ protected void appendMailHeaders(MimeMessage mimeMessage, org.apache.camel.Messa
Object headerValue = entry.getValue();
if (headerValue != null) {
if (shouldOutputHeader(camelMessage, headerName, headerValue)) {
- mimeMessage.setHeader(headerName, headerValue.toString());
+
+ String[] values = new String[]{};
+ Class stringArrayClazz = values.getClass();
+
+ // Mail messages can repeat the same header...
+ if( headerValue.getClass() == stringArrayClazz ) {
+ mimeMessage.removeHeader(headerName);
+ values = (String[]) headerValue;
+ for (int i = 0; i < values.length; i++) {
+ mimeMessage.addHeader(headerName, values[i]);
+ }
+ } else if( headerValue.getClass() == String.class ) {
+ mimeMessage.setHeader(headerName, (String) headerValue);
+ } else {
+ // Unknown type? then use toString()
+ mimeMessage.setHeader(headerName, headerValue.toString());
+ }
}
}
}
diff --git a/camel-mail/src/main/java/org/apache/camel/component/mail/MailMessage.java b/camel-mail/src/main/java/org/apache/camel/component/mail/MailMessage.java
index 1100a66c1538e..f0ed90ee74e4a 100644
--- a/camel-mail/src/main/java/org/apache/camel/component/mail/MailMessage.java
+++ b/camel-mail/src/main/java/org/apache/camel/component/mail/MailMessage.java
@@ -19,6 +19,7 @@
import org.apache.camel.impl.DefaultMessage;
+import javax.mail.Header;
import javax.mail.Message;
import javax.mail.MessagingException;
import java.util.Enumeration;
@@ -67,7 +68,7 @@ public void setMessage(Message mailMessage) {
}
public Object getHeader(String name) {
- Object answer = null;
+ String[] answer = null;
if (mailMessage != null) {
try {
answer = mailMessage.getHeader(name);
@@ -77,9 +78,12 @@ public Object getHeader(String name) {
}
}
if (answer == null) {
- answer = super.getHeader(name);
+ return super.getHeader(name);
}
- return answer;
+ if( answer.length > 0 ) {
+ return answer[0];
+ }
+ return null;
}
@Override
@@ -105,15 +109,16 @@ protected void populateInitialHeaders(Map<String, Object> map) {
catch (MessagingException e) {
throw new MessageHeaderNamesAccessException(e);
}
- while (names.hasMoreElements()) {
- String name = names.nextElement().toString();
- try {
- Object value = mailMessage.getHeader(name);
- map.put(name, value);
- }
- catch (MessagingException e) {
- throw new MessageHeaderAccessException(name, e);
- }
+
+ System.out.println("Copying....");
+ try {
+ while (names.hasMoreElements()) {
+ Header header = (Header) names.nextElement();
+ map.put(header.getName(), header.getValue());
+ System.out.println("Set: "+header.getName()+"="+header.getValue());
+ }
+ }catch (Throwable e) {
+ throw new MessageHeaderNamesAccessException(e);
}
}
}
diff --git a/camel-mail/src/main/java/org/apache/camel/component/mail/MessageHeaderNamesAccessException.java b/camel-mail/src/main/java/org/apache/camel/component/mail/MessageHeaderNamesAccessException.java
index e66353460aefd..1e4a0a1954e32 100644
--- a/camel-mail/src/main/java/org/apache/camel/component/mail/MessageHeaderNamesAccessException.java
+++ b/camel-mail/src/main/java/org/apache/camel/component/mail/MessageHeaderNamesAccessException.java
@@ -17,7 +17,6 @@
*/
package org.apache.camel.component.mail;
-import javax.mail.MessagingException;
/**
* @version $Revision:520964 $
@@ -25,7 +24,7 @@
public class MessageHeaderNamesAccessException extends RuntimeMailException {
private static final long serialVersionUID = -6744171518099741324L;
- public MessageHeaderNamesAccessException(MessagingException e) {
+ public MessageHeaderNamesAccessException(Throwable e) {
super("Failed to acess the Mail message property names", e);
}
}
diff --git a/camel-mail/src/test/java/org/apache/camel/component/mail/MailRouteTest.java b/camel-mail/src/test/java/org/apache/camel/component/mail/MailRouteTest.java
index 21573151273f6..9a2c7eaba20e7 100644
--- a/camel-mail/src/test/java/org/apache/camel/component/mail/MailRouteTest.java
+++ b/camel-mail/src/test/java/org/apache/camel/component/mail/MailRouteTest.java
@@ -18,6 +18,7 @@
package org.apache.camel.component.mail;
import org.apache.camel.ContextTestSupport;
+import org.apache.camel.Exchange;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.builder.RouteBuilder;
import static org.apache.camel.util.ObjectHelper.asString;
@@ -27,6 +28,7 @@
import javax.mail.Message;
import javax.mail.MessagingException;
import java.io.IOException;
+import java.util.HashMap;
/**
* @version $Revision: 1.1 $
@@ -38,7 +40,9 @@ public void testSendAndReceiveMails() throws Exception {
resultEndpoint = (MockEndpoint) resolveMandatoryEndpoint("mock:result");
resultEndpoint.expectedBodiesReceived("hello world!");
- template.sendBody("smtp://james@localhost", "hello world!");
+ HashMap<String, Object> headers = new HashMap<String, Object>();
+ headers.put("reply-to", "reply1@localhost");
+ template.sendBody("smtp://james@localhost", "hello world!", headers);
// lets test the first sent worked
assertMailboxReceivedMessages("james@localhost");
@@ -48,7 +52,12 @@ public void testSendAndReceiveMails() throws Exception {
// lets test the receive worked
resultEndpoint.assertIsSatisfied();
-
+
+ // Validate that the headers were preserved.
+ Exchange exchange = resultEndpoint.getReceivedExchanges().get(0);
+ String replyTo = (String) exchange.getIn().getHeader("reply-to");
+ assertEquals( "reply1@localhost", replyTo);
+
assertMailboxReceivedMessages("copy@localhost");
}
|
01e706466e561557d8591b3031cd85ae39b0559a
|
intellij-community
|
gradle: correctly set TestModuleProperties for- modules containing '-' in names (IDEA-151590)--
|
c
|
https://github.com/JetBrains/intellij-community
|
diff --git a/platform/external-system-api/src/com/intellij/openapi/externalSystem/model/project/ModuleData.java b/platform/external-system-api/src/com/intellij/openapi/externalSystem/model/project/ModuleData.java
index 4179681ab59ba..c2c06d2180435 100644
--- a/platform/external-system-api/src/com/intellij/openapi/externalSystem/model/project/ModuleData.java
+++ b/platform/external-system-api/src/com/intellij/openapi/externalSystem/model/project/ModuleData.java
@@ -3,7 +3,6 @@
import com.intellij.ide.highlighter.ModuleFileType;
import com.intellij.openapi.externalSystem.model.ProjectSystemId;
import com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil;
-import com.intellij.openapi.util.io.FileUtil;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
@@ -58,7 +57,7 @@ protected ModuleData(@NotNull String id,
@NotNull String internalName,
@NotNull String moduleFileDirectoryPath,
@NotNull String externalConfigPath) {
- super(owner, externalName, FileUtil.sanitizeFileName(internalName));
+ super(owner, externalName, internalName);
myId = id;
myModuleTypeId = typeId;
myExternalConfigPath = externalConfigPath;
diff --git a/plugins/gradle/src/org/jetbrains/plugins/gradle/service/project/BaseGradleProjectResolverExtension.java b/plugins/gradle/src/org/jetbrains/plugins/gradle/service/project/BaseGradleProjectResolverExtension.java
index 2a7f6b2eb6c25..9f67df07d6893 100644
--- a/plugins/gradle/src/org/jetbrains/plugins/gradle/service/project/BaseGradleProjectResolverExtension.java
+++ b/plugins/gradle/src/org/jetbrains/plugins/gradle/service/project/BaseGradleProjectResolverExtension.java
@@ -234,8 +234,8 @@ public DataNode<ModuleData> createModule(@NotNull IdeaModule gradleModule, @NotN
}
@NotNull
- public String getInternalModuleName(@NotNull IdeaModule gradleModule, @NotNull String sourceSetName) {
- return gradleModule.getName() + "_" + sourceSetName;
+ private static String getInternalModuleName(@NotNull IdeaModule gradleModule, @NotNull String sourceSetName) {
+ return FileUtil.sanitizeFileName(gradleModule.getName() + "_" + sourceSetName);
}
@Override
diff --git a/plugins/gradle/testSources/org/jetbrains/plugins/gradle/importing/GradleMiscImportingTest.java b/plugins/gradle/testSources/org/jetbrains/plugins/gradle/importing/GradleMiscImportingTest.java
index c28a1745846c8..88c0cb575d438 100644
--- a/plugins/gradle/testSources/org/jetbrains/plugins/gradle/importing/GradleMiscImportingTest.java
+++ b/plugins/gradle/testSources/org/jetbrains/plugins/gradle/importing/GradleMiscImportingTest.java
@@ -59,6 +59,20 @@ public void testTestModuleProperties() throws Exception {
assertSame(productionModule, testModuleProperties.getProductionModule());
}
+ @Test
+ public void testTestModulePropertiesForModuleWithHyphenInName() throws Exception {
+ createSettingsFile("rootProject.name='my-project'");
+ importProject(
+ "apply plugin: 'java'"
+ );
+
+ assertModules("my-project", "my_project_main", "my_project_test");
+
+ final Module testModule = getModule("my_project_test");
+ TestModuleProperties testModuleProperties = TestModuleProperties.getInstance(testModule);
+ assertEquals("my_project_main", testModuleProperties.getProductionModuleName());
+ }
+
@Test
public void testInheritProjectJdkForModules() throws Exception {
importProject(
|
10f91cb0d91dab0e19103754bc2c9bf4b40e96a3
|
camel
|
CAMEL-2795: Fixed tests--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@952909 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/camel
|
diff --git a/components/camel-jetty/src/test/java/org/apache/camel/component/jetty/jettyproducer/JettyHttpProducerTimeoutTest.java b/components/camel-jetty/src/test/java/org/apache/camel/component/jetty/jettyproducer/JettyHttpProducerTimeoutTest.java
index e1347e87e2e01..6a223d47cea26 100644
--- a/components/camel-jetty/src/test/java/org/apache/camel/component/jetty/jettyproducer/JettyHttpProducerTimeoutTest.java
+++ b/components/camel-jetty/src/test/java/org/apache/camel/component/jetty/jettyproducer/JettyHttpProducerTimeoutTest.java
@@ -16,6 +16,7 @@
*/
package org.apache.camel.component.jetty.jettyproducer;
+import org.apache.camel.Exchange;
import org.apache.camel.ExchangeTimedOutException;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.test.junit4.CamelTestSupport;
@@ -38,13 +39,11 @@ public void testTimeout() throws Exception {
// give Jetty time to startup properly
Thread.sleep(1000);
- try {
- template.request(url, null);
- fail("Should have thrown a timeout exception");
- } catch (Exception e) {
- ExchangeTimedOutException cause = assertIsInstanceOf(ExchangeTimedOutException.class, e.getCause());
- assertEquals(2000, cause.getTimeout());
- }
+ Exchange reply = template.request(url, null);
+ Exception e = reply.getException();
+ assertNotNull("Should have thrown an exception", e);
+ ExchangeTimedOutException cause = assertIsInstanceOf(ExchangeTimedOutException.class, e);
+ assertEquals(2000, cause.getTimeout());
}
@Override
|
03351581d05f135185ba6e487986f14667b25cbe
|
hbase
|
HBASE-4606 Remove spam in HCM and fix a- list.size == 0--git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1185326 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hbase
|
diff --git a/CHANGES.txt b/CHANGES.txt
index 0645ae1adc7e..22fafea1dab9 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -623,6 +623,7 @@ Release 0.92.0 - Unreleased
HBASE-4558 Refactor TestOpenedRegionHandler and TestOpenRegionHandler.(Ram)
HBASE-4558 Addendum for TestMasterFailover (Ram) - Breaks the build
HBASE-4568 Make zk dump jsp response faster
+ HBASE-4606 Remove spam in HCM and fix a list.size == 0
TASKS
diff --git a/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java b/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java
index 406e5e345493..f7fac44e8752 100644
--- a/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java
+++ b/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java
@@ -1400,12 +1400,9 @@ public <R> void processBatchCallback(
throw new IllegalArgumentException(
"argument results must be the same size as argument list");
}
- if (list.size() == 0) {
+ if (list.isEmpty()) {
return;
}
- if (LOG.isDebugEnabled()) {
- LOG.debug("expecting "+results.length+" results");
- }
// Keep track of the most recent servers for any given item for better
// exceptional reporting. We keep HRegionLocation to save on parsing.
|
f75570734bb41a61d66b10922b6fc1ed48119067
|
hadoop
|
YARN-2582. Fixed Log CLI and Web UI for showing- aggregated logs of LRS. Contributed Xuan Gong.--(cherry picked from commit e90718fa5a0e7c18592af61534668acebb9db51b)-
|
c
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 92512356d7e15..ccfc1db7d70bc 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -346,6 +346,9 @@ Release 2.6.0 - UNRELEASED
YARN-2673. Made timeline client put APIs retry if ConnectException happens.
(Li Lu via zjshen)
+ YARN-2582. Fixed Log CLI and Web UI for showing aggregated logs of LRS. (Xuan
+ Gong via zjshen)
+
OPTIMIZATIONS
BUG FIXES
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java
index eb6169cf36868..0b34a46281baf 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java
@@ -31,7 +31,6 @@
import org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
-import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -39,8 +38,6 @@
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
-import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat;
-import org.apache.hadoop.yarn.logaggregation.LogAggregationUtils;
import org.apache.hadoop.yarn.logaggregation.LogCLIHelpers;
import org.apache.hadoop.yarn.util.ConverterUtils;
@@ -113,17 +110,16 @@ public int run(String[] args) throws Exception {
System.err.println("Invalid ApplicationId specified");
return -1;
}
-
+
try {
int resultCode = verifyApplicationState(appId);
if (resultCode != 0) {
- System.out.println("Application has not completed." +
- " Logs are only available after an application completes");
+ System.out.println("Logs are not avaiable right now.");
return resultCode;
}
} catch (Exception e) {
- System.err.println("Unable to get ApplicationState." +
- " Attempting to fetch logs directly from the filesystem.");
+ System.err.println("Unable to get ApplicationState."
+ + " Attempting to fetch logs directly from the filesystem.");
}
LogCLIHelpers logCliHelper = new LogCLIHelpers();
@@ -141,18 +137,9 @@ public int run(String[] args) throws Exception {
printHelpMessage(printOpts);
resultCode = -1;
} else {
- Path remoteRootLogDir =
- new Path(getConf().get(YarnConfiguration.NM_REMOTE_APP_LOG_DIR,
- YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR));
- AggregatedLogFormat.LogReader reader =
- new AggregatedLogFormat.LogReader(getConf(),
- LogAggregationUtils.getRemoteNodeLogFileForApp(
- remoteRootLogDir,
- appId,
- appOwner,
- ConverterUtils.toNodeId(nodeAddress),
- LogAggregationUtils.getRemoteNodeLogDirSuffix(getConf())));
- resultCode = logCliHelper.dumpAContainerLogs(containerIdStr, reader, System.out);
+ resultCode =
+ logCliHelper.dumpAContainersLogs(appIdStr, containerIdStr,
+ nodeAddress, appOwner);
}
return resultCode;
@@ -167,10 +154,10 @@ private int verifyApplicationState(ApplicationId appId) throws IOException,
switch (appReport.getYarnApplicationState()) {
case NEW:
case NEW_SAVING:
- case ACCEPTED:
case SUBMITTED:
- case RUNNING:
return -1;
+ case ACCEPTED:
+ case RUNNING:
case FAILED:
case FINISHED:
case KILLED:
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestLogsCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestLogsCLI.java
index f02f3358a25f5..132dca245c5d6 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestLogsCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestLogsCLI.java
@@ -25,21 +25,38 @@
import static org.mockito.Mockito.mock;
import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintStream;
import java.io.PrintWriter;
+import java.io.Writer;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
import org.junit.Assert;
-
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
+import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptIdPBImpl;
+import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl;
+import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
+import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat;
+import org.apache.hadoop.yarn.logaggregation.LogAggregationUtils;
import org.apache.hadoop.yarn.logaggregation.LogCLIHelpers;
import org.junit.Before;
import org.junit.Test;
@@ -138,6 +155,116 @@ public void testHelpMessage() throws Exception {
Assert.assertEquals(appReportStr, sysOutStream.toString());
}
+ @Test (timeout = 15000)
+ public void testFetchApplictionLogs() throws Exception {
+ String remoteLogRootDir = "target/logs/";
+ Configuration configuration = new Configuration();
+ configuration.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, true);
+ configuration
+ .set(YarnConfiguration.NM_REMOTE_APP_LOG_DIR, remoteLogRootDir);
+ configuration.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true);
+ configuration.set(YarnConfiguration.YARN_ADMIN_ACL, "admin");
+ FileSystem fs = FileSystem.get(configuration);
+
+ UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
+ ApplicationId appId = ApplicationIdPBImpl.newInstance(0, 1);
+ ApplicationAttemptId appAttemptId =
+ ApplicationAttemptIdPBImpl.newInstance(appId, 1);
+ ContainerId containerId1 = ContainerIdPBImpl.newInstance(appAttemptId, 1);
+ ContainerId containerId2 = ContainerIdPBImpl.newInstance(appAttemptId, 2);
+
+ NodeId nodeId = NodeId.newInstance("localhost", 1234);
+
+ // create local logs
+ String rootLogDir = "target/LocalLogs";
+ Path rootLogDirPath = new Path(rootLogDir);
+ if (fs.exists(rootLogDirPath)) {
+ fs.delete(rootLogDirPath, true);
+ }
+ assertTrue(fs.mkdirs(rootLogDirPath));
+
+ Path appLogsDir = new Path(rootLogDirPath, appId.toString());
+ if (fs.exists(appLogsDir)) {
+ fs.delete(appLogsDir, true);
+ }
+ assertTrue(fs.mkdirs(appLogsDir));
+ List<String> rootLogDirs = Arrays.asList(rootLogDir);
+
+ // create container logs in localLogDir
+ createContainerLogInLocalDir(appLogsDir, containerId1, fs);
+ createContainerLogInLocalDir(appLogsDir, containerId2, fs);
+
+ Path path =
+ new Path(remoteLogRootDir + ugi.getShortUserName()
+ + "/logs/application_0_0001");
+ if (fs.exists(path)) {
+ fs.delete(path, true);
+ }
+ assertTrue(fs.mkdirs(path));
+ // upload container logs into remote directory
+ uploadContainerLogIntoRemoteDir(ugi, configuration, rootLogDirs, nodeId,
+ containerId1, path, fs);
+ uploadContainerLogIntoRemoteDir(ugi, configuration, rootLogDirs, nodeId,
+ containerId2, path, fs);
+
+ YarnClient mockYarnClient =
+ createMockYarnClient(YarnApplicationState.FINISHED);
+ LogsCLI cli = new LogsCLIForTest(mockYarnClient);
+ cli.setConf(configuration);
+
+ int exitCode = cli.run(new String[] { "-applicationId", appId.toString() });
+ assertTrue(exitCode == 0);
+ assertTrue(sysOutStream.toString().contains(
+ "Hello container_0_0001_01_000001!"));
+ assertTrue(sysOutStream.toString().contains(
+ "Hello container_0_0001_01_000002!"));
+ sysOutStream.reset();
+
+ exitCode =
+ cli.run(new String[] { "-applicationId", appId.toString(),
+ "-nodeAddress", nodeId.toString(), "-containerId",
+ containerId1.toString() });
+ assertTrue(exitCode == 0);
+ assertTrue(sysOutStream.toString().contains(
+ "Hello container_0_0001_01_000001!"));
+
+ fs.delete(new Path(remoteLogRootDir), true);
+ fs.delete(new Path(rootLogDir), true);
+ }
+
+ private static void createContainerLogInLocalDir(Path appLogsDir,
+ ContainerId containerId, FileSystem fs) throws Exception {
+ Path containerLogsDir = new Path(appLogsDir, containerId.toString());
+ if (fs.exists(containerLogsDir)) {
+ fs.delete(containerLogsDir, true);
+ }
+ assertTrue(fs.mkdirs(containerLogsDir));
+ Writer writer =
+ new FileWriter(new File(containerLogsDir.toString(), "sysout"));
+ writer.write("Hello " + containerId + "!");
+ writer.close();
+ }
+
+ private static void uploadContainerLogIntoRemoteDir(UserGroupInformation ugi,
+ Configuration configuration, List<String> rootLogDirs, NodeId nodeId,
+ ContainerId containerId, Path appDir, FileSystem fs) throws Exception {
+ Path path =
+ new Path(appDir, LogAggregationUtils.getNodeString(nodeId)
+ + System.currentTimeMillis());
+ AggregatedLogFormat.LogWriter writer =
+ new AggregatedLogFormat.LogWriter(configuration, path, ugi);
+ writer.writeApplicationOwner(ugi.getUserName());
+
+ Map<ApplicationAccessType, String> appAcls =
+ new HashMap<ApplicationAccessType, String>();
+ appAcls.put(ApplicationAccessType.VIEW_APP, ugi.getUserName());
+ writer.writeApplicationACLs(appAcls);
+ writer.append(new AggregatedLogFormat.LogKey(containerId),
+ new AggregatedLogFormat.LogValue(rootLogDirs, containerId,
+ UserGroupInformation.getCurrentUser().getShortUserName()));
+ writer.close();
+ }
+
private YarnClient createMockYarnClient(YarnApplicationState appState)
throws YarnException, IOException {
YarnClient mockClient = mock(YarnClient.class);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogAggregationUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogAggregationUtils.java
index fe4983e70b2b1..34c9100cc8ba7 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogAggregationUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogAggregationUtils.java
@@ -110,4 +110,9 @@ public static String getRemoteNodeLogDirSuffix(Configuration conf) {
public static String getNodeString(NodeId nodeId) {
return nodeId.toString().replace(":", "_");
}
+
+ @VisibleForTesting
+ public static String getNodeString(String nodeId) {
+ return nodeId.toString().replace(":", "_");
+ }
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java
index 3bafdb35438af..9efdef891d2e5 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java
@@ -52,19 +52,47 @@ public int dumpAContainersLogs(String appId, String containerId,
YarnConfiguration.NM_REMOTE_APP_LOG_DIR,
YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR));
String suffix = LogAggregationUtils.getRemoteNodeLogDirSuffix(getConf());
- Path logPath = LogAggregationUtils.getRemoteNodeLogFileForApp(
+ Path remoteAppLogDir = LogAggregationUtils.getRemoteAppLogDir(
remoteRootLogDir, ConverterUtils.toApplicationId(appId), jobOwner,
- ConverterUtils.toNodeId(nodeId), suffix);
- AggregatedLogFormat.LogReader reader;
+ suffix);
+ RemoteIterator<FileStatus> nodeFiles;
try {
- reader = new AggregatedLogFormat.LogReader(getConf(), logPath);
- } catch (FileNotFoundException fnfe) {
- System.out.println("Logs not available at " + logPath.toString());
- System.out
- .println("Log aggregation has not completed or is not enabled.");
+ Path qualifiedLogDir =
+ FileContext.getFileContext(getConf()).makeQualified(
+ remoteAppLogDir);
+ nodeFiles =
+ FileContext.getFileContext(qualifiedLogDir.toUri(), getConf())
+ .listStatus(remoteAppLogDir);
+ } catch (FileNotFoundException fnf) {
+ logDirNotExist(remoteAppLogDir.toString());
+ return -1;
+ }
+ boolean foundContainerLogs = false;
+ while (nodeFiles.hasNext()) {
+ FileStatus thisNodeFile = nodeFiles.next();
+ String fileName = thisNodeFile.getPath().getName();
+ if (fileName.contains(LogAggregationUtils.getNodeString(nodeId))
+ && !fileName.endsWith(LogAggregationUtils.TMP_FILE_SUFFIX)) {
+ AggregatedLogFormat.LogReader reader = null;
+ try {
+ reader =
+ new AggregatedLogFormat.LogReader(getConf(),
+ thisNodeFile.getPath());
+ if (dumpAContainerLogs(containerId, reader, System.out) > -1) {
+ foundContainerLogs = true;
+ }
+ } finally {
+ if (reader != null) {
+ reader.close();
+ }
+ }
+ }
+ }
+ if (!foundContainerLogs) {
+ containerLogNotFound(containerId);
return -1;
}
- return dumpAContainerLogs(containerId, reader, System.out);
+ return 0;
}
@Private
@@ -81,8 +109,7 @@ public int dumpAContainerLogs(String containerIdStr,
}
if (valueStream == null) {
- System.out.println("Logs for container " + containerIdStr
- + " are not present in this log-file.");
+ containerLogNotFound(containerIdStr);
return -1;
}
@@ -114,42 +141,49 @@ public int dumpAllContainersLogs(ApplicationId appId, String appOwner,
nodeFiles = FileContext.getFileContext(qualifiedLogDir.toUri(),
getConf()).listStatus(remoteAppLogDir);
} catch (FileNotFoundException fnf) {
- System.out.println("Logs not available at " + remoteAppLogDir.toString());
- System.out
- .println("Log aggregation has not completed or is not enabled.");
+ logDirNotExist(remoteAppLogDir.toString());
return -1;
}
+ boolean foundAnyLogs = false;
while (nodeFiles.hasNext()) {
FileStatus thisNodeFile = nodeFiles.next();
- AggregatedLogFormat.LogReader reader = new AggregatedLogFormat.LogReader(
- getConf(), new Path(remoteAppLogDir, thisNodeFile.getPath().getName()));
- try {
+ if (!thisNodeFile.getPath().getName()
+ .endsWith(LogAggregationUtils.TMP_FILE_SUFFIX)) {
+ AggregatedLogFormat.LogReader reader =
+ new AggregatedLogFormat.LogReader(getConf(), thisNodeFile.getPath());
+ try {
+
+ DataInputStream valueStream;
+ LogKey key = new LogKey();
+ valueStream = reader.next(key);
- DataInputStream valueStream;
- LogKey key = new LogKey();
- valueStream = reader.next(key);
-
- while (valueStream != null) {
- String containerString = "\n\nContainer: " + key + " on "
- + thisNodeFile.getPath().getName();
- out.println(containerString);
- out.println(StringUtils.repeat("=", containerString.length()));
- while (true) {
- try {
- LogReader.readAContainerLogsForALogType(valueStream, out);
- } catch (EOFException eof) {
- break;
+ while (valueStream != null) {
+ String containerString =
+ "\n\nContainer: " + key + " on " + thisNodeFile.getPath().getName();
+ out.println(containerString);
+ out.println(StringUtils.repeat("=", containerString.length()));
+ while (true) {
+ try {
+ LogReader.readAContainerLogsForALogType(valueStream, out);
+ foundAnyLogs = true;
+ } catch (EOFException eof) {
+ break;
+ }
}
- }
- // Next container
- key = new LogKey();
- valueStream = reader.next(key);
+ // Next container
+ key = new LogKey();
+ valueStream = reader.next(key);
+ }
+ } finally {
+ reader.close();
}
- } finally {
- reader.close();
}
}
+ if (! foundAnyLogs) {
+ emptyLogDir(remoteAppLogDir.toString());
+ return -1;
+ }
return 0;
}
@@ -162,4 +196,18 @@ public void setConf(Configuration conf) {
public Configuration getConf() {
return this.conf;
}
+
+ private static void containerLogNotFound(String containerId) {
+ System.out.println("Logs for container " + containerId
+ + " are not present in this log-file.");
+ }
+
+ private static void logDirNotExist(String remoteAppLogDir) {
+ System.out.println(remoteAppLogDir + "does not exist.");
+ System.out.println("Log aggregation has not completed or is not enabled.");
+ }
+
+ private static void emptyLogDir(String remoteAppLogDir) {
+ System.out.println(remoteAppLogDir + "does not have any log files.");
+ }
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java
index 2b83e6941e4f8..16e635994b533 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java
@@ -30,7 +30,10 @@
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileContext;
+import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -59,113 +62,127 @@ public class AggregatedLogsBlock extends HtmlBlock {
@Override
protected void render(Block html) {
- AggregatedLogFormat.LogReader reader = null;
- try {
- ContainerId containerId = verifyAndGetContainerId(html);
- NodeId nodeId = verifyAndGetNodeId(html);
- String appOwner = verifyAndGetAppOwner(html);
- LogLimits logLimits = verifyAndGetLogLimits(html);
- if (containerId == null || nodeId == null || appOwner == null
- || appOwner.isEmpty() || logLimits == null) {
- return;
- }
-
- ApplicationId applicationId = containerId.getApplicationAttemptId()
- .getApplicationId();
- String logEntity = $(ENTITY_STRING);
- if (logEntity == null || logEntity.isEmpty()) {
- logEntity = containerId.toString();
- }
+ ContainerId containerId = verifyAndGetContainerId(html);
+ NodeId nodeId = verifyAndGetNodeId(html);
+ String appOwner = verifyAndGetAppOwner(html);
+ LogLimits logLimits = verifyAndGetLogLimits(html);
+ if (containerId == null || nodeId == null || appOwner == null
+ || appOwner.isEmpty() || logLimits == null) {
+ return;
+ }
- if (!conf.getBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED,
- YarnConfiguration.DEFAULT_LOG_AGGREGATION_ENABLED)) {
- html.h1()
- ._("Aggregation is not enabled. Try the nodemanager at " + nodeId)
- ._();
- return;
- }
+ ApplicationId applicationId = containerId.getApplicationAttemptId()
+ .getApplicationId();
+ String logEntity = $(ENTITY_STRING);
+ if (logEntity == null || logEntity.isEmpty()) {
+ logEntity = containerId.toString();
+ }
- Path remoteRootLogDir = new Path(conf.get(
- YarnConfiguration.NM_REMOTE_APP_LOG_DIR,
- YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR));
+ if (!conf.getBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED,
+ YarnConfiguration.DEFAULT_LOG_AGGREGATION_ENABLED)) {
+ html.h1()
+ ._("Aggregation is not enabled. Try the nodemanager at " + nodeId)
+ ._();
+ return;
+ }
- try {
- reader = new AggregatedLogFormat.LogReader(conf,
- LogAggregationUtils.getRemoteNodeLogFileForApp(remoteRootLogDir,
- applicationId, appOwner, nodeId,
- LogAggregationUtils.getRemoteNodeLogDirSuffix(conf)));
- } catch (FileNotFoundException e) {
- // ACLs not available till the log file is opened.
- html.h1()
- ._("Logs not available for " + logEntity
- + ". Aggregation may not be complete, "
- + "Check back later or try the nodemanager at " + nodeId)._();
- return;
- } catch (IOException e) {
- html.h1()._("Error getting logs for " + logEntity)._();
- LOG.error("Error getting logs for " + logEntity, e);
- return;
- }
+ Path remoteRootLogDir = new Path(conf.get(
+ YarnConfiguration.NM_REMOTE_APP_LOG_DIR,
+ YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR));
+ Path remoteAppDir = LogAggregationUtils.getRemoteAppLogDir(
+ remoteRootLogDir, applicationId, appOwner,
+ LogAggregationUtils.getRemoteNodeLogDirSuffix(conf));
+ RemoteIterator<FileStatus> nodeFiles;
+ try {
+ Path qualifiedLogDir =
+ FileContext.getFileContext(conf).makeQualified(
+ remoteAppDir);
+ nodeFiles =
+ FileContext.getFileContext(qualifiedLogDir.toUri(), conf)
+ .listStatus(remoteAppDir);
+ } catch (FileNotFoundException fnf) {
+ html.h1()
+ ._("Logs not available for " + logEntity
+ + ". Aggregation may not be complete, "
+ + "Check back later or try the nodemanager at " + nodeId)._();
+ return;
+ } catch (Exception ex) {
+ html.h1()
+ ._("Error getting logs at " + nodeId)._();
+ return;
+ }
- String owner = null;
- Map<ApplicationAccessType, String> appAcls = null;
- try {
- owner = reader.getApplicationOwner();
- appAcls = reader.getApplicationAcls();
- } catch (IOException e) {
- html.h1()._("Error getting logs for " + logEntity)._();
- LOG.error("Error getting logs for " + logEntity, e);
- return;
- }
- ApplicationACLsManager aclsManager = new ApplicationACLsManager(conf);
- aclsManager.addApplication(applicationId, appAcls);
+ boolean foundLog = false;
+ String desiredLogType = $(CONTAINER_LOG_TYPE);
+ try {
+ while (nodeFiles.hasNext()) {
+ AggregatedLogFormat.LogReader reader = null;
+ try {
+ FileStatus thisNodeFile = nodeFiles.next();
+ if (!thisNodeFile.getPath().getName()
+ .contains(LogAggregationUtils.getNodeString(nodeId))
+ || thisNodeFile.getPath().getName()
+ .endsWith(LogAggregationUtils.TMP_FILE_SUFFIX)) {
+ continue;
+ }
+ reader =
+ new AggregatedLogFormat.LogReader(conf, thisNodeFile.getPath());
+
+ String owner = null;
+ Map<ApplicationAccessType, String> appAcls = null;
+ try {
+ owner = reader.getApplicationOwner();
+ appAcls = reader.getApplicationAcls();
+ } catch (IOException e) {
+ LOG.error("Error getting logs for " + logEntity, e);
+ continue;
+ }
+ ApplicationACLsManager aclsManager = new ApplicationACLsManager(conf);
+ aclsManager.addApplication(applicationId, appAcls);
- String remoteUser = request().getRemoteUser();
- UserGroupInformation callerUGI = null;
- if (remoteUser != null) {
- callerUGI = UserGroupInformation.createRemoteUser(remoteUser);
- }
- if (callerUGI != null
- && !aclsManager.checkAccess(callerUGI,
+ String remoteUser = request().getRemoteUser();
+ UserGroupInformation callerUGI = null;
+ if (remoteUser != null) {
+ callerUGI = UserGroupInformation.createRemoteUser(remoteUser);
+ }
+ if (callerUGI != null && !aclsManager.checkAccess(callerUGI,
ApplicationAccessType.VIEW_APP, owner, applicationId)) {
- html.h1()
- ._("User [" + remoteUser
- + "] is not authorized to view the logs for " + logEntity)._();
- return;
- }
+ html.h1()
+ ._("User [" + remoteUser
+ + "] is not authorized to view the logs for " + logEntity
+ + " in log file [" + thisNodeFile.getPath().getName() + "]")._();
+ LOG.error("User [" + remoteUser
+ + "] is not authorized to view the logs for " + logEntity);
+ continue;
+ }
- String desiredLogType = $(CONTAINER_LOG_TYPE);
- try {
- AggregatedLogFormat.ContainerLogsReader logReader = reader
+ AggregatedLogFormat.ContainerLogsReader logReader = reader
.getContainerLogsReader(containerId);
- if (logReader == null) {
- html.h1()
- ._("Logs not available for " + logEntity
- + ". Could be caused by the rentention policy")._();
- return;
- }
-
- boolean foundLog = readContainerLogs(html, logReader, logLimits,
- desiredLogType);
-
- if (!foundLog) {
- if (desiredLogType.isEmpty()) {
- html.h1("No logs available for container " + containerId.toString());
- } else {
- html.h1("Unable to locate '" + desiredLogType
- + "' log for container " + containerId.toString());
+ if (logReader == null) {
+ continue;
}
- return;
+
+ foundLog = readContainerLogs(html, logReader, logLimits,
+ desiredLogType);
+ } catch (IOException ex) {
+ LOG.error("Error getting logs for " + logEntity, ex);
+ continue;
+ } finally {
+ if (reader != null)
+ reader.close();
}
- } catch (IOException e) {
- html.h1()._("Error getting logs for " + logEntity)._();
- LOG.error("Error getting logs for " + logEntity, e);
- return;
}
- } finally {
- if (reader != null) {
- reader.close();
+ if (!foundLog) {
+ if (desiredLogType.isEmpty()) {
+ html.h1("No logs available for container " + containerId.toString());
+ } else {
+ html.h1("Unable to locate '" + desiredLogType
+ + "' log for container " + containerId.toString());
+ }
}
+ } catch (IOException e) {
+ html.h1()._("Error getting logs for " + logEntity)._();
+ LOG.error("Error getting logs for " + logEntity, e);
}
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogsBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogsBlock.java
index 502d2dc2b584a..0a17433c44fca 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogsBlock.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogsBlock.java
@@ -47,7 +47,6 @@
import org.apache.hadoop.yarn.webapp.view.BlockForTest;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
import org.apache.hadoop.yarn.webapp.view.HtmlBlockForTest;
-import org.junit.Ignore;
import org.junit.Test;
import static org.mockito.Mockito.*;
@@ -149,10 +148,8 @@ public void testAggregatedLogsBlock() throws Exception {
}
/**
* Log files was deleted.
- * TODO: YARN-2582: fix log web ui for Long Running application
* @throws Exception
*/
- @Ignore
@Test
public void testNoLogs() throws Exception {
|
2a06ba4e96e97adaedaeea96f2a23c5d47b36059
|
restlet-framework-java
|
- Renamed DirectoryResource into- DirectoryServerResource to match its new super class name.--
|
p
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/modules/org.restlet/src/org/restlet/engine/local/DirectoryResource.java b/modules/org.restlet/src/org/restlet/engine/local/DirectoryServerResource.java
similarity index 99%
rename from modules/org.restlet/src/org/restlet/engine/local/DirectoryResource.java
rename to modules/org.restlet/src/org/restlet/engine/local/DirectoryServerResource.java
index 3efb783457..ebcd6aef19 100644
--- a/modules/org.restlet/src/org/restlet/engine/local/DirectoryResource.java
+++ b/modules/org.restlet/src/org/restlet/engine/local/DirectoryServerResource.java
@@ -66,7 +66,7 @@
* @author Jerome Louvel
* @author Thierry Boileau
*/
-public class DirectoryResource extends ServerResource {
+public class DirectoryServerResource extends ServerResource {
/**
* The local base name of the resource. For example, "foo.en" and
diff --git a/modules/org.restlet/src/org/restlet/resource/Directory.java b/modules/org.restlet/src/org/restlet/resource/Directory.java
index 6b4bdcc783..77923ca837 100644
--- a/modules/org.restlet/src/org/restlet/resource/Directory.java
+++ b/modules/org.restlet/src/org/restlet/resource/Directory.java
@@ -40,7 +40,7 @@
import org.restlet.data.MediaType;
import org.restlet.data.Reference;
import org.restlet.data.ReferenceList;
-import org.restlet.engine.local.DirectoryResource;
+import org.restlet.engine.local.DirectoryServerResource;
import org.restlet.engine.util.AlphaNumericComparator;
import org.restlet.engine.util.AlphabeticalComparator;
import org.restlet.representation.Representation;
@@ -138,7 +138,7 @@ public Directory(Context context, Reference rootLocalReference) {
this.listingAllowed = false;
this.modifiable = false;
this.negotiateContent = true;
- setTargetClass(DirectoryResource.class);
+ setTargetClass(DirectoryServerResource.class);
}
/**
|
6de7fd15b7b4f42263d847fe462552110c405bb3
|
ReactiveX-RxJava
|
Move last 6 remaining unit tests out.--
|
p
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/main/java/rx/operators/OperationToFuture.java b/rxjava-core/src/main/java/rx/operators/OperationToFuture.java
index a3ecc49efe..d4433da9d6 100644
--- a/rxjava-core/src/main/java/rx/operators/OperationToFuture.java
+++ b/rxjava-core/src/main/java/rx/operators/OperationToFuture.java
@@ -15,9 +15,6 @@
*/
package rx.operators;
-import static org.junit.Assert.*;
-
-import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
@@ -25,13 +22,9 @@
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicReference;
-import org.junit.Test;
-
import rx.Observable;
-import rx.Observable.OnSubscribeFunc;
import rx.Observer;
import rx.Subscription;
-import rx.subscriptions.Subscriptions;
/**
* Returns a Future representing the single value emitted by an Observable.
@@ -136,52 +129,4 @@ private T getValue() throws ExecutionException {
}
- @Test
- public void testToFuture() throws InterruptedException, ExecutionException {
- Observable<String> obs = Observable.from("one");
- Future<String> f = toFuture(obs);
- assertEquals("one", f.get());
- }
-
- @Test
- public void testToFutureList() throws InterruptedException, ExecutionException {
- Observable<String> obs = Observable.from("one", "two", "three");
- Future<List<String>> f = toFuture(obs.toList());
- assertEquals("one", f.get().get(0));
- assertEquals("two", f.get().get(1));
- assertEquals("three", f.get().get(2));
- }
-
- @Test(expected = ExecutionException.class)
- public void testExceptionWithMoreThanOneElement() throws InterruptedException, ExecutionException {
- Observable<String> obs = Observable.from("one", "two");
- Future<String> f = toFuture(obs);
- assertEquals("one", f.get());
- // we expect an exception since there are more than 1 element
- }
-
- @Test
- public void testToFutureWithException() {
- Observable<String> obs = Observable.create(new OnSubscribeFunc<String>() {
-
- @Override
- public Subscription onSubscribe(Observer<? super String> observer) {
- observer.onNext("one");
- observer.onError(new TestException());
- return Subscriptions.empty();
- }
- });
-
- Future<String> f = toFuture(obs);
- try {
- f.get();
- fail("expected exception");
- } catch (Throwable e) {
- assertEquals(TestException.class, e.getCause().getClass());
- }
- }
-
- private static class TestException extends RuntimeException {
- private static final long serialVersionUID = 1L;
- }
}
diff --git a/rxjava-core/src/main/java/rx/operators/OperationToIterator.java b/rxjava-core/src/main/java/rx/operators/OperationToIterator.java
index 59debafcb9..2fcd51872e 100644
--- a/rxjava-core/src/main/java/rx/operators/OperationToIterator.java
+++ b/rxjava-core/src/main/java/rx/operators/OperationToIterator.java
@@ -15,20 +15,13 @@
*/
package rx.operators;
-import static org.junit.Assert.*;
-
import java.util.Iterator;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
-import org.junit.Test;
-
import rx.Notification;
import rx.Observable;
-import rx.Observable.OnSubscribeFunc;
import rx.Observer;
-import rx.Subscription;
-import rx.subscriptions.Subscriptions;
import rx.util.Exceptions;
/**
@@ -108,47 +101,4 @@ public void remove() {
};
}
- @Test
- public void testToIterator() {
- Observable<String> obs = Observable.from("one", "two", "three");
-
- Iterator<String> it = toIterator(obs);
-
- assertEquals(true, it.hasNext());
- assertEquals("one", it.next());
-
- assertEquals(true, it.hasNext());
- assertEquals("two", it.next());
-
- assertEquals(true, it.hasNext());
- assertEquals("three", it.next());
-
- assertEquals(false, it.hasNext());
-
- }
-
- @Test(expected = TestException.class)
- public void testToIteratorWithException() {
- Observable<String> obs = Observable.create(new OnSubscribeFunc<String>() {
-
- @Override
- public Subscription onSubscribe(Observer<? super String> observer) {
- observer.onNext("one");
- observer.onError(new TestException());
- return Subscriptions.empty();
- }
- });
-
- Iterator<String> it = toIterator(obs);
-
- assertEquals(true, it.hasNext());
- assertEquals("one", it.next());
-
- assertEquals(true, it.hasNext());
- it.next();
- }
-
- private static class TestException extends RuntimeException {
- private static final long serialVersionUID = 1L;
- }
}
diff --git a/rxjava-core/src/test/java/rx/operators/OperationToFutureTest.java b/rxjava-core/src/test/java/rx/operators/OperationToFutureTest.java
new file mode 100644
index 0000000000..c18131e5c7
--- /dev/null
+++ b/rxjava-core/src/test/java/rx/operators/OperationToFutureTest.java
@@ -0,0 +1,68 @@
+package rx.operators;
+
+import static org.junit.Assert.*;
+import static rx.operators.OperationToFuture.*;
+
+import java.util.List;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
+
+import org.junit.Test;
+
+import rx.Observable;
+import rx.Observable.OnSubscribeFunc;
+import rx.Observer;
+import rx.Subscription;
+import rx.subscriptions.Subscriptions;
+
+public class OperationToFutureTest {
+
+ @Test
+ public void testToFuture() throws InterruptedException, ExecutionException {
+ Observable<String> obs = Observable.from("one");
+ Future<String> f = toFuture(obs);
+ assertEquals("one", f.get());
+ }
+
+ @Test
+ public void testToFutureList() throws InterruptedException, ExecutionException {
+ Observable<String> obs = Observable.from("one", "two", "three");
+ Future<List<String>> f = toFuture(obs.toList());
+ assertEquals("one", f.get().get(0));
+ assertEquals("two", f.get().get(1));
+ assertEquals("three", f.get().get(2));
+ }
+
+ @Test(expected = ExecutionException.class)
+ public void testExceptionWithMoreThanOneElement() throws InterruptedException, ExecutionException {
+ Observable<String> obs = Observable.from("one", "two");
+ Future<String> f = toFuture(obs);
+ assertEquals("one", f.get());
+ // we expect an exception since there are more than 1 element
+ }
+
+ @Test
+ public void testToFutureWithException() {
+ Observable<String> obs = Observable.create(new OnSubscribeFunc<String>() {
+
+ @Override
+ public Subscription onSubscribe(Observer<? super String> observer) {
+ observer.onNext("one");
+ observer.onError(new TestException());
+ return Subscriptions.empty();
+ }
+ });
+
+ Future<String> f = toFuture(obs);
+ try {
+ f.get();
+ fail("expected exception");
+ } catch (Throwable e) {
+ assertEquals(TestException.class, e.getCause().getClass());
+ }
+ }
+
+ private static class TestException extends RuntimeException {
+ private static final long serialVersionUID = 1L;
+ }
+}
diff --git a/rxjava-core/src/test/java/rx/operators/OperationToIteratorTest.java b/rxjava-core/src/test/java/rx/operators/OperationToIteratorTest.java
new file mode 100644
index 0000000000..1994bd4a0c
--- /dev/null
+++ b/rxjava-core/src/test/java/rx/operators/OperationToIteratorTest.java
@@ -0,0 +1,62 @@
+package rx.operators;
+
+import static org.junit.Assert.*;
+import static rx.operators.OperationToIterator.*;
+
+import java.util.Iterator;
+
+import org.junit.Test;
+
+import rx.Observable;
+import rx.Observable.OnSubscribeFunc;
+import rx.Observer;
+import rx.Subscription;
+import rx.subscriptions.Subscriptions;
+
+public class OperationToIteratorTest {
+
+ @Test
+ public void testToIterator() {
+ Observable<String> obs = Observable.from("one", "two", "three");
+
+ Iterator<String> it = toIterator(obs);
+
+ assertEquals(true, it.hasNext());
+ assertEquals("one", it.next());
+
+ assertEquals(true, it.hasNext());
+ assertEquals("two", it.next());
+
+ assertEquals(true, it.hasNext());
+ assertEquals("three", it.next());
+
+ assertEquals(false, it.hasNext());
+
+ }
+
+ @Test(expected = TestException.class)
+ public void testToIteratorWithException() {
+ Observable<String> obs = Observable.create(new OnSubscribeFunc<String>() {
+
+ @Override
+ public Subscription onSubscribe(Observer<? super String> observer) {
+ observer.onNext("one");
+ observer.onError(new TestException());
+ return Subscriptions.empty();
+ }
+ });
+
+ Iterator<String> it = toIterator(obs);
+
+ assertEquals(true, it.hasNext());
+ assertEquals("one", it.next());
+
+ assertEquals(true, it.hasNext());
+ it.next();
+ }
+
+ private static class TestException extends RuntimeException {
+ private static final long serialVersionUID = 1L;
+ }
+
+}
|
b6eb90370ad063bff5f74d4dc90632fe7ac6ccd3
|
hadoop
|
YARN-1555. Fixed test failures in- applicationhistoryservice.* (Vinod Kumar Vavilapalli via mayank) svn merge- --ignore-ancestry -c 1556753 ../YARN-321--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1562207 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index ac11f9c9b73ba..ca2b99d8a52c1 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -519,6 +519,9 @@ Branch YARN-321: Generic ApplicationHistoryService
YARN-1413. Implemented serving of aggregated-logs in the ApplicationHistory
server. (Mayank Bansal via vinodkv)
+ YARN-1555. Fixed test failures in applicationhistoryservice.* (Vinod Kumar
+ Vavilapalli via mayank)
+
Release 2.2.0 - 2013-10-13
INCOMPATIBLE CHANGES
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java
index ca68eb63ca672..f90ae0946ef19 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java
@@ -86,7 +86,7 @@ protected void writeContainerFinishData(ContainerId containerId)
throws IOException {
store.containerFinished(
ContainerFinishData.newInstance(containerId, 0, containerId.toString(),
- "http://localhost:0/", 0, ContainerState.COMPLETE));
+ "http://localhost:0/log", 0, ContainerState.COMPLETE));
}
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
index 3c6d69d2d6477..0ad48e262e73f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
@@ -40,7 +40,7 @@ public void testStartStopServer() throws Exception {
Configuration config = new YarnConfiguration();
historyServer.init(config);
assertEquals(STATE.INITED, historyServer.getServiceState());
- assertEquals(3, historyServer.getServices().size());
+ assertEquals(2, historyServer.getServices().size());
ApplicationHistoryClientService historyService = historyServer
.getClientService();
assertNotNull(historyServer.getClientService());
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java
index 2de688139a384..c5633fb30cd8c 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java
@@ -287,7 +287,7 @@ public void testSingleContainer() throws Exception {
container.getString("assignedNodeId"));
assertEquals(Priority.newInstance(containerId.getId()).toString(),
container.getString("priority"));
- assertEquals("http://localhost:0/", container.getString("logUrl"));
+ assertEquals("http://localhost:0/log", container.getString("logUrl"));
assertEquals(ContainerState.COMPLETE.toString(),
container.getString("containerState"));
}
|
9cbf3e289bfad5b7aa2d235b6e696ccee56f299b
|
restlet-framework-java
|
- Fixed HTTPS issues in internal connector.--
|
c
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/modules/org.restlet.ext.ssl/src/org/restlet/ext/ssl/internal/SslConnection.java b/modules/org.restlet.ext.ssl/src/org/restlet/ext/ssl/internal/SslConnection.java
index 5ea1accecf..4746a2c5fb 100644
--- a/modules/org.restlet.ext.ssl/src/org/restlet/ext/ssl/internal/SslConnection.java
+++ b/modules/org.restlet.ext.ssl/src/org/restlet/ext/ssl/internal/SslConnection.java
@@ -269,6 +269,10 @@ public SSLSession getSslSession() {
private void handleSslHandshake() throws IOException {
HandshakeStatus hs = getSslHandshakeStatus();
+ if (getLogger().isLoggable(Level.FINER)) {
+ getLogger().log(Level.FINER, "Handling SSL handshake: " + hs);
+ }
+
if (hs != HandshakeStatus.NOT_HANDSHAKING) {
switch (getSslHandshakeStatus()) {
case FINISHED:
@@ -300,6 +304,11 @@ private void handleSslHandshake() throws IOException {
* @throws IOException
*/
public synchronized void handleSslResult() throws IOException {
+ if (getLogger().isLoggable(Level.FINER)) {
+ getLogger().log(Level.FINER,
+ "Handling SSL result: " + getSslEngineStatus());
+ }
+
switch (getSslEngineStatus()) {
case BUFFER_OVERFLOW:
if (getLogger().isLoggable(Level.FINER)) {
@@ -398,10 +407,8 @@ public void run() {
}
if (getLogger().isLoggable(Level.FINER)) {
- getLogger().log(
- Level.FINER,
- "Done running delegated tasks. "
- + SslConnection.this.toString());
+ getLogger().log(Level.FINER,
+ "Done running delegated tasks");
}
try {
@@ -410,6 +417,8 @@ public void run() {
getLogger().log(Level.INFO,
"Unable to handle SSL handshake", e);
}
+
+ getHelper().getController().wakeup();
}
});
}
|
3d7ec9ba69f2936d30b7ea34cb46369ac5e0de04
|
orientdb
|
Profiler: - all the metrics now have a- description and a type - supported new HTTP REST command /profiler/metadata- to retrieve all the metadata--
|
a
|
https://github.com/orientechnologies/orientdb
|
diff --git a/client/src/main/java/com/orientechnologies/orient/client/remote/OStorageRemote.java b/client/src/main/java/com/orientechnologies/orient/client/remote/OStorageRemote.java
index a396b241130..40e47de7a63 100644
--- a/client/src/main/java/com/orientechnologies/orient/client/remote/OStorageRemote.java
+++ b/client/src/main/java/com/orientechnologies/orient/client/remote/OStorageRemote.java
@@ -1588,8 +1588,11 @@ protected OChannelBinaryClient beginRequest(final byte iCommand) throws IOExcept
throw new OStorageException("Cannot acquire a connection because the thread has been interrupted");
}
- final long elapsed = Orient.instance().getProfiler()
- .stopChrono("system.network.connectionPool.waitingTime", startToWait);
+ final long elapsed = Orient
+ .instance()
+ .getProfiler()
+ .stopChrono("system.network.connectionPool.waitingTime", "Waiting for a free connection from the pool of channels",
+ startToWait);
if (debug)
System.out.println("Waiting for connection = elapsed: " + elapsed);
diff --git a/commons/src/main/java/com/orientechnologies/common/profiler/OProfiler.java b/commons/src/main/java/com/orientechnologies/common/profiler/OProfiler.java
index 0ee95c2dc8e..8cfa7cd3ab3 100644
--- a/commons/src/main/java/com/orientechnologies/common/profiler/OProfiler.java
+++ b/commons/src/main/java/com/orientechnologies/common/profiler/OProfiler.java
@@ -22,6 +22,7 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Map.Entry;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.ConcurrentHashMap;
@@ -40,20 +41,26 @@
* @copyrights Orient Technologies.com
*/
public class OProfiler extends OSharedResourceAbstract implements OProfilerMBean {
- protected long recordingFrom = -1;
- protected Map<OProfilerHookValue, String> hooks = new ConcurrentHashMap<OProfiler.OProfilerHookValue, String>();
- protected Date lastReset = new Date();
+ public enum METRIC_TYPE {
+ CHRONO, COUNTER, STAT, SIZE, ENABLED, TIMES, TEXT
+ }
+
+ protected long recordingFrom = -1;
+ protected Map<String, OProfilerHookValue> hooks = new ConcurrentHashMap<String, OProfilerHookValue>();
+ protected Date lastReset = new Date();
- protected OProfilerData realTime = new OProfilerData();
- protected OProfilerData lastSnapshot;
- protected List<OProfilerData> snapshots = new ArrayList<OProfilerData>();
- protected List<OProfilerData> summaries = new ArrayList<OProfilerData>();
+ protected ConcurrentHashMap<String, String> dictionary = new ConcurrentHashMap<String, String>();
+ protected ConcurrentHashMap<String, METRIC_TYPE> types = new ConcurrentHashMap<String, METRIC_TYPE>();
+ protected OProfilerData realTime = new OProfilerData();
+ protected OProfilerData lastSnapshot;
+ protected List<OProfilerData> snapshots = new ArrayList<OProfilerData>();
+ protected List<OProfilerData> summaries = new ArrayList<OProfilerData>();
- protected int elapsedToCreateSnapshot = 0;
- protected int maxSnapshots = 0;
- protected int maxSummaries = 0;
- protected final static Timer timer = new Timer(true);
- protected TimerTask archiverTask;
+ protected int elapsedToCreateSnapshot = 0;
+ protected int maxSnapshots = 0;
+ protected int maxSummaries = 0;
+ protected final static Timer timer = new Timer(true);
+ protected TimerTask archiverTask;
public interface OProfilerHookValue {
public Object getValue();
@@ -148,6 +155,8 @@ public void stopRecording() {
lastSnapshot = null;
realTime.clear();
+ dictionary.clear();
+ types.clear();
if (archiverTask != null)
archiverTask.cancel();
@@ -205,15 +214,17 @@ public void createSnapshot() {
}
}
- public void updateCounter(final String iStatName, final long iPlus) {
- if (iStatName == null || recordingFrom < 0)
+ public void updateCounter(final String iName, final String iDescription, final long iPlus) {
+ if (iName == null || recordingFrom < 0)
return;
+ updateMetadata(iName, iDescription, METRIC_TYPE.COUNTER);
+
acquireSharedLock();
try {
if (lastSnapshot != null)
- lastSnapshot.updateCounter(iStatName, iPlus);
- realTime.updateCounter(iStatName, iPlus);
+ lastSnapshot.updateCounter(iName, iPlus);
+ realTime.updateCounter(iName, iPlus);
} finally {
releaseSharedLock();
}
@@ -302,6 +313,31 @@ public String toJSON(final String iQuery, final String iFrom, final String iTo)
return buffer.toString();
}
+ public String metadataToJSON() {
+ final StringBuilder buffer = new StringBuilder();
+
+ buffer.append("{ \"metadata\": {\n ");
+ boolean first = true;
+ for (Entry<String, String> entry : dictionary.entrySet()) {
+ final String key = entry.getKey();
+
+ if (first)
+ first = false;
+ else
+ buffer.append(",\n ");
+ buffer.append('"');
+ buffer.append(key);
+ buffer.append("\":{\"description\":\"");
+ buffer.append(entry.getValue());
+ buffer.append("\",\"type\":\"");
+ buffer.append(types.get(key));
+ buffer.append("\"}");
+ }
+ buffer.append("} }");
+
+ return buffer.toString();
+ }
+
public String dump() {
final float maxMem = Runtime.getRuntime().maxMemory() / 1000000f;
final float totMem = Runtime.getRuntime().totalMemory() / 1000000f;
@@ -343,28 +379,36 @@ public long startChrono() {
return System.currentTimeMillis();
}
- public long stopChrono(final String iName, final long iStartTime) {
+ public long stopChrono(final String iName, final String iDescription, final long iStartTime) {
+ return stopChrono(iName, iDescription, iStartTime, null);
+ }
+
+ public long stopChrono(final String iName, final String iDescription, final long iStartTime, final String iPayload) {
// CHECK IF CHRONOS ARE ACTIVED
if (recordingFrom < 0)
return -1;
+ updateMetadata(iName, iDescription, METRIC_TYPE.CHRONO);
+
acquireSharedLock();
try {
if (lastSnapshot != null)
- lastSnapshot.stopChrono(iName, iStartTime);
- return realTime.stopChrono(iName, iStartTime);
+ lastSnapshot.stopChrono(iName, iStartTime, iPayload);
+ return realTime.stopChrono(iName, iStartTime, iPayload);
} finally {
releaseSharedLock();
}
}
- public long updateStat(final String iName, final long iValue) {
+ public long updateStat(final String iName, final String iDescription, final long iValue) {
// CHECK IF CHRONOS ARE ACTIVED
if (recordingFrom < 0)
return -1;
+ updateMetadata(iName, iDescription, METRIC_TYPE.STAT);
+
acquireSharedLock();
try {
@@ -426,19 +470,14 @@ public String dumpHookValues() {
buffer.append(String.format("\n%50s | Value |", "Name"));
buffer.append(String.format("\n%50s +-------------------------------------------------------------------+", ""));
- final List<String> names = new ArrayList<String>(hooks.values());
+ final List<String> names = new ArrayList<String>(hooks.keySet());
Collections.sort(names);
for (String k : names) {
- for (Map.Entry<OProfilerHookValue, String> v : hooks.entrySet()) {
- if (v.getValue().equals(k)) {
- final OProfilerHookValue hook = v.getKey();
- if (hook != null) {
- final Object hookValue = hook.getValue();
- buffer.append(String.format("\n%-50s | %-65s |", k, hookValue != null ? hookValue.toString() : "null"));
- }
- break;
- }
+ final OProfilerHookValue v = hooks.get(k);
+ if (v != null) {
+ final Object hookValue = v.getValue();
+ buffer.append(String.format("\n%-50s | %-65s |", k, hookValue != null ? hookValue.toString() : "null"));
}
}
@@ -451,14 +490,8 @@ public String dumpHookValues() {
}
public Object getHookValue(final String iName) {
- for (Map.Entry<OProfilerHookValue, String> v : hooks.entrySet()) {
- if (v.getValue().equals(iName)) {
- final OProfilerHookValue h = v.getKey();
- if (h != null)
- return h.getValue();
- }
- }
- return null;
+ final OProfilerHookValue v = hooks.get(iName);
+ return v != null ? v.getValue() : null;
}
public String[] getCountersAsString() {
@@ -519,21 +552,18 @@ public OProfilerEntry getChrono(final String iChronoName) {
}
}
- public void registerHookValue(final String iName, final OProfilerHookValue iHookValue) {
+ public void registerHookValue(final String iName, final String iDescription, final METRIC_TYPE iType,
+ final OProfilerHookValue iHookValue) {
unregisterHookValue(iName);
- hooks.put(iHookValue, iName);
+ updateMetadata(iName, iDescription, iType);
+ hooks.put(iName, iHookValue);
}
public void unregisterHookValue(final String iName) {
if (recordingFrom < 0)
return;
- for (Map.Entry<OProfilerHookValue, String> entry : hooks.entrySet()) {
- if (entry.getValue().equals(iName)) {
- hooks.remove(entry.getKey());
- break;
- }
- }
+ hooks.remove(iName);
}
public void setAutoDump(final int iSeconds) {
@@ -559,9 +589,17 @@ protected Map<String, Object> archiveHooks() {
final Map<String, Object> result = new HashMap<String, Object>();
- for (Map.Entry<OProfilerHookValue, String> v : hooks.entrySet())
- result.put(v.getValue(), v.getKey().getValue());
+ for (Map.Entry<String, OProfilerHookValue> v : hooks.entrySet())
+ result.put(v.getKey(), v.getValue().getValue());
return result;
}
+
+ /**
+ * Updates the metric metadata.
+ */
+ protected void updateMetadata(final String iName, final String iDescription, final METRIC_TYPE iType) {
+ if (dictionary.putIfAbsent(iName, iDescription) == null)
+ types.put(iName, iType);
+ }
}
diff --git a/commons/src/main/java/com/orientechnologies/common/profiler/OProfilerData.java b/commons/src/main/java/com/orientechnologies/common/profiler/OProfilerData.java
index d47b27c34cd..f5fbdfa0dc9 100644
--- a/commons/src/main/java/com/orientechnologies/common/profiler/OProfilerData.java
+++ b/commons/src/main/java/com/orientechnologies/common/profiler/OProfilerData.java
@@ -52,6 +52,8 @@ public class OProfilerEntry {
public long max = 0;
public long average = 0;
public long total = 0;
+ public String payLoad;
+ public String description;
public void toJSON(final StringBuilder buffer) {
buffer.append(String.format("\"%s\":{", name));
@@ -61,6 +63,8 @@ public void toJSON(final StringBuilder buffer) {
buffer.append(String.format("\"%s\":%d,", "max", max));
buffer.append(String.format("\"%s\":%d,", "average", average));
buffer.append(String.format("\"%s\":%d", "total", total));
+ if (payLoad != null)
+ buffer.append(String.format("\"%s\":%d", "payload", payLoad));
buffer.append("}");
}
@@ -275,8 +279,8 @@ public String dumpCounters() {
}
}
- public long stopChrono(final String iName, final long iStartTime) {
- return updateEntry(chronos, iName, System.currentTimeMillis() - iStartTime);
+ public long stopChrono(final String iName, final long iStartTime, final String iPayload) {
+ return updateEntry(chronos, iName, System.currentTimeMillis() - iStartTime, iPayload);
}
public String dumpChronos() {
@@ -284,7 +288,7 @@ public String dumpChronos() {
}
public long updateStat(final String iName, final long iValue) {
- return updateEntry(stats, iName, iValue);
+ return updateEntry(stats, iName, iValue, null);
}
public String dumpStats() {
@@ -417,7 +421,8 @@ public OProfilerEntry getChrono(final String iChronoName) {
}
}
- protected synchronized long updateEntry(final Map<String, OProfilerEntry> iValues, final String iName, final long iValue) {
+ protected synchronized long updateEntry(final Map<String, OProfilerEntry> iValues, final String iName, final long iValue,
+ final String iPayload) {
synchronized (iValues) {
OProfilerEntry c = iValues.get(iName);
@@ -428,6 +433,7 @@ protected synchronized long updateEntry(final Map<String, OProfilerEntry> iValue
}
c.name = iName;
+ c.payLoad = iPayload;
c.entries++;
c.last = iValue;
c.total += c.last;
diff --git a/commons/src/main/java/com/orientechnologies/common/profiler/OProfilerMBean.java b/commons/src/main/java/com/orientechnologies/common/profiler/OProfilerMBean.java
index 1dda7eb8795..3108b9f2004 100644
--- a/commons/src/main/java/com/orientechnologies/common/profiler/OProfilerMBean.java
+++ b/commons/src/main/java/com/orientechnologies/common/profiler/OProfilerMBean.java
@@ -19,7 +19,7 @@
public interface OProfilerMBean {
- public void updateCounter(String iStatName, long iPlus);
+ public void updateCounter(String iStatName, String iDescription, long iPlus);
public long getCounter(String iStatName);
@@ -29,7 +29,7 @@ public interface OProfilerMBean {
public long startChrono();
- public long stopChrono(String iName, long iStartTime);
+ public long stopChrono(String iName, String iDescription, long iStartTime);
public String dumpChronos();
diff --git a/core/src/main/java/com/orientechnologies/orient/core/cache/OAbstractRecordCache.java b/core/src/main/java/com/orientechnologies/orient/core/cache/OAbstractRecordCache.java
index ced33367b78..3a30f3525eb 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/cache/OAbstractRecordCache.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/cache/OAbstractRecordCache.java
@@ -18,6 +18,7 @@
import java.util.HashSet;
import java.util.Set;
+import com.orientechnologies.common.profiler.OProfiler.METRIC_TYPE;
import com.orientechnologies.common.profiler.OProfiler.OProfilerHookValue;
import com.orientechnologies.orient.core.Orient;
import com.orientechnologies.orient.core.id.ORID;
@@ -136,23 +137,29 @@ public int getMaxSize() {
public void startup() {
underlying.startup();
- Orient.instance().getProfiler().registerHookValue(profilerPrefix + "enabled", new OProfilerHookValue() {
- public Object getValue() {
- return isEnabled();
- }
- });
-
- Orient.instance().getProfiler().registerHookValue(profilerPrefix + "current", new OProfilerHookValue() {
- public Object getValue() {
- return getSize();
- }
- });
-
- Orient.instance().getProfiler().registerHookValue(profilerPrefix + "max", new OProfilerHookValue() {
- public Object getValue() {
- return getMaxSize();
- }
- });
+ Orient.instance().getProfiler()
+ .registerHookValue(profilerPrefix + "enabled", "Cache enabled", METRIC_TYPE.ENABLED, new OProfilerHookValue() {
+ public Object getValue() {
+ return isEnabled();
+ }
+ });
+
+ Orient.instance().getProfiler()
+ .registerHookValue(profilerPrefix + "current", "Number of entries in cache", METRIC_TYPE.SIZE, new OProfilerHookValue() {
+ public Object getValue() {
+ return getSize();
+ }
+ });
+
+ Orient
+ .instance()
+ .getProfiler()
+ .registerHookValue(profilerPrefix + "max", "Maximum number of entries in cache", METRIC_TYPE.SIZE,
+ new OProfilerHookValue() {
+ public Object getValue() {
+ return getMaxSize();
+ }
+ });
}
/**
diff --git a/core/src/main/java/com/orientechnologies/orient/core/cache/OLevel1RecordCache.java b/core/src/main/java/com/orientechnologies/orient/core/cache/OLevel1RecordCache.java
index dbaf91912a3..d93922eb43d 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/cache/OLevel1RecordCache.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/cache/OLevel1RecordCache.java
@@ -107,7 +107,10 @@ record = secondary.retrieveRecord(rid);
underlying.unlock(rid);
}
- Orient.instance().getProfiler().updateCounter(record != null ? CACHE_HIT : CACHE_MISS, 1L);
+ if (record != null)
+ Orient.instance().getProfiler().updateCounter(CACHE_HIT, "Record found in Level1 Cache", 1L);
+ else
+ Orient.instance().getProfiler().updateCounter(CACHE_MISS, "Record not found in Level1 Cache", 1L);
return record;
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/cache/OLevel2RecordCache.java b/core/src/main/java/com/orientechnologies/orient/core/cache/OLevel2RecordCache.java
index a5098822e1e..bbea14b51ac 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/cache/OLevel2RecordCache.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/cache/OLevel2RecordCache.java
@@ -107,7 +107,7 @@ protected ORecordInternal<?> retrieveRecord(final ORID iRID) {
record = underlying.remove(iRID);
if (record == null || record.isDirty()) {
- Orient.instance().getProfiler().updateCounter(CACHE_MISS, 1);
+ Orient.instance().getProfiler().updateCounter(CACHE_MISS, "Record not found in Level2 Cache", +1);
return null;
}
@@ -122,7 +122,7 @@ record = underlying.remove(iRID);
underlying.unlock(iRID);
}
- Orient.instance().getProfiler().updateCounter(CACHE_HIT, 1);
+ Orient.instance().getProfiler().updateCounter(CACHE_HIT, "Record found in Level2 Cache", +1);
return record;
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/db/document/ODatabaseDocumentTx.java b/core/src/main/java/com/orientechnologies/orient/core/db/document/ODatabaseDocumentTx.java
index a79c4dc9cd2..78bf420dea5 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/db/document/ODatabaseDocumentTx.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/db/document/ODatabaseDocumentTx.java
@@ -96,7 +96,7 @@ private void releaseIndexes(Collection<? extends OIndex<?>> indexesToRelease) {
}
@Override
- public void freeze(boolean throwException) {
+ public void freeze(final boolean throwException) {
if (!(getStorage() instanceof OStorageLocal)) {
OLogManager.instance().error(this,
"We can not freeze non local storage. " + "If you use remote client please use OServerAdmin instead.");
@@ -114,7 +114,7 @@ public void freeze(boolean throwException) {
super.freeze(throwException);
- Orient.instance().getProfiler().stopChrono("document.database.freeze", startTime);
+ Orient.instance().getProfiler().stopChrono("document.database.freeze", "Time to freeze the database", startTime);
}
@Override
@@ -136,7 +136,7 @@ public void freeze() {
super.freeze();
- Orient.instance().getProfiler().stopChrono("document.database.freeze", startTime);
+ Orient.instance().getProfiler().stopChrono("document.database.freeze", "Time to freeze the database", startTime);
}
@Override
@@ -154,7 +154,7 @@ public void release() {
Collection<? extends OIndex<?>> indexes = getMetadata().getIndexManager().getIndexes();
releaseIndexes(indexes);
- Orient.instance().getProfiler().stopChrono("document.database.release", startTime);
+ Orient.instance().getProfiler().stopChrono("document.database.release", "Time to release the database", startTime);
}
/**
@@ -363,7 +363,7 @@ public <RET extends ORecordInternal<?>> RET save(final ORecordInternal<?> iRecor
for (; i < clusterIds.length; ++i)
if (clusterIds[i] == id)
break;
-
+
if (i == clusterIds.length)
throw new IllegalArgumentException("Cluster name " + iClusterName + " is not configured to store the class "
+ doc.getClassName());
diff --git a/core/src/main/java/com/orientechnologies/orient/core/index/OIndexMVRBTreeAbstract.java b/core/src/main/java/com/orientechnologies/orient/core/index/OIndexMVRBTreeAbstract.java
index 2723a820cd1..a045295f13e 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/index/OIndexMVRBTreeAbstract.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/index/OIndexMVRBTreeAbstract.java
@@ -32,6 +32,7 @@
import com.orientechnologies.common.concur.resource.OSharedResourceAdaptiveExternal;
import com.orientechnologies.common.listener.OProgressListener;
import com.orientechnologies.common.log.OLogManager;
+import com.orientechnologies.common.profiler.OProfiler.METRIC_TYPE;
import com.orientechnologies.common.profiler.OProfiler.OProfilerHookValue;
import com.orientechnologies.common.serialization.types.OBinarySerializer;
import com.orientechnologies.orient.core.Orient;
@@ -143,7 +144,8 @@ public OIndexInternal<?> create(final String iName, final OIndexDefinition iInde
iValueSerializer, indexDefinition.getTypes().length, maxUpdatesBeforeSave);
}
} else
- map = new OMVRBTreeDatabaseLazySave<Object, T>(iClusterIndexName, new OSimpleKeySerializer(), iValueSerializer, 1, maxUpdatesBeforeSave);
+ map = new OMVRBTreeDatabaseLazySave<Object, T>(iClusterIndexName, new OSimpleKeySerializer(), iValueSerializer, 1,
+ maxUpdatesBeforeSave);
installHooks(iDatabase);
@@ -720,7 +722,7 @@ protected void installHooks(final ODatabaseRecord iDatabase) {
final OJVMProfiler profiler = Orient.instance().getProfiler();
final String profilerPrefix = profiler.getDatabaseMetric(iDatabase.getName(), "index." + name + '.');
- profiler.registerHookValue(profilerPrefix + "items", new OProfilerHookValue() {
+ profiler.registerHookValue(profilerPrefix + "items", "Index size", METRIC_TYPE.SIZE, new OProfilerHookValue() {
public Object getValue() {
acquireSharedLock();
try {
@@ -731,23 +733,26 @@ public Object getValue() {
}
});
- profiler.registerHookValue(profilerPrefix + "entryPointSize", new OProfilerHookValue() {
- public Object getValue() {
- return map != null ? map.getEntryPointSize() : "-";
- }
- });
+ profiler.registerHookValue(profilerPrefix + "entryPointSize", "Number of entrypoints in an index", METRIC_TYPE.SIZE,
+ new OProfilerHookValue() {
+ public Object getValue() {
+ return map != null ? map.getEntryPointSize() : "-";
+ }
+ });
- profiler.registerHookValue(profilerPrefix + "maxUpdateBeforeSave", new OProfilerHookValue() {
- public Object getValue() {
- return map != null ? map.getMaxUpdatesBeforeSave() : "-";
- }
- });
+ profiler.registerHookValue(profilerPrefix + "maxUpdateBeforeSave", "Maximum number of updates in a index before force saving",
+ METRIC_TYPE.SIZE, new OProfilerHookValue() {
+ public Object getValue() {
+ return map != null ? map.getMaxUpdatesBeforeSave() : "-";
+ }
+ });
- profiler.registerHookValue(profilerPrefix + "optimizationThreshold", new OProfilerHookValue() {
- public Object getValue() {
- return map != null ? map.getOptimizeThreshold() : "-";
- }
- });
+ profiler.registerHookValue(profilerPrefix + "optimizationThreshold",
+ "Number of times as threshold to execute a background index optimization", METRIC_TYPE.SIZE, new OProfilerHookValue() {
+ public Object getValue() {
+ return map != null ? map.getOptimizeThreshold() : "-";
+ }
+ });
Orient.instance().getMemoryWatchDog().addListener(watchDog);
iDatabase.registerListener(this);
@@ -921,8 +926,7 @@ public String getDatabaseName() {
}
private int lazyUpdates() {
- return isAutomatic() ?
- OGlobalConfiguration.INDEX_AUTO_LAZY_UPDATES.getValueAsInteger() :
- OGlobalConfiguration.INDEX_MANUAL_LAZY_UPDATES.getValueAsInteger();
+ return isAutomatic() ? OGlobalConfiguration.INDEX_AUTO_LAZY_UPDATES.getValueAsInteger()
+ : OGlobalConfiguration.INDEX_MANUAL_LAZY_UPDATES.getValueAsInteger();
}
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/memory/OMemoryWatchDog.java b/core/src/main/java/com/orientechnologies/orient/core/memory/OMemoryWatchDog.java
index 92ef9f96c30..b575a75e142 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/memory/OMemoryWatchDog.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/memory/OMemoryWatchDog.java
@@ -23,6 +23,7 @@
import com.orientechnologies.common.io.OFileUtils;
import com.orientechnologies.common.log.OLogManager;
+import com.orientechnologies.common.profiler.OProfiler.METRIC_TYPE;
import com.orientechnologies.common.profiler.OProfiler.OProfilerHookValue;
import com.orientechnologies.orient.core.Orient;
@@ -63,11 +64,15 @@ public OMemoryWatchDog() {
}
public void run() {
- Orient.instance().getProfiler().registerHookValue("system.memory.alerts", new OProfilerHookValue() {
- public Object getValue() {
- return alertTimes;
- }
- });
+ Orient
+ .instance()
+ .getProfiler()
+ .registerHookValue("system.memory.alerts", "Number of alerts received by JVM to free memory resources",
+ METRIC_TYPE.COUNTER, new OProfilerHookValue() {
+ public Object getValue() {
+ return alertTimes;
+ }
+ });
while (true) {
try {
@@ -96,7 +101,7 @@ public Object getValue() {
}
}
- Orient.instance().getProfiler().stopChrono("OMemoryWatchDog.freeResources", timer);
+ Orient.instance().getProfiler().stopChrono("OMemoryWatchDog.freeResources", "WatchDog free resources", timer);
} catch (Exception e) {
} finally {
diff --git a/core/src/main/java/com/orientechnologies/orient/core/metadata/OMetadata.java b/core/src/main/java/com/orientechnologies/orient/core/metadata/OMetadata.java
index ce69fde9ab8..1bc3f51b6a0 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/metadata/OMetadata.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/metadata/OMetadata.java
@@ -66,7 +66,8 @@ public void load() {
if (schemaClusterId == -1 || getDatabase().countClusterElements(CLUSTER_INTERNAL_NAME) == 0)
return;
} finally {
- PROFILER.stopChrono(PROFILER.getDatabaseMetric(getDatabase().getName(), "metadata.load"), timer);
+ PROFILER.stopChrono(PROFILER.getDatabaseMetric(getDatabase().getName(), "metadata.load"), "Loading of database metadata",
+ timer);
}
}
@@ -168,7 +169,7 @@ public void reload() {
schema.reload();
indexManager.load();
security.load();
- //functionLibrary.load();
+ // functionLibrary.load();
}
/**
diff --git a/core/src/main/java/com/orientechnologies/orient/core/metadata/function/OFunction.java b/core/src/main/java/com/orientechnologies/orient/core/metadata/function/OFunction.java
index 87bd31aaeaa..962b0244455 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/metadata/function/OFunction.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/metadata/function/OFunction.java
@@ -151,8 +151,11 @@ public Object execute(final Map<Object, Object> iArgs) {
final Object result = command.execute(iArgs);
if (Orient.instance().getProfiler().isRecording())
- Orient.instance().getProfiler()
- .stopChrono("db." + ODatabaseRecordThreadLocal.INSTANCE.get().getName() + ".function.execute", start);
+ Orient
+ .instance()
+ .getProfiler()
+ .stopChrono("db." + ODatabaseRecordThreadLocal.INSTANCE.get().getName() + ".function.execute",
+ "Time to execute a function", start);
return result;
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/profiler/OJVMProfiler.java b/core/src/main/java/com/orientechnologies/orient/core/profiler/OJVMProfiler.java
index 3fbe0e18913..4850208431b 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/profiler/OJVMProfiler.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/profiler/OJVMProfiler.java
@@ -34,60 +34,64 @@ public class OJVMProfiler extends OProfiler implements OMemoryWatchDog.Listener
private final int metricProcessors = Runtime.getRuntime().availableProcessors();
public OJVMProfiler() {
- registerHookValue(getSystemMetric("config.cpus"), new OProfilerHookValue() {
+ registerHookValue(getSystemMetric("config.cpus"), "Number of CPUs", METRIC_TYPE.COUNTER, new OProfilerHookValue() {
@Override
public Object getValue() {
return metricProcessors;
}
});
- registerHookValue(getSystemMetric("config.os.name"), new OProfilerHookValue() {
+ registerHookValue(getSystemMetric("config.os.name"), "Operative System name", METRIC_TYPE.TEXT, new OProfilerHookValue() {
@Override
public Object getValue() {
return System.getProperty("os.name");
}
});
- registerHookValue(getSystemMetric("config.os.version"), new OProfilerHookValue() {
+ registerHookValue(getSystemMetric("config.os.version"), "Operative System version", METRIC_TYPE.TEXT, new OProfilerHookValue() {
@Override
public Object getValue() {
return System.getProperty("os.version");
}
});
- registerHookValue(getSystemMetric("config.os.arch"), new OProfilerHookValue() {
- @Override
- public Object getValue() {
- return System.getProperty("os.arch");
- }
- });
- registerHookValue(getSystemMetric("config.java.vendor"), new OProfilerHookValue() {
+ registerHookValue(getSystemMetric("config.os.arch"), "Operative System architecture", METRIC_TYPE.TEXT,
+ new OProfilerHookValue() {
+ @Override
+ public Object getValue() {
+ return System.getProperty("os.arch");
+ }
+ });
+ registerHookValue(getSystemMetric("config.java.vendor"), "Java vendor", METRIC_TYPE.TEXT, new OProfilerHookValue() {
@Override
public Object getValue() {
return System.getProperty("java.vendor");
}
});
- registerHookValue(getSystemMetric("config.java.version"), new OProfilerHookValue() {
+ registerHookValue(getSystemMetric("config.java.version"), "Java version", METRIC_TYPE.TEXT, new OProfilerHookValue() {
@Override
public Object getValue() {
return System.getProperty("java.version");
}
});
- registerHookValue(getProcessMetric("runtime.availableMemory"), new OProfilerHookValue() {
- @Override
- public Object getValue() {
- return Runtime.getRuntime().freeMemory();
- }
- });
- registerHookValue(getProcessMetric("runtime.maxMemory"), new OProfilerHookValue() {
- @Override
- public Object getValue() {
- return Runtime.getRuntime().maxMemory();
- }
- });
- registerHookValue(getProcessMetric("runtime.totalMemory"), new OProfilerHookValue() {
- @Override
- public Object getValue() {
- return Runtime.getRuntime().totalMemory();
- }
- });
+ registerHookValue(getProcessMetric("runtime.availableMemory"), "Available memory for the process", METRIC_TYPE.SIZE,
+ new OProfilerHookValue() {
+ @Override
+ public Object getValue() {
+ return Runtime.getRuntime().freeMemory();
+ }
+ });
+ registerHookValue(getProcessMetric("runtime.maxMemory"), "Maximum memory usable for the process", METRIC_TYPE.SIZE,
+ new OProfilerHookValue() {
+ @Override
+ public Object getValue() {
+ return Runtime.getRuntime().maxMemory();
+ }
+ });
+ registerHookValue(getProcessMetric("runtime.totalMemory"), "Total memory used by the process", METRIC_TYPE.SIZE,
+ new OProfilerHookValue() {
+ @Override
+ public Object getValue() {
+ return Runtime.getRuntime().totalMemory();
+ }
+ });
final File[] roots = File.listRoots();
for (final File root : roots) {
@@ -98,21 +102,21 @@ public Object getValue() {
final String metricPrefix = "system.disk." + volumeName;
- registerHookValue(metricPrefix + ".totalSpace", new OProfilerHookValue() {
+ registerHookValue(metricPrefix + ".totalSpace", "Total used disk space", METRIC_TYPE.SIZE, new OProfilerHookValue() {
@Override
public Object getValue() {
return root.getTotalSpace();
}
});
- registerHookValue(metricPrefix + ".freeSpace", new OProfilerHookValue() {
+ registerHookValue(metricPrefix + ".freeSpace", "Total free disk space", METRIC_TYPE.SIZE, new OProfilerHookValue() {
@Override
public Object getValue() {
return root.getFreeSpace();
}
});
- registerHookValue(metricPrefix + ".usableSpace", new OProfilerHookValue() {
+ registerHookValue(metricPrefix + ".usableSpace", "Total usable disk space", METRIC_TYPE.SIZE, new OProfilerHookValue() {
@Override
public Object getValue() {
return root.getUsableSpace();
diff --git a/core/src/main/java/com/orientechnologies/orient/core/serialization/OMemoryStream.java b/core/src/main/java/com/orientechnologies/orient/core/serialization/OMemoryStream.java
index 87b40c16aa4..852cda87ce9 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/serialization/OMemoryStream.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/serialization/OMemoryStream.java
@@ -19,6 +19,7 @@
import java.io.OutputStream;
import java.util.Arrays;
+import com.orientechnologies.common.profiler.OProfiler.METRIC_TYPE;
import com.orientechnologies.common.profiler.OProfiler.OProfilerHookValue;
import com.orientechnologies.common.util.OArrays;
import com.orientechnologies.orient.core.Orient;
@@ -39,11 +40,15 @@ public class OMemoryStream extends OutputStream {
private static long metricResize = 0;
static {
- Orient.instance().getProfiler().registerHookValue("system.memory.stream.resize", new OProfilerHookValue() {
- public Object getValue() {
- return metricResize;
- }
- });
+ Orient
+ .instance()
+ .getProfiler()
+ .registerHookValue("system.memory.stream.resize", "Number of resizes of memory stream buffer", METRIC_TYPE.COUNTER,
+ new OProfilerHookValue() {
+ public Object getValue() {
+ return metricResize;
+ }
+ });
}
public OMemoryStream() {
diff --git a/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerCSVAbstract.java b/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerCSVAbstract.java
index bd58e6814cd..0e319456138 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerCSVAbstract.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerCSVAbstract.java
@@ -245,7 +245,7 @@ public void fieldToStream(final ODocument iRecord, final StringBuilder iOutput,
if (link != null)
// OVERWRITE CONTENT
iRecord.field(iName, link);
- PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.link2string"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.link2string"), "Serialize link to string", timer);
}
break;
}
@@ -255,7 +255,8 @@ public void fieldToStream(final ODocument iRecord, final StringBuilder iOutput,
if (iValue instanceof ORecordLazyList && ((ORecordLazyList) iValue).getStreamedContent() != null) {
iOutput.append(((ORecordLazyList) iValue).getStreamedContent());
- PROFILER.updateCounter(PROFILER.getProcessMetric("serializer.record.string.linkList2string.cached"), +1);
+ PROFILER.updateCounter(PROFILER.getProcessMetric("serializer.record.string.linkList2string.cached"),
+ "Serialize linklist to string in stream mode", +1);
} else {
final ORecordLazyList coll;
final Iterator<OIdentifiable> it;
@@ -273,7 +274,8 @@ public void fieldToStream(final ODocument iRecord, final StringBuilder iOutput,
if (coll.getStreamedContent() != null) {
// APPEND STREAMED CONTENT
iOutput.append(coll.getStreamedContent());
- PROFILER.updateCounter(PROFILER.getProcessMetric("serializer.record.string.linkList2string.cached"), +1);
+ PROFILER.updateCounter(PROFILER.getProcessMetric("serializer.record.string.linkList2string.cached"),
+ "Serialize linklist to string in stream mode", +1);
it = coll.newItemsIterator();
} else
it = coll.rawIterator();
@@ -302,7 +304,8 @@ public void fieldToStream(final ODocument iRecord, final StringBuilder iOutput,
}
iOutput.append(OStringSerializerHelper.COLLECTION_END);
- PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.linkList2string"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.linkList2string"), "Serialize linklist to string",
+ timer);
break;
}
@@ -320,7 +323,8 @@ public void fieldToStream(final ODocument iRecord, final StringBuilder iOutput,
coll = (OMVRBTreeRIDSet) iValue;
linkSetToStream(iOutput, iRecord, coll);
- PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.linkSet2string"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.linkSet2string"), "Serialize linkset to string",
+ timer);
break;
}
@@ -366,7 +370,8 @@ public void fieldToStream(final ODocument iRecord, final StringBuilder iOutput,
}
iOutput.append(OStringSerializerHelper.MAP_END);
- PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.linkMap2string"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.linkMap2string"), "Serialize linkmap to string",
+ timer);
break;
}
@@ -377,22 +382,26 @@ public void fieldToStream(final ODocument iRecord, final StringBuilder iOutput,
iOutput.append(OStringSerializerHelper.EMBEDDED_END);
} else if (iValue != null)
iOutput.append(iValue.toString());
- PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.embed2string"), timer);
+ PROFILER
+ .stopChrono(PROFILER.getProcessMetric("serializer.record.string.embed2string"), "Serialize embedded to string", timer);
break;
case EMBEDDEDLIST:
embeddedCollectionToStream(null, iObjHandler, iOutput, iLinkedClass, iLinkedType, iValue, iMarshalledRecords, iSaveOnlyDirty);
- PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.embedList2string"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.embedList2string"),
+ "Serialize embeddedlist to string", timer);
break;
case EMBEDDEDSET:
embeddedCollectionToStream(null, iObjHandler, iOutput, iLinkedClass, iLinkedType, iValue, iMarshalledRecords, iSaveOnlyDirty);
- PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.embedSet2string"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.embedSet2string"), "Serialize embeddedset to string",
+ timer);
break;
case EMBEDDEDMAP: {
embeddedMapToStream(null, iObjHandler, iOutput, iLinkedClass, iLinkedType, iValue, iMarshalledRecords, iSaveOnlyDirty);
- PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.embedMap2string"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.embedMap2string"), "Serialize embeddedmap to string",
+ timer);
break;
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerStringAbstract.java b/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerStringAbstract.java
index 625a40922b3..e5cb2dd33c6 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerStringAbstract.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerStringAbstract.java
@@ -79,7 +79,8 @@ public ORecordInternal<?> fromStream(final byte[] iSource, final ORecordInternal
return fromString(OBinaryProtocol.bytes2string(iSource), iRecord, iFields);
} finally {
- PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.fromStream"), timer);
+ PROFILER
+ .stopChrono(PROFILER.getProcessMetric("serializer.record.string.fromStream"), "Deserialize record from stream", timer);
}
}
@@ -91,7 +92,7 @@ public byte[] toStream(final ORecordInternal<?> iRecord, boolean iOnlyDelta) {
OSerializationLongIdThreadLocal.INSTANCE.get(), iOnlyDelta, true).toString());
} finally {
- PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.toStream"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.toStream"), "Serialize record to stream", timer);
}
}
@@ -165,62 +166,64 @@ public static void fieldTypeToString(final StringBuilder iBuffer, OType iType, f
switch (iType) {
case STRING:
simpleValueToStream(iBuffer, iType, iValue);
- PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.string2string"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.string2string"), "Serialize string to string", timer);
break;
case BOOLEAN:
simpleValueToStream(iBuffer, iType, iValue);
- PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.bool2string"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.bool2string"), "Serialize boolean to string", timer);
break;
case INTEGER:
simpleValueToStream(iBuffer, iType, iValue);
- PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.int2string"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.int2string"), "Serialize integer to string", timer);
break;
case FLOAT:
simpleValueToStream(iBuffer, iType, iValue);
- PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.float2string"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.float2string"), "Serialize float to string", timer);
break;
case DECIMAL:
simpleValueToStream(iBuffer, iType, iValue);
- PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.decimal2string"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.decimal2string"), "Serialize decimal to string",
+ timer);
break;
case LONG:
simpleValueToStream(iBuffer, iType, iValue);
- PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.long2string"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.long2string"), "Serialize long to string", timer);
break;
case DOUBLE:
simpleValueToStream(iBuffer, iType, iValue);
- PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.double2string"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.double2string"), "Serialize double to string", timer);
break;
case SHORT:
simpleValueToStream(iBuffer, iType, iValue);
- PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.short2string"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.short2string"), "Serialize short to string", timer);
break;
case BYTE:
simpleValueToStream(iBuffer, iType, iValue);
- PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.byte2string"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.byte2string"), "Serialize byte to string", timer);
break;
case BINARY:
simpleValueToStream(iBuffer, iType, iValue);
- PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.binary2string"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.binary2string"), "Serialize binary to string", timer);
break;
case DATE:
simpleValueToStream(iBuffer, iType, iValue);
- PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.date2string"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.date2string"), "Serialize date to string", timer);
break;
case DATETIME:
simpleValueToStream(iBuffer, iType, iValue);
- PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.datetime2string"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.datetime2string"), "Serialize datetime to string",
+ timer);
break;
case LINK:
@@ -228,35 +231,39 @@ public static void fieldTypeToString(final StringBuilder iBuffer, OType iType, f
((ORecordId) iValue).toString(iBuffer);
else
((ORecord<?>) iValue).getIdentity().toString(iBuffer);
- PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.link2string"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.link2string"), "Serialize link to string", timer);
break;
case EMBEDDEDSET:
ORecordSerializerSchemaAware2CSV.INSTANCE.embeddedCollectionToStream(ODatabaseRecordThreadLocal.INSTANCE.getIfDefined(),
null, iBuffer, null, null, iValue, null, true);
- PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.embedSet2string"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.embedSet2string"), "Serialize embeddedset to string",
+ timer);
break;
case EMBEDDEDLIST:
ORecordSerializerSchemaAware2CSV.INSTANCE.embeddedCollectionToStream(ODatabaseRecordThreadLocal.INSTANCE.getIfDefined(),
null, iBuffer, null, null, iValue, null, true);
- PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.embedList2string"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.embedList2string"),
+ "Serialize embeddedlist to string", timer);
break;
case EMBEDDEDMAP:
ORecordSerializerSchemaAware2CSV.INSTANCE.embeddedMapToStream(ODatabaseRecordThreadLocal.INSTANCE.getIfDefined(), null,
iBuffer, null, null, iValue, null, true);
- PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.embedMap2string"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.embedMap2string"), "Serialize embeddedmap to string",
+ timer);
break;
case EMBEDDED:
OStringSerializerEmbedded.INSTANCE.toStream(iBuffer, iValue);
- PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.embed2string"), timer);
+ PROFILER
+ .stopChrono(PROFILER.getProcessMetric("serializer.record.string.embed2string"), "Serialize embedded to string", timer);
break;
case CUSTOM:
OStringSerializerAnyStreamable.INSTANCE.toStream(iBuffer, iValue);
- PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.custom2string"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.custom2string"), "Serialize custom to string", timer);
break;
default:
diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/OIndexProxy.java b/core/src/main/java/com/orientechnologies/orient/core/sql/OIndexProxy.java
index d943a5a3c75..877353beb90 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/sql/OIndexProxy.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/sql/OIndexProxy.java
@@ -361,13 +361,15 @@ private void updateStatistic(OIndex<?> index) {
final OJVMProfiler profiler = Orient.instance().getProfiler();
if (profiler.isRecording()) {
- Orient.instance().getProfiler().updateCounter(profiler.getDatabaseMetric(index.getDatabaseName(), "query.indexUsed"), 1);
+ Orient.instance().getProfiler()
+ .updateCounter(profiler.getDatabaseMetric(index.getDatabaseName(), "query.indexUsed"), "Used index in query", +1);
final int paramCount = index.getDefinition().getParamCount();
if (paramCount > 1) {
final String profiler_prefix = profiler.getDatabaseMetric(index.getDatabaseName(), "query.compositeIndexUsed");
- profiler.updateCounter(profiler_prefix, 1);
- profiler.updateCounter(profiler_prefix + "." + paramCount, 1);
+ profiler.updateCounter(profiler_prefix, "Used composite index in query", +1);
+ profiler.updateCounter(profiler_prefix + "." + paramCount, "Used composite index in query with " + paramCount + " params",
+ +1);
}
}
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperator.java b/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperator.java
index f569a69665a..b4b3ade7f36 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperator.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperator.java
@@ -212,15 +212,16 @@ protected void updateProfiler(final OCommandContext iContext, final OIndex<?> in
final OJVMProfiler profiler = Orient.instance().getProfiler();
if (profiler.isRecording()) {
- profiler.updateCounter(profiler.getDatabaseMetric(index.getDatabaseName(), "query.indexUsed"), +1);
+ profiler.updateCounter(profiler.getDatabaseMetric(index.getDatabaseName(), "query.indexUsed"), "Used index in query", +1);
int params = indexDefinition.getParamCount();
if (params > 1) {
final String profiler_prefix = profiler.getDatabaseMetric(index.getDatabaseName(), "query.compositeIndexUsed");
- profiler.updateCounter(profiler_prefix, 1);
- profiler.updateCounter(profiler_prefix + "." + params, 1);
- profiler.updateCounter(profiler_prefix + "." + params + '.' + keyParams.size(), 1);
+ profiler.updateCounter(profiler_prefix, "Used composite index in query", +1);
+ profiler.updateCounter(profiler_prefix + "." + params, "Used composite index in query with " + params + " params", +1);
+ profiler.updateCounter(profiler_prefix + "." + params + '.' + keyParams.size(), "Used composite index in query with "
+ + params + " params and " + keyParams.size() + " keys", +1);
}
}
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/storage/OStorageEmbedded.java b/core/src/main/java/com/orientechnologies/orient/core/storage/OStorageEmbedded.java
index 09c69c82fe1..20b2a420f59 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/storage/OStorageEmbedded.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/storage/OStorageEmbedded.java
@@ -100,8 +100,11 @@ public Object executeCommand(final OCommandRequestText iCommand, final OCommandE
throw new OCommandExecutionException("Error on execution of command: " + iCommand, e);
} finally {
- Orient.instance().getProfiler()
- .stopChrono("db." + ODatabaseRecordThreadLocal.INSTANCE.get().getName() + ".command." + iCommand.getText(), beginTime);
+ Orient
+ .instance()
+ .getProfiler()
+ .stopChrono("db." + ODatabaseRecordThreadLocal.INSTANCE.get().getName() + ".command." + iCommand.getText(),
+ "Execution of command", beginTime);
}
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/storage/fs/OFileMMap.java b/core/src/main/java/com/orientechnologies/orient/core/storage/fs/OFileMMap.java
index 0bb4f0b43c1..1d7ab3d53c3 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/storage/fs/OFileMMap.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/storage/fs/OFileMMap.java
@@ -26,6 +26,7 @@
import com.orientechnologies.common.io.OFileUtils;
import com.orientechnologies.common.io.OIOException;
import com.orientechnologies.common.log.OLogManager;
+import com.orientechnologies.common.profiler.OProfiler.METRIC_TYPE;
import com.orientechnologies.common.profiler.OProfiler.OProfilerHookValue;
import com.orientechnologies.common.util.OByteBufferUtils;
import com.orientechnologies.orient.core.Orient;
@@ -61,21 +62,33 @@ public class OFileMMap extends OAbstractFile {
private static long metricNonPooledBufferUsed = 0;
static {
- Orient.instance().getProfiler().registerHookValue("system.file.mmap.pooledBufferCreated", new OProfilerHookValue() {
- public Object getValue() {
- return metricPooledBufferCreated;
- }
- });
- Orient.instance().getProfiler().registerHookValue("system.file.mmap.pooledBufferUsed", new OProfilerHookValue() {
- public Object getValue() {
- return metricPooledBufferUsed;
- }
- });
- Orient.instance().getProfiler().registerHookValue("system.file.mmap.nonPooledBufferUsed", new OProfilerHookValue() {
- public Object getValue() {
- return metricNonPooledBufferUsed;
- }
- });
+ Orient
+ .instance()
+ .getProfiler()
+ .registerHookValue("system.file.mmap.pooledBufferCreated", "Number of file buffers created", METRIC_TYPE.COUNTER,
+ new OProfilerHookValue() {
+ public Object getValue() {
+ return metricPooledBufferCreated;
+ }
+ });
+ Orient
+ .instance()
+ .getProfiler()
+ .registerHookValue("system.file.mmap.pooledBufferUsed", "Number of times a file buffers has been reused",
+ METRIC_TYPE.COUNTER, new OProfilerHookValue() {
+ public Object getValue() {
+ return metricPooledBufferUsed;
+ }
+ });
+ Orient
+ .instance()
+ .getProfiler()
+ .registerHookValue("system.file.mmap.nonPooledBufferUsed", "Number of times a file buffers has not been reused",
+ METRIC_TYPE.COUNTER, new OProfilerHookValue() {
+ public Object getValue() {
+ return metricNonPooledBufferUsed;
+ }
+ });
}
@Override
diff --git a/core/src/main/java/com/orientechnologies/orient/core/storage/fs/OMMapBufferEntry.java b/core/src/main/java/com/orientechnologies/orient/core/storage/fs/OMMapBufferEntry.java
index d43703a7964..1d417a9e5b6 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/storage/fs/OMMapBufferEntry.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/storage/fs/OMMapBufferEntry.java
@@ -89,9 +89,9 @@ boolean flush() {
if (dirty)
OLogManager.instance().debug(this, "Cannot commit memory buffer to disk after %d retries", FORCE_RETRY);
else
- PROFILER.updateCounter(PROFILER.getProcessMetric("file.mmap.pagesCommitted"), +1);
+ PROFILER.updateCounter(PROFILER.getProcessMetric("file.mmap.pagesCommitted"), "Memory mapped pages committed to disk", +1);
- PROFILER.stopChrono(PROFILER.getProcessMetric("file.mmap.commitPages"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("file.mmap.commitPages"), "Commit memory mapped pages to disk", timer);
return !dirty;
diff --git a/core/src/main/java/com/orientechnologies/orient/core/storage/fs/OMMapManagerNew.java b/core/src/main/java/com/orientechnologies/orient/core/storage/fs/OMMapManagerNew.java
index e721884e808..d1308807006 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/storage/fs/OMMapManagerNew.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/storage/fs/OMMapManagerNew.java
@@ -21,6 +21,7 @@
import java.util.concurrent.ConcurrentHashMap;
import com.orientechnologies.common.concur.lock.OLockManager;
+import com.orientechnologies.common.profiler.OProfiler.METRIC_TYPE;
import com.orientechnologies.common.profiler.OProfiler.OProfilerHookValue;
import com.orientechnologies.orient.core.Orient;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
@@ -48,16 +49,24 @@ public class OMMapManagerNew extends OMMapManagerAbstract implements OMMapManage
private long metricReusedPages = 0;
public void init() {
- Orient.instance().getProfiler().registerHookValue("system.file.mmap.mappedPages", new OProfilerHookValue() {
- public Object getValue() {
- return metricMappedPages;
- }
- });
- Orient.instance().getProfiler().registerHookValue("system.file.mmap.reusedPages", new OProfilerHookValue() {
- public Object getValue() {
- return metricReusedPages;
- }
- });
+ Orient
+ .instance()
+ .getProfiler()
+ .registerHookValue("system.file.mmap.mappedPages", "Number of memory mapped pages used", METRIC_TYPE.COUNTER,
+ new OProfilerHookValue() {
+ public Object getValue() {
+ return metricMappedPages;
+ }
+ });
+ Orient
+ .instance()
+ .getProfiler()
+ .registerHookValue("system.file.mmap.reusedPages", "Number of times memory mapped pages have been reused",
+ METRIC_TYPE.TIMES, new OProfilerHookValue() {
+ public Object getValue() {
+ return metricReusedPages;
+ }
+ });
}
/**
diff --git a/core/src/main/java/com/orientechnologies/orient/core/storage/fs/OMMapManagerOld.java b/core/src/main/java/com/orientechnologies/orient/core/storage/fs/OMMapManagerOld.java
index 9a3930b157d..fca60dddd1b 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/storage/fs/OMMapManagerOld.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/storage/fs/OMMapManagerOld.java
@@ -29,6 +29,7 @@
import com.orientechnologies.common.io.OFileUtils;
import com.orientechnologies.common.io.OIOException;
import com.orientechnologies.common.log.OLogManager;
+import com.orientechnologies.common.profiler.OProfiler.METRIC_TYPE;
import com.orientechnologies.common.profiler.OProfiler.OProfilerHookValue;
import com.orientechnologies.orient.core.Orient;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
@@ -66,68 +67,110 @@ public void init() {
maxMemory = OGlobalConfiguration.FILE_MMAP_MAX_MEMORY.getValueAsLong();
setOverlapStrategy(OGlobalConfiguration.FILE_MMAP_OVERLAP_STRATEGY.getValueAsInteger());
- Orient.instance().getProfiler().registerHookValue("system.file.mmap.totalMemory", new OProfilerHookValue() {
- public Object getValue() {
- return totalMemory;
- }
- });
-
- Orient.instance().getProfiler().registerHookValue("system.file.mmap.maxMemory", new OProfilerHookValue() {
- public Object getValue() {
- return maxMemory;
- }
- });
-
- Orient.instance().getProfiler().registerHookValue("system.file.mmap.blockSize", new OProfilerHookValue() {
- public Object getValue() {
- return blockSize;
- }
- });
-
- Orient.instance().getProfiler().registerHookValue("system.file.mmap.blocks", new OProfilerHookValue() {
- public Object getValue() {
- lock.readLock().lock();
- try {
- return bufferPoolLRU.size();
- } finally {
- lock.readLock().unlock();
- }
- }
- });
-
- Orient.instance().getProfiler().registerHookValue("system.file.mmap.alloc.strategy", new OProfilerHookValue() {
- public Object getValue() {
- return lastStrategy;
- }
- });
-
- Orient.instance().getProfiler().registerHookValue("system.file.mmap.overlap.strategy", new OProfilerHookValue() {
- public Object getValue() {
- return overlapStrategy;
- }
- });
-
- Orient.instance().getProfiler().registerHookValue("system.file.mmap.usedChannel", new OProfilerHookValue() {
- public Object getValue() {
- return metricUsedChannel;
- }
- });
-
- Orient.instance().getProfiler().registerHookValue("system.file.mmap.reusedPagesBetweenLast", new OProfilerHookValue() {
- public Object getValue() {
- return metricReusedPagesBetweenLast;
- }
- });
- Orient.instance().getProfiler().registerHookValue("system.file.mmap.reusedPages", new OProfilerHookValue() {
- public Object getValue() {
- return metricReusedPages;
- }
- });
- Orient.instance().getProfiler().registerHookValue("system.file.mmap.overlappedPageUsingChannel", new OProfilerHookValue() {
- public Object getValue() {
- return metricOverlappedPageUsingChannel;
- }
- });
+ Orient
+ .instance()
+ .getProfiler()
+ .registerHookValue("system.file.mmap.totalMemory", "Total memory used by memory mapping", METRIC_TYPE.SIZE,
+ new OProfilerHookValue() {
+ public Object getValue() {
+ return totalMemory;
+ }
+ });
+
+ Orient
+ .instance()
+ .getProfiler()
+ .registerHookValue("system.file.mmap.maxMemory", "Maximum memory usable by memory mapping", METRIC_TYPE.SIZE,
+ new OProfilerHookValue() {
+ public Object getValue() {
+ return maxMemory;
+ }
+ });
+
+ Orient
+ .instance()
+ .getProfiler()
+ .registerHookValue("system.file.mmap.blockSize", "Total block size used for memory mapping", METRIC_TYPE.SIZE,
+ new OProfilerHookValue() {
+ public Object getValue() {
+ return blockSize;
+ }
+ });
+
+ Orient
+ .instance()
+ .getProfiler()
+ .registerHookValue("system.file.mmap.blocks", "Total memory used by memory mapping", METRIC_TYPE.COUNTER,
+ new OProfilerHookValue() {
+ public Object getValue() {
+ lock.readLock().lock();
+ try {
+ return bufferPoolLRU.size();
+ } finally {
+ lock.readLock().unlock();
+ }
+ }
+ });
+
+ Orient
+ .instance()
+ .getProfiler()
+ .registerHookValue("system.file.mmap.alloc.strategy", "Memory mapping allocation strategy", METRIC_TYPE.TEXT,
+ new OProfilerHookValue() {
+ public Object getValue() {
+ return lastStrategy;
+ }
+ });
+
+ Orient
+ .instance()
+ .getProfiler()
+ .registerHookValue("system.file.mmap.overlap.strategy", "Memory mapping overlapping strategy", METRIC_TYPE.TEXT,
+ new OProfilerHookValue() {
+ public Object getValue() {
+ return overlapStrategy;
+ }
+ });
+
+ Orient
+ .instance()
+ .getProfiler()
+ .registerHookValue("system.file.mmap.usedChannel",
+ "Number of times the memory mapping has been bypassed to use direct file channel", METRIC_TYPE.TIMES,
+ new OProfilerHookValue() {
+ public Object getValue() {
+ return metricUsedChannel;
+ }
+ });
+
+ Orient
+ .instance()
+ .getProfiler()
+ .registerHookValue("system.file.mmap.reusedPagesBetweenLast",
+ "Number of times a memory mapped page has been reused in short time", METRIC_TYPE.TIMES, new OProfilerHookValue() {
+ public Object getValue() {
+ return metricReusedPagesBetweenLast;
+ }
+ });
+ Orient
+ .instance()
+ .getProfiler()
+ .registerHookValue("system.file.mmap.reusedPages", "Number of times a memory mapped page has been reused",
+ METRIC_TYPE.TIMES, new OProfilerHookValue() {
+ public Object getValue() {
+ return metricReusedPages;
+ }
+ });
+ Orient
+ .instance()
+ .getProfiler()
+ .registerHookValue("system.file.mmap.overlappedPageUsingChannel",
+ "Number of times a direct file channel access has been used because overlapping", METRIC_TYPE.TIMES,
+ new OProfilerHookValue() {
+ public Object getValue() {
+ return metricOverlappedPageUsingChannel;
+ }
+ });
}
public OMMapBufferEntry[] acquire(final OFileMMap iFile, final long iBeginOffset, final int iSize,
@@ -479,7 +522,7 @@ private static OMMapBufferEntry mapBuffer(final OFileMMap iFile, final long iBeg
try {
return new OMMapBufferEntry(iFile, iFile.map(iBeginOffset, iSize), iBeginOffset, iSize);
} finally {
- Orient.instance().getProfiler().stopChrono("OMMapManager.loadPage", timer);
+ Orient.instance().getProfiler().stopChrono("OMMapManager.loadPage", "Load a memory mapped page in memory", timer);
}
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/ODataLocal.java b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/ODataLocal.java
index 119a9ef0ef2..97de75e7bc3 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/ODataLocal.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/ODataLocal.java
@@ -325,7 +325,7 @@ public long setRecord(final long iPosition, final ORecordId iRid, final byte[] i
// USE THE OLD SPACE SINCE SIZE ISN'T CHANGED
file.write(pos[1] + RECORD_FIX_SIZE, iContent);
- Orient.instance().getProfiler().updateCounter(PROFILER_UPDATE_REUSED_ALL, +1);
+ Orient.instance().getProfiler().updateCounter(PROFILER_UPDATE_REUSED_ALL, "", +1);
return iPosition;
} else if (recordSize - contentLength > RECORD_FIX_SIZE + 50) {
// USE THE OLD SPACE BUT UPDATE THE CURRENT SIZE. IT'S PREFEREABLE TO USE THE SAME INSTEAD OF FINDING A BEST SUITED FOR IT
@@ -335,7 +335,8 @@ public long setRecord(final long iPosition, final ORecordId iRid, final byte[] i
// CREATE A HOLE WITH THE DIFFERENCE OF SPACE
createHole(iPosition + RECORD_FIX_SIZE + contentLength, recordSize - contentLength - RECORD_FIX_SIZE);
- Orient.instance().getProfiler().updateCounter(PROFILER_UPDATE_REUSED_PARTIAL, +1);
+ Orient.instance().getProfiler()
+ .updateCounter(PROFILER_UPDATE_REUSED_PARTIAL, "Space reused partially in data segment during record update", +1);
} else {
// CREATE A HOLE FOR THE ENTIRE OLD RECORD
createHole(iPosition, recordSize);
@@ -344,7 +345,8 @@ public long setRecord(final long iPosition, final ORecordId iRid, final byte[] i
pos = getFreeSpace(contentLength + RECORD_FIX_SIZE);
writeRecord(pos, iRid.clusterId, iRid.clusterPosition, iContent);
- Orient.instance().getProfiler().updateCounter(PROFILER_UPDATE_NOT_REUSED, +1);
+ Orient.instance().getProfiler()
+ .updateCounter(PROFILER_UPDATE_NOT_REUSED, "Space not reused in data segment during record update", +1);
}
return getAbsolutePosition(pos);
@@ -430,7 +432,7 @@ private void createHole(final long iRecordOffset, final int iRecordSize) throws
final ODataHoleInfo closestHole = getCloserHole(iRecordOffset, iRecordSize, file, pos);
- Orient.instance().getProfiler().stopChrono(PROFILER_HOLE_FIND_CLOSER, timer);
+ Orient.instance().getProfiler().stopChrono(PROFILER_HOLE_FIND_CLOSER, "Time to find the closer hole in data segment", timer);
if (closestHole == null)
// CREATE A NEW ONE
@@ -461,7 +463,7 @@ else if (closestHole.dataOffset + closestHole.size == iRecordOffset) {
files[(int) pos[0]].writeInt(pos[1], holeSize * -1);
} finally {
- Orient.instance().getProfiler().stopChrono(PROFILER_HOLE_CREATE, timer);
+ Orient.instance().getProfiler().stopChrono(PROFILER_HOLE_CREATE, "Time to create the hole in data segment", timer);
}
}
@@ -564,7 +566,7 @@ else if (sizeMoved != item[1])
final long[] pos = getRelativePosition(holePositionOffset);
files[(int) pos[0]].writeInt(pos[1], holeSize * -1);
- Orient.instance().getProfiler().stopChrono(PROFILER_HOLE_CREATE, timer);
+ Orient.instance().getProfiler().stopChrono(PROFILER_HOLE_CREATE, "Time to create the hole in data segment", timer);
}
private ODataHoleInfo getCloserHole(final long iRecordOffset, final int iRecordSize, final OFile file, final long[] pos) {
@@ -631,7 +633,7 @@ private int moveRecord(final long iSourcePosition, final long iDestinationPositi
writeRecord(getRelativePosition(iDestinationPosition), clusterId, clusterPosition, content);
- Orient.instance().getProfiler().stopChrono(PROFILER_MOVE_RECORD, timer);
+ Orient.instance().getProfiler().stopChrono(PROFILER_MOVE_RECORD, "Time to move a chunk in data segment", timer);
return recordSize + RECORD_FIX_SIZE;
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/ODataLocalHole.java b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/ODataLocalHole.java
index 75df511bd6b..e096e3be1a7 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/ODataLocalHole.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/ODataLocalHole.java
@@ -126,7 +126,7 @@ public synchronized void createHole(final long iRecordOffset, final int iRecordS
file.writeLong(p, iRecordOffset);
file.writeInt(p + OBinaryProtocol.SIZE_LONG, iRecordSize);
- Orient.instance().getProfiler().stopChrono(PROFILER_DATA_HOLE_CREATE, timer);
+ Orient.instance().getProfiler().stopChrono(PROFILER_DATA_HOLE_CREATE, "Time to create a hole in data segment", timer);
}
public synchronized ODataHoleInfo getCloserHole(final long iHolePosition, final int iHoleSize, final long iLowerRange,
@@ -185,7 +185,8 @@ protected synchronized long popFirstAvailableHole(final int iRecordSize) throws
ODataHoleInfo hole = availableHolesBySize.get(cursor);
if (hole != null && hole.size == iRecordSize) {
// PERFECT MATCH: DELETE THE HOLE
- Orient.instance().getProfiler().stopChrono(PROFILER_DATA_RECYCLED_COMPLETE, timer);
+ Orient.instance().getProfiler()
+ .stopChrono(PROFILER_DATA_RECYCLED_COMPLETE, "Time to recycle the hole space completely in data segment", timer);
final long pos = hole.dataOffset;
deleteHole(hole.holeOffset);
return pos;
@@ -196,13 +197,15 @@ protected synchronized long popFirstAvailableHole(final int iRecordSize) throws
if (hole.size > iRecordSize + ODataLocal.RECORD_FIX_SIZE + 50) {
// GOOD MATCH SINCE THE HOLE IS BIG ENOUGH ALSO FOR ANOTHER RECORD: UPDATE THE HOLE WITH THE DIFFERENCE
final long pos = hole.dataOffset;
- Orient.instance().getProfiler().stopChrono(PROFILER_DATA_RECYCLED_PARTIAL, timer);
+ Orient.instance().getProfiler()
+ .stopChrono(PROFILER_DATA_RECYCLED_PARTIAL, "Time to recycle the hole space partially in data segment", timer);
updateHole(hole, hole.dataOffset + iRecordSize, hole.size - iRecordSize);
return pos;
}
}
- Orient.instance().getProfiler().stopChrono(PROFILER_DATA_RECYCLED_NOTFOUND, timer);
+ Orient.instance().getProfiler()
+ .stopChrono(PROFILER_DATA_RECYCLED_NOTFOUND, "Time to recycle a hole space in data segment, but without luck", timer);
return -1;
}
@@ -260,7 +263,8 @@ public synchronized void updateHole(final ODataHoleInfo iHole, final long iNewDa
if (sizeChanged)
file.writeInt(holePosition + OBinaryProtocol.SIZE_LONG, iNewRecordSize);
- Orient.instance().getProfiler().stopChrono(PROFILER_DATA_HOLE_UPDATE, timer);
+ Orient.instance().getProfiler()
+.stopChrono(PROFILER_DATA_HOLE_UPDATE, "Time to update a hole in data segment", timer);
}
/**
diff --git a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/OStorageLocal.java b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/OStorageLocal.java
index c52f99f746e..a9f44d8614f 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/OStorageLocal.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/OStorageLocal.java
@@ -32,6 +32,7 @@
import com.orientechnologies.common.io.OFileUtils;
import com.orientechnologies.common.log.OLogManager;
import com.orientechnologies.common.parser.OSystemVariableResolver;
+import com.orientechnologies.common.profiler.OProfiler.METRIC_TYPE;
import com.orientechnologies.common.profiler.OProfiler.OProfilerHookValue;
import com.orientechnologies.common.util.OArrays;
import com.orientechnologies.orient.core.Orient;
@@ -205,7 +206,7 @@ public synchronized void open(final String iUserName, final String iUserPassword
} finally {
lock.releaseExclusiveLock();
- Orient.instance().getProfiler().stopChrono("db." + name + ".open", timer);
+ Orient.instance().getProfiler().stopChrono("db." + name + ".open", "Open a local database", timer);
}
}
@@ -288,7 +289,7 @@ public void create(final Map<String, Object> iProperties) {
} finally {
lock.releaseExclusiveLock();
- Orient.instance().getProfiler().stopChrono("db." + name + ".create", timer);
+ Orient.instance().getProfiler().stopChrono("db." + name + ".create", "Create a local database", timer);
}
}
@@ -345,7 +346,7 @@ public void close(final boolean iForce) {
} finally {
lock.releaseExclusiveLock();
- Orient.instance().getProfiler().stopChrono("db." + name + ".close", timer);
+ Orient.instance().getProfiler().stopChrono("db." + name + ".close", "Close a local database", timer);
}
}
@@ -420,7 +421,7 @@ public void delete() {
} finally {
lock.releaseExclusiveLock();
- Orient.instance().getProfiler().stopChrono("db." + name + ".delete", timer);
+ Orient.instance().getProfiler().stopChrono("db." + name + ".drop", "Drop a local database", timer);
}
}
@@ -694,7 +695,7 @@ public ODataLocal getDataSegmentById(final int iDataSegmentId) {
try {
if (iDataSegmentId >= dataSegments.length)
- throw new IllegalArgumentException("Data segment #" + iDataSegmentId + " does not exist in storage '" + name + "'");
+ throw new IllegalArgumentException("Data segment #" + iDataSegmentId + " does not exist in database '" + name + "'");
return dataSegments[iDataSegmentId];
@@ -716,7 +717,7 @@ public int getDataSegmentIdByName(final String iDataSegmentName) {
if (d != null && d.getName().equalsIgnoreCase(iDataSegmentName))
return d.getId();
}
- throw new IllegalArgumentException("Data segment '" + iDataSegmentName + "' does not exist in storage '" + name + "'");
+ throw new IllegalArgumentException("Data segment '" + iDataSegmentName + "' does not exist in database '" + name + "'");
} finally {
lock.releaseSharedLock();
@@ -823,7 +824,7 @@ public boolean dropCluster(final int iClusterId) {
if (iClusterId < 0 || iClusterId >= clusters.length)
throw new IllegalArgumentException("Cluster id '" + iClusterId + "' is outside the of range of configured clusters (0-"
- + (clusters.length - 1) + ") in storage '" + name + "'");
+ + (clusters.length - 1) + ") in database '" + name + "'");
final OCluster cluster = clusters[iClusterId];
if (cluster == null)
@@ -887,7 +888,7 @@ public long count(final int[] iClusterIds) {
for (int i = 0; i < iClusterIds.length; ++i) {
if (iClusterIds[i] >= clusters.length)
- throw new OConfigurationException("Cluster id " + iClusterIds[i] + " was not found in storage '" + name + "'");
+ throw new OConfigurationException("Cluster id " + iClusterIds[i] + " was not found in database '" + name + "'");
if (iClusterIds[i] > -1) {
final OCluster c = clusters[iClusterIds[i]];
@@ -922,7 +923,7 @@ public long[] getClusterDataRange(final int iClusterId) {
public long count(final int iClusterId) {
if (iClusterId == -1)
- throw new OStorageException("Cluster Id " + iClusterId + " is invalid in storage '" + name + "'");
+ throw new OStorageException("Cluster Id " + iClusterId + " is invalid in database '" + name + "'");
// COUNT PHYSICAL CLUSTER IF ANY
checkOpeness();
@@ -1174,7 +1175,7 @@ public void synch() {
} finally {
lock.releaseExclusiveLock();
- Orient.instance().getProfiler().stopChrono("db." + name + ".synch", timer);
+ Orient.instance().getProfiler().stopChrono("db." + name + ".synch", "Synch a local database", timer);
}
}
@@ -1199,7 +1200,7 @@ protected void synchRecordUpdate(final OCluster cluster, final OPhysicalPosition
} finally {
lock.releaseExclusiveLock();
- Orient.instance().getProfiler().stopChrono("db." + name + "record.synch", timer);
+ Orient.instance().getProfiler().stopChrono("db." + name + "record.synch", "Synch a record to local database", timer);
}
}
@@ -1324,7 +1325,7 @@ public OCluster getClusterByName(final String iClusterName) {
final OCluster cluster = clusterMap.get(iClusterName.toLowerCase());
if (cluster == null)
- throw new IllegalArgumentException("Cluster " + iClusterName + " does not exist in storage '" + name + "'");
+ throw new IllegalArgumentException("Cluster " + iClusterName + " does not exist in database '" + name + "'");
return cluster;
} finally {
@@ -1478,7 +1479,7 @@ private int registerCluster(final OCluster iCluster) throws IOException {
// CHECK FOR DUPLICATION OF NAMES
if (clusterMap.containsKey(iCluster.getName()))
throw new OConfigurationException("Cannot add segment '" + iCluster.getName()
- + "' because it is already registered in storage '" + name + "'");
+ + "' because it is already registered in database '" + name + "'");
// CREATE AND ADD THE NEW REF SEGMENT
clusterMap.put(iCluster.getName(), iCluster);
id = iCluster.getId();
@@ -1493,7 +1494,7 @@ private int registerCluster(final OCluster iCluster) throws IOException {
private void checkClusterSegmentIndexRange(final int iClusterId) {
if (iClusterId > clusters.length - 1)
- throw new IllegalArgumentException("Cluster segment #" + iClusterId + " does not exist in storage '" + name + "'");
+ throw new IllegalArgumentException("Cluster segment #" + iClusterId + " does not exist in database '" + name + "'");
}
protected OPhysicalPosition createRecord(final ODataLocal iDataSegment, final OCluster iClusterSegment, final byte[] iContent,
@@ -1555,7 +1556,7 @@ protected OPhysicalPosition createRecord(final ODataLocal iDataSegment, final OC
} finally {
lock.releaseExclusiveLock();
- Orient.instance().getProfiler().stopChrono(PROFILER_CREATE_RECORD, timer);
+ Orient.instance().getProfiler().stopChrono(PROFILER_CREATE_RECORD, "Create a record in local database", timer);
}
}
@@ -1600,7 +1601,8 @@ public void changeRecordIdentity(ORID originalId, ORID newId) {
} finally {
lock.releaseExclusiveLock();
- Orient.instance().getProfiler().stopChrono("db." + name + ".changeRecordIdentity", timer);
+ Orient.instance().getProfiler()
+ .stopChrono("db." + name + ".changeRecordIdentity", "Change the identity of a record in local database", timer);
}
}
@@ -1612,7 +1614,8 @@ public boolean isLHClustersAreUsed() {
@Override
protected ORawBuffer readRecord(final OCluster iClusterSegment, final ORecordId iRid, boolean iAtomicLock) {
if (iRid.clusterPosition < 0)
- throw new IllegalArgumentException("Cannot read record " + iRid + " since the position is invalid in storage '" + name + '\'');
+ throw new IllegalArgumentException("Cannot read record " + iRid + " since the position is invalid in database '" + name
+ + '\'');
// NOT FOUND: SEARCH IT IN THE STORAGE
final long timer = Orient.instance().getProfiler().startChrono();
@@ -1648,7 +1651,7 @@ protected ORawBuffer readRecord(final OCluster iClusterSegment, final ORecordId
if (iAtomicLock)
lock.releaseSharedLock();
- Orient.instance().getProfiler().stopChrono(PROFILER_READ_RECORD, timer);
+ Orient.instance().getProfiler().stopChrono(PROFILER_READ_RECORD, "Read a record from local database", timer);
}
}
@@ -1732,7 +1735,7 @@ protected OPhysicalPosition updateRecord(final OCluster iClusterSegment, final O
} finally {
lock.releaseExclusiveLock();
- Orient.instance().getProfiler().stopChrono(PROFILER_UPDATE_RECORD, timer);
+ Orient.instance().getProfiler().stopChrono(PROFILER_UPDATE_RECORD, "Update a record to local database", timer);
}
return null;
@@ -1777,23 +1780,31 @@ protected OPhysicalPosition deleteRecord(final OCluster iClusterSegment, final O
} finally {
lock.releaseExclusiveLock();
- Orient.instance().getProfiler().stopChrono(PROFILER_DELETE_RECORD, timer);
+ Orient.instance().getProfiler().stopChrono(PROFILER_DELETE_RECORD, "Delete a record from local database", timer);
}
return null;
}
private void installProfilerHooks() {
- Orient.instance().getProfiler().registerHookValue("db." + name + ".data.holes", new OProfilerHookValue() {
- public Object getValue() {
- return getHoles();
- }
- });
- Orient.instance().getProfiler().registerHookValue("db." + name + ".data.holeSize", new OProfilerHookValue() {
- public Object getValue() {
- return getHoleSize();
- }
- });
+ Orient
+ .instance()
+ .getProfiler()
+ .registerHookValue("db." + name + ".data.holes", "Number of the holes in local database", METRIC_TYPE.COUNTER,
+ new OProfilerHookValue() {
+ public Object getValue() {
+ return getHoles();
+ }
+ });
+ Orient
+ .instance()
+ .getProfiler()
+ .registerHookValue("db." + name + ".data.holeSize", "Size of the holes in local database", METRIC_TYPE.SIZE,
+ new OProfilerHookValue() {
+ public Object getValue() {
+ return getHoleSize();
+ }
+ });
}
private void formatMessage(final boolean iVerbose, final OCommandOutputListener iListener, final String iMessage,
diff --git a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/memory/OStorageMemory.java b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/memory/OStorageMemory.java
index 621e7783ecd..8347cf10fd3 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/memory/OStorageMemory.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/memory/OStorageMemory.java
@@ -307,7 +307,7 @@ public OStorageOperationResult<OPhysicalPosition> createRecord(final int iDataSe
} finally {
lock.releaseSharedLock();
- Orient.instance().getProfiler().stopChrono(PROFILER_CREATE_RECORD, timer);
+ Orient.instance().getProfiler().stopChrono(PROFILER_CREATE_RECORD, "Create a record in memory database", timer);
}
}
@@ -350,7 +350,7 @@ protected ORawBuffer readRecord(final OCluster iClusterSegment, final ORecordId
} finally {
lock.releaseSharedLock();
- Orient.instance().getProfiler().stopChrono(PROFILER_READ_RECORD, timer);
+ Orient.instance().getProfiler().stopChrono(PROFILER_READ_RECORD, "Read a record from memory database", timer);
}
}
@@ -403,7 +403,7 @@ public OStorageOperationResult<Integer> updateRecord(final ORecordId iRid, final
} finally {
lock.releaseSharedLock();
- Orient.instance().getProfiler().stopChrono(PROFILER_UPDATE_RECORD, timer);
+ Orient.instance().getProfiler().stopChrono(PROFILER_UPDATE_RECORD, "Update a record to memory database", timer);
}
}
@@ -453,7 +453,7 @@ public OStorageOperationResult<Boolean> deleteRecord(final ORecordId iRid, final
} finally {
lock.releaseSharedLock();
- Orient.instance().getProfiler().stopChrono(PROFILER_DELETE_RECORD, timer);
+ Orient.instance().getProfiler().stopChrono(PROFILER_DELETE_RECORD, "Delete a record from memory database", timer);
}
}
@@ -707,7 +707,8 @@ public void changeRecordIdentity(ORID originalId, ORID newId) {
} finally {
lock.releaseExclusiveLock();
- Orient.instance().getProfiler().stopChrono("db." + name + ".changeRecordIdentity", timer);
+ Orient.instance().getProfiler()
+ .stopChrono("db." + name + ".changeRecordIdentity", "Change the identity of a record in memory database", timer);
}
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreePersistent.java b/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreePersistent.java
index e71fd46de48..96035e85752 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreePersistent.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreePersistent.java
@@ -215,7 +215,7 @@ public void clear() {
} catch (IOException e) {
OLogManager.instance().error(this, "Error on deleting the tree: " + dataProvider, e, OStorageException.class);
} finally {
- PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.clear"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.clear"), "Clear a MVRBTree", timer);
}
}
@@ -245,7 +245,7 @@ public void unload() {
} catch (Exception e) {
OLogManager.instance().error(this, "Error on unload the tree: " + dataProvider, e, OStorageException.class);
} finally {
- PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.unload"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.unload"), "Unload a MVRBTree", timer);
}
}
@@ -371,7 +371,7 @@ public int optimize(final boolean iForce) {
checkTreeStructure(root);
}
- PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.optimize"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.optimize"), "Optimize a MVRBTree", timer);
if (OLogManager.instance().isDebugEnabled())
OLogManager.instance().debug(this, "Optimization completed in %d ms\n", System.currentTimeMillis() - timer);
@@ -446,7 +446,7 @@ public V put(final K key, final V value) {
return v;
} finally {
- PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.put"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.put"), "Put a value into a MVRBTree", timer);
}
}
@@ -461,7 +461,7 @@ public void putAll(final Map<? extends K, ? extends V> map) {
commitChanges();
} finally {
- PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.putAll"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.putAll"), "Put multiple values into a MVRBTree", timer);
}
}
@@ -488,7 +488,7 @@ public V remove(final Object key) {
}
}
} finally {
- PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.remove"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.remove"), "Remove a value from a MVRBTree", timer);
}
throw new OLowMemoryException("OMVRBTreePersistent.remove()");
@@ -536,7 +536,7 @@ public int commitChanges() {
} finally {
- PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.commitChanges"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.commitChanges"), "Commit pending changes to a MVRBTree", timer);
}
return totalCommitted;
@@ -570,7 +570,7 @@ public V get(final Object iKey) {
throw new OLowMemoryException("OMVRBTreePersistent.get()");
} finally {
- PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.get"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.get"), "Get a value from a MVRBTree", timer);
}
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeRID.java b/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeRID.java
index 5d87526c468..934efb165f7 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeRID.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeRID.java
@@ -152,7 +152,7 @@ public void putAll(final Collection<OIdentifiable> coll) {
commitChanges();
} finally {
- PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.putAll"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.putAll"), "Put multiple values in a MVRBTreeRID", timer);
}
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeMapEntryProvider.java b/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeMapEntryProvider.java
index 01a1782f4d2..a1c655b27fb 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeMapEntryProvider.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeMapEntryProvider.java
@@ -66,7 +66,7 @@ public K getKeyAt(final int iIndex) {
K k = keys[iIndex];
if (k == null)
try {
- PROFILER.updateCounter(PROFILER.getProcessMetric("mvrbtree.entry.unserializeKey"), 1);
+ PROFILER.updateCounter(PROFILER.getProcessMetric("mvrbtree.entry.unserializeKey"), "Deserialize a MVRBTree entry key", 1);
k = (K) keyFromStream(iIndex);
@@ -87,7 +87,8 @@ public V getValueAt(final int iIndex) {
V v = values[iIndex];
if (v == null)
try {
- PROFILER.updateCounter(PROFILER.getProcessMetric("mvrbtree.entry.unserializeValue"), 1);
+ PROFILER.updateCounter(PROFILER.getProcessMetric("mvrbtree.entry.unserializeValue"), "Deserialize a MVRBTree entry value",
+ 1);
v = (V) valueFromStream(iIndex);
@@ -289,7 +290,7 @@ public OSerializableStream fromStream(byte[] iStream) throws OSerializationExcep
} catch (IOException e) {
throw new OSerializationException("Can not unmarshall tree node with id ", e);
} finally {
- PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.entry.fromStream"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.entry.fromStream"), "Deserialize a MVRBTree entry", timer);
}
}
@@ -307,7 +308,7 @@ public byte[] toStream() throws OSerializationException {
} catch (IOException e) {
throw new OSerializationException("Cannot marshall RB+Tree node", e);
} finally {
- PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.entry.toStream"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.entry.toStream"), "Serialize a MVRBTree entry", timer);
}
}
@@ -405,7 +406,7 @@ private int serializeBinaryValue(byte[] newBuffer, int offset, int i) {
final OBinarySerializer<V> valueSerializer = (OBinarySerializer<V>) ((OMVRBTreeMapProvider<K, V>) treeDataProvider).valueSerializer;
if (serializedValues[i] <= 0) {
- PROFILER.updateCounter(PROFILER.getProcessMetric("mvrbtree.entry.serializeValue"), 1);
+ PROFILER.updateCounter(PROFILER.getProcessMetric("mvrbtree.entry.serializeValue"), "Serialize a MVRBTree entry value", 1);
valueSerializer.serialize(values[i], newBuffer, offset);
offset += valueSerializer.getObjectSize(values[i]);
} else {
@@ -420,7 +421,7 @@ private int serializeBinaryValue(byte[] newBuffer, int offset, int i) {
private int serializeKey(byte[] newBuffer, int offset, int i) {
final OBinarySerializer<K> keySerializer = ((OMVRBTreeMapProvider<K, V>) treeDataProvider).keySerializer;
if (serializedKeys[i] <= 0) {
- PROFILER.updateCounter(PROFILER.getProcessMetric("mvrbtree.entry.serializeKey"), 1);
+ PROFILER.updateCounter(PROFILER.getProcessMetric("mvrbtree.entry.serializeKey"), "Serialize a MVRBTree entry key", 1);
keySerializer.serialize(keys[i], newBuffer, offset);
offset += keySerializer.getObjectSize(keys[i]);
} else {
@@ -468,12 +469,16 @@ private void toStreamUsingBinaryStreamSerializer() throws IOException {
}
final OMemoryStream outStream = new OMemoryStream(outBuffer);
- outStream.jump(offset);
+ try {
+ outStream.jump(offset);
- for (int i = 0; i < size; ++i)
- serializedValues[i] = outStream.set(serializeStreamValue(i));
+ for (int i = 0; i < size; ++i)
+ serializedValues[i] = outStream.set(serializeStreamValue(i));
- buffer = outStream.toByteArray();
+ buffer = outStream.toByteArray();
+ } finally {
+ outStream.close();
+ }
if (stream == null)
stream = new OMemoryStream(buffer);
@@ -546,7 +551,7 @@ private void fromStreamUsingBinaryStreamSerializer(final byte[] inBuffer) {
protected byte[] serializeStreamValue(final int iIndex) throws IOException {
if (serializedValues[iIndex] <= 0) {
// NEW OR MODIFIED: MARSHALL CONTENT
- PROFILER.updateCounter(PROFILER.getProcessMetric("mvrbtree.entry.serializeValue"), 1);
+ PROFILER.updateCounter(PROFILER.getProcessMetric("mvrbtree.entry.serializeValue"), "Serialize a MVRBTree entry value", 1);
return ((OMVRBTreeMapProvider<K, V>) treeDataProvider).valueSerializer.toStream(values[iIndex]);
}
// RETURN ORIGINAL CONTENT
@@ -569,38 +574,43 @@ protected Object valueFromStream(final int iIndex) throws IOException {
private byte[] convertIntoNewSerializationFormat(byte[] stream) throws IOException {
final OMemoryStream oldStream = new OMemoryStream(stream);
- int oldPageSize = oldStream.getAsInteger();
+ try {
+ int oldPageSize = oldStream.getAsInteger();
- ORecordId oldParentRid = new ORecordId().fromStream(oldStream.getAsByteArrayFixed(ORecordId.PERSISTENT_SIZE));
- ORecordId oldLeftRid = new ORecordId().fromStream(oldStream.getAsByteArrayFixed(ORecordId.PERSISTENT_SIZE));
- ORecordId oldRightRid = new ORecordId().fromStream(oldStream.getAsByteArrayFixed(ORecordId.PERSISTENT_SIZE));
+ ORecordId oldParentRid = new ORecordId().fromStream(oldStream.getAsByteArrayFixed(ORecordId.PERSISTENT_SIZE));
+ ORecordId oldLeftRid = new ORecordId().fromStream(oldStream.getAsByteArrayFixed(ORecordId.PERSISTENT_SIZE));
+ ORecordId oldRightRid = new ORecordId().fromStream(oldStream.getAsByteArrayFixed(ORecordId.PERSISTENT_SIZE));
- boolean oldColor = oldStream.getAsBoolean();
- int oldSize = oldStream.getAsInteger();
+ boolean oldColor = oldStream.getAsBoolean();
+ int oldSize = oldStream.getAsInteger();
- if (oldSize > oldPageSize)
- throw new OConfigurationException("Loaded index with page size set to " + oldPageSize + " while the loaded was built with: "
- + oldSize);
+ if (oldSize > oldPageSize)
+ throw new OConfigurationException("Loaded index with page size set to " + oldPageSize
+ + " while the loaded was built with: " + oldSize);
- K[] oldKeys = (K[]) new Object[oldPageSize];
- for (int i = 0; i < oldSize; ++i) {
- oldKeys[i] = (K) ((OMVRBTreeMapProvider<K, V>) treeDataProvider).streamKeySerializer.fromStream(oldStream.getAsByteArray());
- }
+ K[] oldKeys = (K[]) new Object[oldPageSize];
+ for (int i = 0; i < oldSize; ++i) {
+ oldKeys[i] = (K) ((OMVRBTreeMapProvider<K, V>) treeDataProvider).streamKeySerializer.fromStream(oldStream.getAsByteArray());
+ }
- V[] oldValues = (V[]) new Object[oldPageSize];
- for (int i = 0; i < oldSize; ++i) {
- oldValues[i] = (V) ((OMVRBTreeMapProvider<K, V>) treeDataProvider).valueSerializer.fromStream(oldStream.getAsByteArray());
- }
+ V[] oldValues = (V[]) new Object[oldPageSize];
+ for (int i = 0; i < oldSize; ++i) {
+ oldValues[i] = (V) ((OMVRBTreeMapProvider<K, V>) treeDataProvider).valueSerializer.fromStream(oldStream.getAsByteArray());
+ }
- byte[] result;
- if (((OMVRBTreeMapProvider<K, V>) treeDataProvider).valueSerializer instanceof OBinarySerializer)
- result = convertNewSerializationFormatBinarySerializer(oldSize, oldPageSize, oldParentRid, oldLeftRid, oldRightRid, oldColor,
- oldKeys, oldValues);
- else
- result = convertNewSerializationFormatStreamSerializer(oldSize, oldPageSize, oldParentRid, oldLeftRid, oldRightRid, oldColor,
- oldKeys, oldValues);
+ byte[] result;
+ if (((OMVRBTreeMapProvider<K, V>) treeDataProvider).valueSerializer instanceof OBinarySerializer)
+ result = convertNewSerializationFormatBinarySerializer(oldSize, oldPageSize, oldParentRid, oldLeftRid, oldRightRid,
+ oldColor, oldKeys, oldValues);
+ else
+ result = convertNewSerializationFormatStreamSerializer(oldSize, oldPageSize, oldParentRid, oldLeftRid, oldRightRid,
+ oldColor, oldKeys, oldValues);
+
+ return result;
- return result;
+ } finally {
+ oldStream.close();
+ }
}
private byte[] convertNewSerializationFormatBinarySerializer(int oldSize, int oldPageSize, ORecordId oldParentRid,
@@ -661,11 +671,16 @@ private byte[] convertNewSerializationFormatStreamSerializer(int oldSize, int ol
}
final OMemoryStream outStream = new OMemoryStream(outBuffer);
- outStream.jump(offset);
+ try {
+ outStream.jump(offset);
- for (int i = 0; i < oldSize; ++i)
- outStream.set(valueSerializer.toStream(oldValues[i]));
+ for (int i = 0; i < oldSize; ++i)
+ outStream.set(valueSerializer.toStream(oldValues[i]));
+
+ return outStream.toByteArray();
- return outStream.toByteArray();
+ } finally {
+ outStream.close();
+ }
}
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeMapProvider.java b/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeMapProvider.java
index 8441b2060d9..9a89f1a5db4 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeMapProvider.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeMapProvider.java
@@ -116,7 +116,7 @@ public byte[] toStream() throws OSerializationException {
return result;
} finally {
- profiler.stopChrono(profiler.getProcessMetric("mvrbtree.toStream"), timer);
+ profiler.stopChrono(profiler.getProcessMetric("mvrbtree.toStream"), "Serialize a MVRBTree", timer);
}
}
@@ -176,7 +176,7 @@ public OSerializableStream fromStream(final byte[] iStream) throws OSerializatio
OLogManager.instance().error(this, "Error on unmarshalling OMVRBTreeMapProvider object from record: %s", e,
OSerializationException.class, root);
} finally {
- profiler.stopChrono(profiler.getProcessMetric("mvrbtree.fromStream"), timer);
+ profiler.stopChrono(profiler.getProcessMetric("mvrbtree.fromStream"), "Deserialize a MVRBTree", timer);
}
return this;
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeRIDProvider.java b/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeRIDProvider.java
index 28bb5aa8d8a..9ad111e1bb0 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeRIDProvider.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/type/tree/provider/OMVRBTreeRIDProvider.java
@@ -161,7 +161,7 @@ public OStringBuilderSerializable toStream(final StringBuilder iBuffer) throws O
} finally {
marshalling = false;
- PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.toStream"), timer);
+ PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.toStream"), "Serialize a MVRBTreeRID", timer);
}
iBuffer.append(buffer);
diff --git a/enterprise/src/main/java/com/orientechnologies/orient/enterprise/channel/OChannel.java b/enterprise/src/main/java/com/orientechnologies/orient/enterprise/channel/OChannel.java
index e2092534510..472449d567f 100644
--- a/enterprise/src/main/java/com/orientechnologies/orient/enterprise/channel/OChannel.java
+++ b/enterprise/src/main/java/com/orientechnologies/orient/enterprise/channel/OChannel.java
@@ -22,6 +22,7 @@
import java.util.concurrent.atomic.AtomicLong;
import com.orientechnologies.common.concur.resource.OSharedResourceExternalTimeout;
+import com.orientechnologies.common.profiler.OProfiler.METRIC_TYPE;
import com.orientechnologies.common.profiler.OProfiler.OProfilerHookValue;
import com.orientechnologies.orient.core.Orient;
import com.orientechnologies.orient.core.config.OContextConfiguration;
@@ -54,21 +55,24 @@ public abstract class OChannel extends OSharedResourceExternalTimeout {
static {
final String profilerMetric = PROFILER.getProcessMetric("network.channel.binary");
- PROFILER.registerHookValue(profilerMetric + ".transmittedBytes", new OProfilerHookValue() {
- public Object getValue() {
- return metricGlobalTransmittedBytes.get();
- }
- });
- PROFILER.registerHookValue(profilerMetric + ".receivedBytes", new OProfilerHookValue() {
- public Object getValue() {
- return metricGlobalReceivedBytes.get();
- }
- });
- PROFILER.registerHookValue(profilerMetric + ".flushes", new OProfilerHookValue() {
- public Object getValue() {
- return metricGlobalFlushes.get();
- }
- });
+ PROFILER.registerHookValue(profilerMetric + ".transmittedBytes", "Bytes transmitted to all the network channels",
+ METRIC_TYPE.SIZE, new OProfilerHookValue() {
+ public Object getValue() {
+ return metricGlobalTransmittedBytes.get();
+ }
+ });
+ PROFILER.registerHookValue(profilerMetric + ".receivedBytes", "Bytes received from all the network channels", METRIC_TYPE.SIZE,
+ new OProfilerHookValue() {
+ public Object getValue() {
+ return metricGlobalReceivedBytes.get();
+ }
+ });
+ PROFILER.registerHookValue(profilerMetric + ".flushes", "Number of times the network channels have been flushed",
+ METRIC_TYPE.TIMES, new OProfilerHookValue() {
+ public Object getValue() {
+ return metricGlobalFlushes.get();
+ }
+ });
}
public OChannel(final Socket iSocket, final OContextConfiguration iConfig) throws IOException {
@@ -110,21 +114,24 @@ public void connected() {
profilerMetric = PROFILER.getProcessMetric("network.channel.binary." + socket.getRemoteSocketAddress().toString()
+ socket.getLocalPort() + "".replace('.', '_'));
- PROFILER.registerHookValue(profilerMetric + ".transmittedBytes", new OProfilerHookValue() {
- public Object getValue() {
- return metricTransmittedBytes;
- }
- });
- PROFILER.registerHookValue(profilerMetric + ".receivedBytes", new OProfilerHookValue() {
- public Object getValue() {
- return metricReceivedBytes;
- }
- });
- PROFILER.registerHookValue(profilerMetric + ".flushes", new OProfilerHookValue() {
- public Object getValue() {
- return metricFlushes;
- }
- });
+ PROFILER.registerHookValue(profilerMetric + ".transmittedBytes", "Bytes transmitted to a network channel", METRIC_TYPE.SIZE,
+ new OProfilerHookValue() {
+ public Object getValue() {
+ return metricTransmittedBytes;
+ }
+ });
+ PROFILER.registerHookValue(profilerMetric + ".receivedBytes", "Bytes received from a network channel", METRIC_TYPE.SIZE,
+ new OProfilerHookValue() {
+ public Object getValue() {
+ return metricReceivedBytes;
+ }
+ });
+ PROFILER.registerHookValue(profilerMetric + ".flushes", "Number of times the network channel has been flushed",
+ METRIC_TYPE.TIMES, new OProfilerHookValue() {
+ public Object getValue() {
+ return metricFlushes;
+ }
+ });
}
@Override
diff --git a/object/src/main/java/com/orientechnologies/orient/object/enhancement/OObjectEntitySerializer.java b/object/src/main/java/com/orientechnologies/orient/object/enhancement/OObjectEntitySerializer.java
index a2b9be9ee60..da9473f3c5d 100644
--- a/object/src/main/java/com/orientechnologies/orient/object/enhancement/OObjectEntitySerializer.java
+++ b/object/src/main/java/com/orientechnologies/orient/object/enhancement/OObjectEntitySerializer.java
@@ -1050,7 +1050,7 @@ else if (ver.getClass().equals(Object.class))
OObjectSerializationThreadLocal.INSTANCE.get().remove(identityRecord);
- Orient.instance().getProfiler().stopChrono("Object.toStream", timer);
+ Orient.instance().getProfiler().stopChrono("Object.toStream", "Serialize a POJO", timer);
return (T) iProxiedPojo;
}
diff --git a/object/src/main/java/com/orientechnologies/orient/object/serialization/OObjectSerializerHelper.java b/object/src/main/java/com/orientechnologies/orient/object/serialization/OObjectSerializerHelper.java
index 32f904dd251..bcb19e8e3e6 100644
--- a/object/src/main/java/com/orientechnologies/orient/object/serialization/OObjectSerializerHelper.java
+++ b/object/src/main/java/com/orientechnologies/orient/object/serialization/OObjectSerializerHelper.java
@@ -371,7 +371,7 @@ public static Object fromStream(final ODocument iRecord, final Object iPojo, fin
// CALL AFTER UNMARSHALLING
invokeCallback(iPojo, iRecord, OAfterDeserialization.class);
- Orient.instance().getProfiler().stopChrono("Object.fromStream", timer);
+ Orient.instance().getProfiler().stopChrono("Object.fromStream", "Deserialize object from stream", timer);
return iPojo;
}
@@ -637,7 +637,7 @@ else if (ver.getClass().equals(Object.class))
OSerializationThreadLocal.INSTANCE.get().remove(identityRecord);
- Orient.instance().getProfiler().stopChrono("Object.toStream", timer);
+ Orient.instance().getProfiler().stopChrono("Object.toStream", "Serialize object to stream", timer);
return iRecord;
}
diff --git a/server/src/main/java/com/orientechnologies/orient/server/OClientConnectionManager.java b/server/src/main/java/com/orientechnologies/orient/server/OClientConnectionManager.java
index ff8aef0444c..15bad387921 100644
--- a/server/src/main/java/com/orientechnologies/orient/server/OClientConnectionManager.java
+++ b/server/src/main/java/com/orientechnologies/orient/server/OClientConnectionManager.java
@@ -28,6 +28,7 @@
import com.orientechnologies.common.concur.resource.OSharedResourceAbstract;
import com.orientechnologies.common.log.OLogManager;
+import com.orientechnologies.common.profiler.OProfiler.METRIC_TYPE;
import com.orientechnologies.common.profiler.OProfiler.OProfilerHookValue;
import com.orientechnologies.orient.core.Orient;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
@@ -74,11 +75,15 @@ public void run() {
}
}, delay, delay);
- Orient.instance().getProfiler().registerHookValue("server.connections.actives", new OProfilerHookValue() {
- public Object getValue() {
- return metricActiveConnections;
- }
- });
+ Orient
+ .instance()
+ .getProfiler()
+ .registerHookValue("server.connections.actives", "Number of active network connections", METRIC_TYPE.COUNTER,
+ new OProfilerHookValue() {
+ public Object getValue() {
+ return metricActiveConnections;
+ }
+ });
}
/**
diff --git a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/http/ONetworkProtocolHttpAbstract.java b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/http/ONetworkProtocolHttpAbstract.java
index e375956cad3..8faee9565e1 100644
--- a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/http/ONetworkProtocolHttpAbstract.java
+++ b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/http/ONetworkProtocolHttpAbstract.java
@@ -99,7 +99,7 @@ public void config(final OServer iServer, final Socket iSocket, final OContextCo
}
public void service() throws ONetworkProtocolException, IOException {
- Orient.instance().getProfiler().updateCounter("server.http." + listeningAddress + ".requests", +1);
+ Orient.instance().getProfiler().updateCounter("server.http." + listeningAddress + ".requests", "Execution of HTTP request", +1);
++connection.data.totalRequests;
connection.data.commandInfo = null;
@@ -518,23 +518,26 @@ protected void execute() throws Exception {
readAllContent(request);
} finally {
if (connection.data.lastCommandReceived > -1)
- Orient.instance().getProfiler()
- .stopChrono("server.http." + listeningAddress + ".request", connection.data.lastCommandReceived);
+ Orient
+ .instance()
+ .getProfiler()
+ .stopChrono("server.http." + listeningAddress + ".request", "Execution of HTTP request",
+ connection.data.lastCommandReceived);
}
}
protected void connectionClosed() {
- Orient.instance().getProfiler().updateCounter("server.http." + listeningAddress + ".closed", +1);
+ Orient.instance().getProfiler().updateCounter("server.http." + listeningAddress + ".closed", "Close HTTP connection", +1);
sendShutdown();
}
protected void timeout() {
- Orient.instance().getProfiler().updateCounter("server.http." + listeningAddress + ".timeout", +1);
+ Orient.instance().getProfiler().updateCounter("server.http." + listeningAddress + ".timeout", "Timeout of HTTP connection", +1);
sendShutdown();
}
protected void connectionError() {
- Orient.instance().getProfiler().updateCounter("server.http." + listeningAddress + ".error", +1);
+ Orient.instance().getProfiler().updateCounter("server.http." + listeningAddress + ".error", "Error on HTTP connection", +1);
sendShutdown();
}
diff --git a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/http/command/get/OServerCommandGetProfiler.java b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/http/command/get/OServerCommandGetProfiler.java
index 58559d70d1d..40c8b20b745 100644
--- a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/http/command/get/OServerCommandGetProfiler.java
+++ b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/http/command/get/OServerCommandGetProfiler.java
@@ -42,19 +42,24 @@ public boolean execute(final OHttpRequest iRequest, OHttpResponse iResponse) thr
final String command = parts[1];
if (command.equalsIgnoreCase("start")) {
Orient.instance().getProfiler().startRecording();
- iResponse.send(OHttpUtils.STATUS_OK_CODE, "OK", OHttpUtils.CONTENT_JSON, "Recording started", null);
+ iResponse.send(OHttpUtils.STATUS_OK_CODE, "OK", OHttpUtils.CONTENT_TEXT_PLAIN, "Recording started", null);
} else if (command.equalsIgnoreCase("stop")) {
Orient.instance().getProfiler().stopRecording();
- iResponse.send(OHttpUtils.STATUS_OK_CODE, "OK", OHttpUtils.CONTENT_JSON, "Recording stopped", null);
+ iResponse.send(OHttpUtils.STATUS_OK_CODE, "OK", OHttpUtils.CONTENT_TEXT_PLAIN, "Recording stopped", null);
} else if (command.equalsIgnoreCase("configure")) {
Orient.instance().getProfiler().configure(parts[2]);
- iResponse.send(OHttpUtils.STATUS_OK_CODE, "OK", OHttpUtils.CONTENT_JSON, "Profiler configured with: " + parts[2], null);
+ iResponse.send(OHttpUtils.STATUS_OK_CODE, "OK", OHttpUtils.CONTENT_TEXT_PLAIN, "Profiler configured with: " + parts[2],
+ null);
} else if (command.equalsIgnoreCase("status")) {
final String status = Orient.instance().getProfiler().isRecording() ? "on" : "off";
- iResponse.send(OHttpUtils.STATUS_OK_CODE, "OK", OHttpUtils.CONTENT_JSON, status, null);
+ iResponse.send(OHttpUtils.STATUS_OK_CODE, "OK", OHttpUtils.CONTENT_TEXT_PLAIN, status, null);
+
+ } else if (command.equalsIgnoreCase("metadata")) {
+ iResponse.send(OHttpUtils.STATUS_OK_CODE, "OK", OHttpUtils.CONTENT_JSON, Orient.instance().getProfiler().metadataToJSON(),
+ null);
} else {
final String from = parts.length > 2 ? parts[2] : null;
@@ -68,8 +73,7 @@ public boolean execute(final OHttpRequest iRequest, OHttpResponse iResponse) thr
}
} catch (Exception e) {
- iResponse.send(OHttpUtils.STATUS_BADREQ_CODE, OHttpUtils.STATUS_BADREQ_DESCRIPTION, OHttpUtils.CONTENT_TEXT_PLAIN, e,
- null);
+ iResponse.send(OHttpUtils.STATUS_BADREQ_CODE, OHttpUtils.STATUS_BADREQ_DESCRIPTION, OHttpUtils.CONTENT_TEXT_PLAIN, e, null);
}
return false;
}
|
be86c7d216202c91a0ba0a43cd7f89b968146d62
|
drools
|
JBRULES-1590: fixing problem with shadow proxy- cloning for collections--git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@19783 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-
|
c
|
https://github.com/kiegroup/drools
|
diff --git a/drools-compiler/src/test/java/org/drools/MockPersistentSet.java b/drools-compiler/src/test/java/org/drools/MockPersistentSet.java
new file mode 100644
index 00000000000..24d85fdb07e
--- /dev/null
+++ b/drools-compiler/src/test/java/org/drools/MockPersistentSet.java
@@ -0,0 +1,45 @@
+package org.drools;
+
+import java.util.AbstractSet;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Set;
+
+public class MockPersistentSet extends AbstractSet implements Set
+{
+
+ private Set set;
+
+ private boolean exception;
+
+ public MockPersistentSet()
+ {
+ exception = true;
+ set = new HashSet();
+ }
+
+ public MockPersistentSet(boolean exception)
+ {
+ this.exception = exception;
+ set = new HashSet();
+ }
+
+ public int size()
+ {
+ return set.size();
+ }
+
+ public Iterator iterator()
+ {
+ return set.iterator();
+ }
+
+ public boolean addAll(Collection c)
+ {
+ if (exception)
+ throw new MockPersistentSetException("error message like PersistentSet");
+ return set.addAll(c);
+ }
+
+}
diff --git a/drools-compiler/src/test/java/org/drools/MockPersistentSetException.java b/drools-compiler/src/test/java/org/drools/MockPersistentSetException.java
new file mode 100644
index 00000000000..2b0253fb0d1
--- /dev/null
+++ b/drools-compiler/src/test/java/org/drools/MockPersistentSetException.java
@@ -0,0 +1,9 @@
+package org.drools;
+
+public class MockPersistentSetException extends RuntimeException
+{
+ public MockPersistentSetException(String message)
+ {
+ super(message);
+ }
+}
diff --git a/drools-compiler/src/test/java/org/drools/ObjectWithSet.java b/drools-compiler/src/test/java/org/drools/ObjectWithSet.java
new file mode 100644
index 00000000000..806137f4393
--- /dev/null
+++ b/drools-compiler/src/test/java/org/drools/ObjectWithSet.java
@@ -0,0 +1,35 @@
+package org.drools;
+
+import java.util.Set;
+
+public class ObjectWithSet
+{
+ private Set set;
+
+ private String message;
+
+ public String getMessage()
+ {
+ return message;
+ }
+
+ public void setMessage(String message)
+ {
+ this.message = message;
+ }
+
+ public ObjectWithSet()
+ {
+ }
+
+ public Set getSet()
+ {
+ return set;
+ }
+
+ public void setSet(Set set)
+ {
+ this.set = set;
+ }
+
+}
diff --git a/drools-compiler/src/test/java/org/drools/integrationtests/MiscTest.java b/drools-compiler/src/test/java/org/drools/integrationtests/MiscTest.java
index 023d759a24e..943a188f84b 100644
--- a/drools-compiler/src/test/java/org/drools/integrationtests/MiscTest.java
+++ b/drools-compiler/src/test/java/org/drools/integrationtests/MiscTest.java
@@ -54,6 +54,8 @@
import org.drools.IndexedNumber;
import org.drools.InsertedObject;
import org.drools.Message;
+import org.drools.MockPersistentSet;
+import org.drools.ObjectWithSet;
import org.drools.Order;
import org.drools.OrderItem;
import org.drools.OuterClass;
@@ -4418,6 +4420,38 @@ public void testShadowProxyOnCollections() throws Exception {
cheesery.getCheeses().get( 0 ) );
}
+ public void testShadowProxyOnCollections2() throws Exception {
+ final PackageBuilder builder = new PackageBuilder();
+ builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ShadowProxyOnCollections2.drl" ) ) );
+ final Package pkg = builder.getPackage();
+
+ final RuleBase ruleBase = getRuleBase();
+ ruleBase.addPackage( pkg );
+ final StatefulSession workingMemory = ruleBase.newStatefulSession();
+
+ final List results = new ArrayList();
+ workingMemory.setGlobal( "results",
+ results );
+
+ List list = new ArrayList();
+ list.add( "example1" );
+ list.add( "example2" );
+
+ MockPersistentSet mockPersistentSet = new MockPersistentSet( false );
+ mockPersistentSet.addAll( list );
+ org.drools.ObjectWithSet objectWithSet = new ObjectWithSet();
+ objectWithSet.setSet( mockPersistentSet );
+
+ workingMemory.insert( objectWithSet );
+
+ workingMemory.fireAllRules();
+
+ assertEquals( 1,
+ results.size() );
+ assertEquals( "show",
+ objectWithSet.getMessage() );
+ }
+
public void testQueryWithCollect() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_Query.drl" ) ) );
@@ -4848,30 +4882,31 @@ public void testModifyRetractAndModifyInsert() throws Exception {
public void testAlphaCompositeConstraints() throws Exception {
final PackageBuilder builder = new PackageBuilder();
- builder.addPackageFromDrl(new InputStreamReader(getClass()
- .getResourceAsStream("test_AlphaCompositeConstraints.drl")));
+ builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_AlphaCompositeConstraints.drl" ) ) );
final Package pkg = builder.getPackage();
final RuleBase ruleBase = getRuleBase();
- ruleBase.addPackage(pkg);
+ ruleBase.addPackage( pkg );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
final List list = new ArrayList();
- workingMemory.setGlobal("results", list);
+ workingMemory.setGlobal( "results",
+ list );
- Person bob = new Person( "bob", 30 );
+ Person bob = new Person( "bob",
+ 30 );
- workingMemory.insert(bob);
+ workingMemory.insert( bob );
workingMemory.fireAllRules();
- assertEquals( 1, list.size());
+ assertEquals( 1,
+ list.size() );
}
- public void testModifyBlock() throws Exception {
- final PackageBuilder builder = new PackageBuilder();
- builder.addPackageFromDrl(new InputStreamReader(getClass()
- .getResourceAsStream("test_ModifyBlock.drl")));
- final Package pkg = builder.getPackage();
+ public void testModifyBlock() throws Exception {
+ final PackageBuilder builder = new PackageBuilder();
+ builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_ModifyBlock.drl" ) ) );
+ final Package pkg = builder.getPackage();
final RuleBase ruleBase = getRuleBase();
ruleBase.addPackage( pkg );
final WorkingMemory workingMemory = ruleBase.newStatefulSession();
@@ -4919,7 +4954,7 @@ public void testJavaModifyBlock() throws Exception {
workingMemory.insert( new Cheese() );
workingMemory.insert( new OuterClass.InnerClass( 1 ) );
- workingMemory.fireAllRules( );
+ workingMemory.fireAllRules();
assertEquals( 2,
list.size() );
@@ -4928,7 +4963,7 @@ public void testJavaModifyBlock() throws Exception {
assertEquals( 31,
bob.getAge() );
assertEquals( 2,
- ((OuterClass.InnerClass)list.get( 1 )).getIntAttr() );
+ ((OuterClass.InnerClass) list.get( 1 )).getIntAttr() );
}
public void testOrCE() throws Exception {
diff --git a/drools-compiler/src/test/resources/org/drools/integrationtests/test_ShadowProxyOnCollections2.drl b/drools-compiler/src/test/resources/org/drools/integrationtests/test_ShadowProxyOnCollections2.drl
new file mode 100644
index 00000000000..22825890c28
--- /dev/null
+++ b/drools-compiler/src/test/resources/org/drools/integrationtests/test_ShadowProxyOnCollections2.drl
@@ -0,0 +1,14 @@
+package org.drools;
+
+import java.util.ArrayList;
+
+global java.util.List results;
+
+rule "shadow proxy on collections2"
+ when
+ obj:ObjectWithSet( set != null && set.empty == false )
+ then
+ obj.setMessage("show");
+ results.add( "OK" );
+end
+
diff --git a/drools-core/src/main/java/org/drools/util/ShadowProxyUtils.java b/drools-core/src/main/java/org/drools/util/ShadowProxyUtils.java
index eae3422325a..3475e97ce5d 100644
--- a/drools-core/src/main/java/org/drools/util/ShadowProxyUtils.java
+++ b/drools-core/src/main/java/org/drools/util/ShadowProxyUtils.java
@@ -53,6 +53,7 @@ public static Object cloneObject(Object original) {
} catch ( Exception e ) {
/* Failed to clone. Don't worry about it, and just return
* the original object. */
+ clone = null;
}
}
@@ -82,6 +83,7 @@ public static Object cloneObject(Object original) {
} catch ( Exception e ) {
/* Failed to clone. Don't worry about it, and just return
* the original object. */
+ clone = null;
}
}
|
ec559db68e1ad306b1ba97283fbda1074fa50eb0
|
hadoop
|
HDFS-1480. All replicas of a block can end up on- the same rack when some datanodes are decommissioning. Contributed by Todd- Lipcon.--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1160897 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs/CHANGES.txt
index ad1429fa53cae..35f58a5db075c 100644
--- a/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs/CHANGES.txt
@@ -989,6 +989,9 @@ Trunk (unreleased changes)
HDFS-2267. DataXceiver thread name incorrect while waiting on op during
keepalive. (todd)
+ HDFS-1480. All replicas of a block can end up on the same rack when
+ some datanodes are decommissioning. (todd)
+
BREAKDOWN OF HDFS-1073 SUBTASKS
HDFS-1521. Persist transaction ID on disk between NN restarts.
diff --git a/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java b/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
index 296ee98e801f9..081a60430c526 100644
--- a/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
+++ b/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
@@ -66,6 +66,8 @@
import org.apache.hadoop.net.Node;
import org.apache.hadoop.util.Daemon;
+import com.google.common.annotations.VisibleForTesting;
+
/**
* Keeps information related to the blocks stored in the Hadoop cluster.
* This class is a helper class for {@link FSNamesystem} and requires several
@@ -147,7 +149,8 @@ public long getExcessBlocksCount() {
// We also store pending replication-orders.
//
public final UnderReplicatedBlocks neededReplications = new UnderReplicatedBlocks();
- private final PendingReplicationBlocks pendingReplications;
+ @VisibleForTesting
+ final PendingReplicationBlocks pendingReplications;
/** The maximum number of replicas allowed for a block */
public final short maxReplication;
@@ -312,9 +315,14 @@ public void metaSave(PrintWriter out) {
for (Block block : neededReplications) {
List<DatanodeDescriptor> containingNodes =
new ArrayList<DatanodeDescriptor>();
+ List<DatanodeDescriptor> containingLiveReplicasNodes =
+ new ArrayList<DatanodeDescriptor>();
+
NumberReplicas numReplicas = new NumberReplicas();
// source node returned is not used
- chooseSourceDatanode(block, containingNodes, numReplicas);
+ chooseSourceDatanode(block, containingNodes,
+ containingLiveReplicasNodes, numReplicas);
+ assert containingLiveReplicasNodes.size() == numReplicas.liveReplicas();
int usableReplicas = numReplicas.liveReplicas() +
numReplicas.decommissionedReplicas();
@@ -993,9 +1001,10 @@ private List<List<Block>> chooseUnderReplicatedBlocks(int blocksToProcess) {
* @param priority a hint of its priority in the neededReplication queue
* @return if the block gets replicated or not
*/
- private boolean computeReplicationWorkForBlock(Block block, int priority) {
+ @VisibleForTesting
+ boolean computeReplicationWorkForBlock(Block block, int priority) {
int requiredReplication, numEffectiveReplicas;
- List<DatanodeDescriptor> containingNodes;
+ List<DatanodeDescriptor> containingNodes, liveReplicaNodes;
DatanodeDescriptor srcNode;
INodeFile fileINode = null;
int additionalReplRequired;
@@ -1016,11 +1025,14 @@ private boolean computeReplicationWorkForBlock(Block block, int priority) {
// get a source data-node
containingNodes = new ArrayList<DatanodeDescriptor>();
+ liveReplicaNodes = new ArrayList<DatanodeDescriptor>();
NumberReplicas numReplicas = new NumberReplicas();
- srcNode = chooseSourceDatanode(block, containingNodes, numReplicas);
+ srcNode = chooseSourceDatanode(
+ block, containingNodes, liveReplicaNodes, numReplicas);
if(srcNode == null) // block can not be replicated from any node
return false;
+ assert liveReplicaNodes.size() == numReplicas.liveReplicas();
// do not schedule more if enough replicas is already pending
numEffectiveReplicas = numReplicas.liveReplicas() +
pendingReplications.getNumReplicas(block);
@@ -1047,13 +1059,20 @@ private boolean computeReplicationWorkForBlock(Block block, int priority) {
} finally {
namesystem.writeUnlock();
}
+
+ // Exclude all of the containing nodes from being targets.
+ // This list includes decommissioning or corrupt nodes.
+ HashMap<Node, Node> excludedNodes = new HashMap<Node, Node>();
+ for (DatanodeDescriptor dn : containingNodes) {
+ excludedNodes.put(dn, dn);
+ }
// choose replication targets: NOT HOLDING THE GLOBAL LOCK
// It is costly to extract the filename for which chooseTargets is called,
// so for now we pass in the Inode itself.
DatanodeDescriptor targets[] =
blockplacement.chooseTarget(fileINode, additionalReplRequired,
- srcNode, containingNodes, block.getNumBytes());
+ srcNode, liveReplicaNodes, excludedNodes, block.getNumBytes());
if(targets.length == 0)
return false;
@@ -1182,8 +1201,10 @@ public DatanodeDescriptor[] chooseTarget(final String src,
private DatanodeDescriptor chooseSourceDatanode(
Block block,
List<DatanodeDescriptor> containingNodes,
+ List<DatanodeDescriptor> nodesContainingLiveReplicas,
NumberReplicas numReplicas) {
containingNodes.clear();
+ nodesContainingLiveReplicas.clear();
DatanodeDescriptor srcNode = null;
int live = 0;
int decommissioned = 0;
@@ -1202,6 +1223,7 @@ else if (node.isDecommissionInProgress() || node.isDecommissioned())
else if (excessBlocks != null && excessBlocks.contains(block)) {
excess++;
} else {
+ nodesContainingLiveReplicas.add(node);
live++;
}
containingNodes.add(node);
@@ -2049,7 +2071,8 @@ private long addBlock(Block block, List<BlockWithLocations> results) {
/**
* The given node is reporting that it received a certain block.
*/
- private void addBlock(DatanodeDescriptor node, Block block, String delHint)
+ @VisibleForTesting
+ void addBlock(DatanodeDescriptor node, Block block, String delHint)
throws IOException {
// decrement number of blocks scheduled to this datanode.
node.decBlocksScheduled();
diff --git a/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockPlacementPolicy.java b/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockPlacementPolicy.java
index e1e8c9ced946c..b333972a26221 100644
--- a/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockPlacementPolicy.java
+++ b/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockPlacementPolicy.java
@@ -127,9 +127,10 @@ DatanodeDescriptor[] chooseTarget(FSInodeInfo srcInode,
int numOfReplicas,
DatanodeDescriptor writer,
List<DatanodeDescriptor> chosenNodes,
+ HashMap<Node, Node> excludedNodes,
long blocksize) {
return chooseTarget(srcInode.getFullPathName(), numOfReplicas, writer,
- chosenNodes, blocksize);
+ chosenNodes, excludedNodes, blocksize);
}
/**
diff --git a/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockPlacementPolicyDefault.java b/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockPlacementPolicyDefault.java
index fcada7b562350..1b483a75373bf 100644
--- a/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockPlacementPolicyDefault.java
+++ b/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockPlacementPolicyDefault.java
@@ -102,16 +102,6 @@ public DatanodeDescriptor[] chooseTarget(String srcPath,
excludedNodes, blocksize);
}
- /** {@inheritDoc} */
- @Override
- public DatanodeDescriptor[] chooseTarget(FSInodeInfo srcInode,
- int numOfReplicas,
- DatanodeDescriptor writer,
- List<DatanodeDescriptor> chosenNodes,
- long blocksize) {
- return chooseTarget(numOfReplicas, writer, chosenNodes, false,
- null, blocksize);
- }
/** This is the implementation. */
DatanodeDescriptor[] chooseTarget(int numOfReplicas,
diff --git a/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockManager.java b/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockManager.java
new file mode 100644
index 0000000000000..e8193b56d5d52
--- /dev/null
+++ b/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockManager.java
@@ -0,0 +1,391 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.blockmanagement;
+
+import static org.junit.Assert.*;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map.Entry;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.HdfsConfiguration;
+import org.apache.hadoop.hdfs.protocol.Block;
+import org.apache.hadoop.hdfs.protocol.DatanodeID;
+import org.apache.hadoop.hdfs.protocol.FSConstants;
+import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo;
+import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager;
+import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
+import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor.BlockTargetPair;
+import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
+import org.apache.hadoop.hdfs.server.namenode.INodeFile;
+import org.apache.hadoop.net.NetworkTopology;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import com.google.common.base.Joiner;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.LinkedListMultimap;
+import com.google.common.collect.Lists;
+
+public class TestBlockManager {
+ private final List<DatanodeDescriptor> nodes = ImmutableList.of(
+ new DatanodeDescriptor(new DatanodeID("h1:5020"), "/rackA"),
+ new DatanodeDescriptor(new DatanodeID("h2:5020"), "/rackA"),
+ new DatanodeDescriptor(new DatanodeID("h3:5020"), "/rackA"),
+ new DatanodeDescriptor(new DatanodeID("h4:5020"), "/rackB"),
+ new DatanodeDescriptor(new DatanodeID("h5:5020"), "/rackB"),
+ new DatanodeDescriptor(new DatanodeID("h6:5020"), "/rackB")
+ );
+ private final List<DatanodeDescriptor> rackA = nodes.subList(0, 3);
+ private final List<DatanodeDescriptor> rackB = nodes.subList(3, 6);
+
+ /**
+ * Some of these tests exercise code which has some randomness involved -
+ * ie even if there's a bug, they may pass because the random node selection
+ * chooses the correct result.
+ *
+ * Since they're true unit tests and run quickly, we loop them a number
+ * of times trying to trigger the incorrect behavior.
+ */
+ private static final int NUM_TEST_ITERS = 30;
+
+ private static final int BLOCK_SIZE = 64*1024;
+
+ private Configuration conf;
+ private FSNamesystem fsn;
+ private BlockManager bm;
+
+ @Before
+ public void setupMockCluster() throws IOException {
+ conf = new HdfsConfiguration();
+ conf.set(DFSConfigKeys.NET_TOPOLOGY_SCRIPT_FILE_NAME_KEY,
+ "need to set a dummy value here so it assumes a multi-rack cluster");
+ fsn = Mockito.mock(FSNamesystem.class);
+ Mockito.doReturn(true).when(fsn).hasWriteLock();
+ bm = new BlockManager(fsn, conf);
+ }
+
+ private void addNodes(Iterable<DatanodeDescriptor> nodesToAdd) {
+ NetworkTopology cluster = bm.getDatanodeManager().getNetworkTopology();
+ // construct network topology
+ for (DatanodeDescriptor dn : nodesToAdd) {
+ cluster.add(dn);
+ dn.updateHeartbeat(
+ 2*FSConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE, 0L,
+ 2*FSConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE, 0L, 0, 0);
+ }
+ }
+
+ private void removeNode(DatanodeDescriptor deadNode) {
+ NetworkTopology cluster = bm.getDatanodeManager().getNetworkTopology();
+ cluster.remove(deadNode);
+ bm.removeBlocksAssociatedTo(deadNode);
+ }
+
+
+ /**
+ * Test that replication of under-replicated blocks is detected
+ * and basically works
+ */
+ @Test
+ public void testBasicReplication() throws Exception {
+ addNodes(nodes);
+ for (int i = 0; i < NUM_TEST_ITERS; i++) {
+ doBasicTest(i);
+ }
+ }
+
+ private void doBasicTest(int testIndex) {
+ List<DatanodeDescriptor> origNodes = nodes(0, 1);
+ BlockInfo blockInfo = addBlockOnNodes((long)testIndex, origNodes);
+
+ DatanodeDescriptor[] pipeline = scheduleSingleReplication(blockInfo);
+ assertEquals(2, pipeline.length);
+ assertTrue("Source of replication should be one of the nodes the block " +
+ "was on. Was: " + pipeline[0],
+ origNodes.contains(pipeline[0]));
+ assertTrue("Destination of replication should be on the other rack. " +
+ "Was: " + pipeline[1],
+ rackB.contains(pipeline[1]));
+ }
+
+
+ /**
+ * Regression test for HDFS-1480
+ * - Cluster has 2 racks, A and B, each with three nodes.
+ * - Block initially written on A1, A2, B1
+ * - Admin decommissions two of these nodes (let's say A1 and A2 but it doesn't matter)
+ * - Re-replication should respect rack policy
+ */
+ @Test
+ public void testTwoOfThreeNodesDecommissioned() throws Exception {
+ addNodes(nodes);
+ for (int i = 0; i < NUM_TEST_ITERS; i++) {
+ doTestTwoOfThreeNodesDecommissioned(i);
+ }
+ }
+
+ private void doTestTwoOfThreeNodesDecommissioned(int testIndex) throws Exception {
+ // Block originally on A1, A2, B1
+ List<DatanodeDescriptor> origNodes = nodes(0, 1, 3);
+ BlockInfo blockInfo = addBlockOnNodes(testIndex, origNodes);
+
+ // Decommission two of the nodes (A1, A2)
+ List<DatanodeDescriptor> decomNodes = startDecommission(0, 1);
+
+ DatanodeDescriptor[] pipeline = scheduleSingleReplication(blockInfo);
+ assertTrue("Source of replication should be one of the nodes the block " +
+ "was on. Was: " + pipeline[0],
+ origNodes.contains(pipeline[0]));
+ assertEquals("Should have two targets", 3, pipeline.length);
+
+ boolean foundOneOnRackA = false;
+ for (int i = 1; i < pipeline.length; i++) {
+ DatanodeDescriptor target = pipeline[i];
+ if (rackA.contains(target)) {
+ foundOneOnRackA = true;
+ }
+ assertFalse(decomNodes.contains(target));
+ assertFalse(origNodes.contains(target));
+ }
+
+ assertTrue("Should have at least one target on rack A. Pipeline: " +
+ Joiner.on(",").join(pipeline),
+ foundOneOnRackA);
+ }
+
+
+ /**
+ * Test what happens when a block is on three nodes, and all three of those
+ * nodes are decommissioned. It should properly re-replicate to three new
+ * nodes.
+ */
+ @Test
+ public void testAllNodesHoldingReplicasDecommissioned() throws Exception {
+ addNodes(nodes);
+ for (int i = 0; i < NUM_TEST_ITERS; i++) {
+ doTestAllNodesHoldingReplicasDecommissioned(i);
+ }
+ }
+
+ private void doTestAllNodesHoldingReplicasDecommissioned(int testIndex) throws Exception {
+ // Block originally on A1, A2, B1
+ List<DatanodeDescriptor> origNodes = nodes(0, 1, 3);
+ BlockInfo blockInfo = addBlockOnNodes(testIndex, origNodes);
+
+ // Decommission all of the nodes
+ List<DatanodeDescriptor> decomNodes = startDecommission(0, 1, 3);
+
+ DatanodeDescriptor[] pipeline = scheduleSingleReplication(blockInfo);
+ assertTrue("Source of replication should be one of the nodes the block " +
+ "was on. Was: " + pipeline[0],
+ origNodes.contains(pipeline[0]));
+ assertEquals("Should have three targets", 4, pipeline.length);
+
+ boolean foundOneOnRackA = false;
+ boolean foundOneOnRackB = false;
+ for (int i = 1; i < pipeline.length; i++) {
+ DatanodeDescriptor target = pipeline[i];
+ if (rackA.contains(target)) {
+ foundOneOnRackA = true;
+ } else if (rackB.contains(target)) {
+ foundOneOnRackB = true;
+ }
+ assertFalse(decomNodes.contains(target));
+ assertFalse(origNodes.contains(target));
+ }
+
+ assertTrue("Should have at least one target on rack A. Pipeline: " +
+ Joiner.on(",").join(pipeline),
+ foundOneOnRackA);
+ assertTrue("Should have at least one target on rack B. Pipeline: " +
+ Joiner.on(",").join(pipeline),
+ foundOneOnRackB);
+ }
+
+ /**
+ * Test what happens when there are two racks, and an entire rack is
+ * decommissioned.
+ *
+ * Since the cluster is multi-rack, it will consider the block
+ * under-replicated rather than create a third replica on the
+ * same rack. Adding a new node on a third rack should cause re-replication
+ * to that node.
+ */
+ @Test
+ public void testOneOfTwoRacksDecommissioned() throws Exception {
+ addNodes(nodes);
+ for (int i = 0; i < NUM_TEST_ITERS; i++) {
+ doTestOneOfTwoRacksDecommissioned(i);
+ }
+ }
+
+ private void doTestOneOfTwoRacksDecommissioned(int testIndex) throws Exception {
+ // Block originally on A1, A2, B1
+ List<DatanodeDescriptor> origNodes = nodes(0, 1, 3);
+ BlockInfo blockInfo = addBlockOnNodes(testIndex, origNodes);
+
+ // Decommission all of the nodes in rack A
+ List<DatanodeDescriptor> decomNodes = startDecommission(0, 1, 2);
+
+ DatanodeDescriptor[] pipeline = scheduleSingleReplication(blockInfo);
+ assertTrue("Source of replication should be one of the nodes the block " +
+ "was on. Was: " + pipeline[0],
+ origNodes.contains(pipeline[0]));
+ assertEquals("Should have 2 targets", 3, pipeline.length);
+
+ boolean foundOneOnRackB = false;
+ for (int i = 1; i < pipeline.length; i++) {
+ DatanodeDescriptor target = pipeline[i];
+ if (rackB.contains(target)) {
+ foundOneOnRackB = true;
+ }
+ assertFalse(decomNodes.contains(target));
+ assertFalse(origNodes.contains(target));
+ }
+
+ assertTrue("Should have at least one target on rack B. Pipeline: " +
+ Joiner.on(",").join(pipeline),
+ foundOneOnRackB);
+
+ // Mark the block as received on the target nodes in the pipeline
+ fulfillPipeline(blockInfo, pipeline);
+
+ // the block is still under-replicated. Add a new node. This should allow
+ // the third off-rack replica.
+ DatanodeDescriptor rackCNode = new DatanodeDescriptor(new DatanodeID("h7:5020"), "/rackC");
+ addNodes(ImmutableList.of(rackCNode));
+ try {
+ DatanodeDescriptor[] pipeline2 = scheduleSingleReplication(blockInfo);
+ assertEquals(2, pipeline2.length);
+ assertEquals(rackCNode, pipeline2[1]);
+ } finally {
+ removeNode(rackCNode);
+ }
+ }
+
+ /**
+ * Unit test version of testSufficientlyReplBlocksUsesNewRack from
+ * {@link TestBlocksWithNotEnoughRacks}.
+ **/
+ @Test
+ public void testSufficientlyReplBlocksUsesNewRack() throws Exception {
+ addNodes(nodes);
+ for (int i = 0; i < NUM_TEST_ITERS; i++) {
+ doTestSufficientlyReplBlocksUsesNewRack(i);
+ }
+ }
+
+ private void doTestSufficientlyReplBlocksUsesNewRack(int testIndex) {
+ // Originally on only nodes in rack A.
+ List<DatanodeDescriptor> origNodes = rackA;
+ BlockInfo blockInfo = addBlockOnNodes((long)testIndex, origNodes);
+ DatanodeDescriptor pipeline[] = scheduleSingleReplication(blockInfo);
+
+ assertEquals(2, pipeline.length); // single new copy
+ assertTrue("Source of replication should be one of the nodes the block " +
+ "was on. Was: " + pipeline[0],
+ origNodes.contains(pipeline[0]));
+ assertTrue("Destination of replication should be on the other rack. " +
+ "Was: " + pipeline[1],
+ rackB.contains(pipeline[1]));
+ }
+
+
+ /**
+ * Tell the block manager that replication is completed for the given
+ * pipeline.
+ */
+ private void fulfillPipeline(BlockInfo blockInfo,
+ DatanodeDescriptor[] pipeline) throws IOException {
+ for (int i = 1; i < pipeline.length; i++) {
+ bm.addBlock(pipeline[i], blockInfo, null);
+ }
+ }
+
+ private BlockInfo blockOnNodes(long blkId, List<DatanodeDescriptor> nodes) {
+ Block block = new Block(blkId);
+ BlockInfo blockInfo = new BlockInfo(block, 3);
+
+ for (DatanodeDescriptor dn : nodes) {
+ blockInfo.addNode(dn);
+ }
+ return blockInfo;
+ }
+
+ private List<DatanodeDescriptor> nodes(int ... indexes) {
+ List<DatanodeDescriptor> ret = Lists.newArrayList();
+ for (int idx : indexes) {
+ ret.add(nodes.get(idx));
+ }
+ return ret;
+ }
+
+ private List<DatanodeDescriptor> startDecommission(int ... indexes) {
+ List<DatanodeDescriptor> nodes = nodes(indexes);
+ for (DatanodeDescriptor node : nodes) {
+ node.startDecommission();
+ }
+ return nodes;
+ }
+
+ private BlockInfo addBlockOnNodes(long blockId, List<DatanodeDescriptor> nodes) {
+ INodeFile iNode = Mockito.mock(INodeFile.class);
+ Mockito.doReturn((short)3).when(iNode).getReplication();
+ BlockInfo blockInfo = blockOnNodes(blockId, nodes);
+
+ bm.blocksMap.addINode(blockInfo, iNode);
+ return blockInfo;
+ }
+
+ private DatanodeDescriptor[] scheduleSingleReplication(Block block) {
+ assertEquals("Block not initially pending replication",
+ 0, bm.pendingReplications.getNumReplicas(block));
+ assertTrue("computeReplicationWork should indicate replication is needed",
+ bm.computeReplicationWorkForBlock(block, 1));
+ assertTrue("replication is pending after work is computed",
+ bm.pendingReplications.getNumReplicas(block) > 0);
+
+ LinkedListMultimap<DatanodeDescriptor, BlockTargetPair> repls =
+ getAllPendingReplications();
+ assertEquals(1, repls.size());
+ Entry<DatanodeDescriptor, BlockTargetPair> repl = repls.entries().iterator().next();
+ DatanodeDescriptor[] targets = repl.getValue().targets;
+
+ DatanodeDescriptor[] pipeline = new DatanodeDescriptor[1 + targets.length];
+ pipeline[0] = repl.getKey();
+ System.arraycopy(targets, 0, pipeline, 1, targets.length);
+
+ return pipeline;
+ }
+
+ private LinkedListMultimap<DatanodeDescriptor, BlockTargetPair> getAllPendingReplications() {
+ LinkedListMultimap<DatanodeDescriptor, BlockTargetPair> repls =
+ LinkedListMultimap.create();
+ for (DatanodeDescriptor dn : nodes) {
+ List<BlockTargetPair> thisRepls = dn.getReplicationCommand(10);
+ if (thisRepls != null) {
+ repls.putAll(dn, thisRepls);
+ }
+ }
+ return repls;
+ }
+}
|
32510df2df13cdf6a03ead228a2b4f2c6be67b6b
|
drools
|
fix incremental compilation when updating a- kiemodule without changing the release id (for snaphots)--
|
c
|
https://github.com/kiegroup/drools
|
diff --git a/drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/KieContainerImpl.java b/drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/KieContainerImpl.java
index 2a670144eeb..9162fbd3ed8 100644
--- a/drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/KieContainerImpl.java
+++ b/drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/KieContainerImpl.java
@@ -73,7 +73,11 @@ public void updateToVersion(ReleaseId newReleaseId) {
throw new UnsupportedOperationException( "It is not possible to update a classpath container to a new version." );
}
ReleaseId currentReleaseId = kProject.getGAV();
- InternalKieModule currentKM = (InternalKieModule) kr.getKieModule( currentReleaseId );
+
+ // if the new and the current release are equal (a snapshot) check if there is an older version with the same releaseId
+ InternalKieModule currentKM = currentReleaseId.equals( newReleaseId ) && !currentReleaseId.equals(kr.getDefaultReleaseId()) ?
+ (InternalKieModule) ((KieRepositoryImpl)kr).getOldKieModule( currentReleaseId ) :
+ (InternalKieModule) kr.getKieModule( currentReleaseId );
InternalKieModule newKM = (InternalKieModule) kr.getKieModule( newReleaseId );
ChangeSetBuilder csb = new ChangeSetBuilder();
diff --git a/drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/KieRepositoryImpl.java b/drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/KieRepositoryImpl.java
index 85ffd7a9701..50f6c5c7173 100644
--- a/drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/KieRepositoryImpl.java
+++ b/drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/KieRepositoryImpl.java
@@ -70,10 +70,13 @@ public KieModule getKieModule(ReleaseId releaseId) {
return getKieModule(releaseId, null);
}
- public KieModule getKieModule(ReleaseId releaseId, byte[] pomXml) {
- VersionRange versionRange = new VersionRange(releaseId.getVersion());
+ KieModule getOldKieModule(ReleaseId releaseId) {
+ KieModule kieModule = kieModuleRepo.loadOldAndRemove(releaseId);
+ return kieModule != null ? kieModule : getKieModule(releaseId);
+ }
- KieModule kieModule = kieModuleRepo.load(releaseId, versionRange);
+ public KieModule getKieModule(ReleaseId releaseId, byte[] pomXml) {
+ KieModule kieModule = kieModuleRepo.load(releaseId);
if ( kieModule == null ) {
log.debug( "KieModule Lookup. ReleaseId {} was not in cache, checking classpath",
releaseId.toExternalForm() );
@@ -188,6 +191,7 @@ public KieModule getKieModule(Resource resource) {
private static class KieModuleRepo {
private final Map<String, TreeMap<ComparableVersion, KieModule>> kieModules = new HashMap<String, TreeMap<ComparableVersion, KieModule>>();
+ private final Map<ReleaseId, KieModule> oldKieModules = new HashMap<ReleaseId, KieModule>();
void store(KieModule kieModule) {
ReleaseId releaseId = kieModule.getReleaseId();
@@ -198,7 +202,19 @@ void store(KieModule kieModule) {
artifactMap = new TreeMap<ComparableVersion, KieModule>();
kieModules.put(ga, artifactMap);
}
- artifactMap.put(new ComparableVersion(releaseId.getVersion()), kieModule);
+ ComparableVersion comparableVersion = new ComparableVersion(releaseId.getVersion());
+ if (oldKieModules.get(releaseId) == null) {
+ oldKieModules.put(releaseId, artifactMap.get(comparableVersion));
+ }
+ artifactMap.put(comparableVersion, kieModule);
+ }
+
+ private KieModule loadOldAndRemove(ReleaseId releaseId) {
+ return oldKieModules.remove(releaseId);
+ }
+
+ KieModule load(ReleaseId releaseId) {
+ return load(releaseId, new VersionRange(releaseId.getVersion()));
}
KieModule load(ReleaseId releaseId, VersionRange versionRange) {
|
ff5b19bb8413cd0689d33e23b295a81c90dd38c3
|
orientdb
|
Drop database implemented and tested against- local and remote db--
|
a
|
https://github.com/orientechnologies/orientdb
|
diff --git a/commons/src/main/java/com/orientechnologies/common/console/OConsoleApplication.java b/commons/src/main/java/com/orientechnologies/common/console/OConsoleApplication.java
index 0d9af09e443..c400af61a8b 100644
--- a/commons/src/main/java/com/orientechnologies/common/console/OConsoleApplication.java
+++ b/commons/src/main/java/com/orientechnologies/common/console/OConsoleApplication.java
@@ -154,7 +154,7 @@ protected boolean execute(String iCommand) {
methodName = m.getName();
ann = m.getAnnotation(ConsoleCommand.class);
- StringBuilder commandName = new StringBuilder();
+ final StringBuilder commandName = new StringBuilder();
char ch;
int commandWordCount = 1;
for (int i = 0; i < methodName.length(); ++i) {
diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLDropClass.java b/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLDropClass.java
index 6084ef01f3c..bf158216366 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLDropClass.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLDropClass.java
@@ -57,13 +57,13 @@ public OCommandExecutorSQLDropClass parse(final OCommandRequestText iRequest) {
className = word.toString();
if (className == null)
- throw new OCommandSQLParsingException("Class not found", text, pos);
+ throw new OCommandSQLParsingException("Class is null", text, pos);
return this;
}
/**
- * Execute the CREATE PROPERTY.
+ * Execute the DROP CLASS.
*/
public Object execute(final Map<Object, Object> iArgs) {
if (className == null)
diff --git a/tools/src/main/java/com/orientechnologies/orient/console/OConsoleDatabaseApp.java b/tools/src/main/java/com/orientechnologies/orient/console/OConsoleDatabaseApp.java
index 90a99a76a02..05750221f90 100644
--- a/tools/src/main/java/com/orientechnologies/orient/console/OConsoleDatabaseApp.java
+++ b/tools/src/main/java/com/orientechnologies/orient/console/OConsoleDatabaseApp.java
@@ -418,28 +418,67 @@ public void createIndex(@ConsoleParameter(name = "command-text", description = "
out.println("\nIndex created succesfully");
}
+ @ConsoleCommand(description = "Delete the current database")
+ public void dropDatabase() throws IOException {
+ checkCurrentDatabase();
+
+ final String dbName = currentDatabase.getName();
+
+ if (currentDatabase.getURL().startsWith(OEngineRemote.NAME)) {
+ // REMOTE CONNECTION
+ final String dbURL = currentDatabase.getURL().substring(OEngineRemote.NAME.length() + 1);
+ new OServerAdmin(dbURL).connect(currentDatabaseUserName, currentDatabaseUserPassword).deleteDatabase();
+ } else {
+ // LOCAL CONNECTION
+ currentDatabase.delete();
+ currentDatabase = null;
+ }
+
+ out.println("\nDatabase '" + dbName + "' deleted succesfully");
+ }
+
+ @ConsoleCommand(description = "Delete the specified database")
+ public void dropDatabase(
+ @ConsoleParameter(name = "database-url", description = "The url of the database to create in the format '<mode>:<path>'") String iDatabaseURL,
+ @ConsoleParameter(name = "user", description = "Server administrator name") String iUserName,
+ @ConsoleParameter(name = "password", description = "Server administrator password") String iUserPassword) throws IOException {
+
+ if (iDatabaseURL.startsWith(OEngineRemote.NAME)) {
+ // REMOTE CONNECTION
+ final String dbURL = iDatabaseURL.substring(OEngineRemote.NAME.length() + 1);
+ new OServerAdmin(dbURL).connect(iUserName, iUserPassword).deleteDatabase();
+ } else {
+ // LOCAL CONNECTION
+ currentDatabase = new ODatabaseDocumentTx(iDatabaseURL);
+ currentDatabase.delete();
+ currentDatabase = null;
+ }
+
+ out.println("\nDatabase '" + iDatabaseURL + "' deleted succesfully");
+ }
+
@ConsoleCommand(splitInWords = false, description = "Remove an index")
- public void removeIndex(@ConsoleParameter(name = "command-text", description = "The command text to execute") String iCommandText)
+ public void dropIndex(@ConsoleParameter(name = "command-text", description = "The command text to execute") String iCommandText)
throws IOException {
out.println("\nRemoving index...");
- sqlCommand("remove", iCommandText, "\nRemoved index %d link(s) in %f sec(s).\n");
+ sqlCommand("drop", iCommandText, "\nRemoved index %d link(s) in %f sec(s).\n");
out.println("\nIndex removed succesfully");
}
@ConsoleCommand(splitInWords = false, description = "Remove a class from the schema")
- public void removeClass(@ConsoleParameter(name = "command-text", description = "The command text to execute") String iCommandText)
+ public void dropClass(@ConsoleParameter(name = "command-text", description = "The command text to execute") String iCommandText)
throws IOException {
- sqlCommand("remove", iCommandText, "\nRemoved class in %f sec(s).\n");
+ sqlCommand("drop", iCommandText, "\nRemoved class in %f sec(s).\n");
currentDatabase.getMetadata().getSchema().reload();
out.println("\nClass removed succesfully");
}
@ConsoleCommand(splitInWords = false, description = "Remove a property from a class")
- public void removeProperty(
- @ConsoleParameter(name = "command-text", description = "The command text to execute") String iCommandText) throws IOException {
- sqlCommand("remove", iCommandText, "\nRemoved class property in %f sec(s).\n");
+ public void dropProperty(@ConsoleParameter(name = "command-text", description = "The command text to execute") String iCommandText)
+ throws IOException {
+ sqlCommand("drop", iCommandText, "\nRemoved class property in %f sec(s).\n");
out.println("\nClass property removed succesfully");
}
|
f520f8bfb97f8bbb9fa6205045facf1c12123cb3
|
elasticsearch
|
Tests: Add test for parsing "_name" field in- RangeQueryParser--
|
p
|
https://github.com/elastic/elasticsearch
|
diff --git a/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java
index bf580e55f46eb..4df799e9f370e 100644
--- a/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java
@@ -23,7 +23,9 @@
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermRangeQuery;
import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.lucene.BytesRefs;
+import org.hamcrest.core.IsEqual;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
@@ -353,4 +355,42 @@ public void testFromJson() throws IOException {
assertEquals(json, "2015-01-01 00:00:00", parsed.from());
assertEquals(json, "now", parsed.to());
}
+
+ public void testNamedQueryParsing() throws IOException {
+ String json =
+ "{\n" +
+ " \"range\" : {\n" +
+ " \"timestamp\" : {\n" +
+ " \"from\" : \"2015-01-01 00:00:00\",\n" +
+ " \"to\" : \"now\",\n" +
+ " \"boost\" : 1.0,\n" +
+ " \"_name\" : \"my_range\"\n" +
+ " }\n" +
+ " }\n" +
+ "}";
+ assertNotNull(parseQuery(json));
+
+ json =
+ "{\n" +
+ " \"range\" : {\n" +
+ " \"timestamp\" : {\n" +
+ " \"from\" : \"2015-01-01 00:00:00\",\n" +
+ " \"to\" : \"now\",\n" +
+ " \"boost\" : 1.0\n" +
+ " },\n" +
+ " \"_name\" : \"my_range\"\n" +
+ " }\n" +
+ "}";
+
+ // non strict parsing should accept "_name" on top level
+ assertNotNull(parseQuery(json, ParseFieldMatcher.EMPTY));
+
+ // with strict parsing, ParseField will throw exception
+ try {
+ parseQuery(json, ParseFieldMatcher.STRICT);
+ fail("Strict parsing should trigger exception for '_name' on top level");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("Deprecated field [_name] used, replaced by [query name is not supported in short version of range query]"));
+ }
+ }
}
|
f4cf5a7d4a1b3998632309288777275bc30517bb
|
elasticsearch
|
Fix RobinEngineIntegrationTest - missed to- explicitly create the index--
|
c
|
https://github.com/elastic/elasticsearch
|
diff --git a/src/test/java/org/elasticsearch/index/engine/robin/RobinEngineIntegrationTest.java b/src/test/java/org/elasticsearch/index/engine/robin/RobinEngineIntegrationTest.java
index d703fd5fd8463..652ee129d86ea 100644
--- a/src/test/java/org/elasticsearch/index/engine/robin/RobinEngineIntegrationTest.java
+++ b/src/test/java/org/elasticsearch/index/engine/robin/RobinEngineIntegrationTest.java
@@ -35,21 +35,21 @@ public class RobinEngineIntegrationTest extends AbstractIntegrationTest {
@Test
public void testSetIndexCompoundOnFlush() {
- client().admin().indices().prepareCreate("test").setSettings(ImmutableSettings.builder().put("number_of_replicas", 0));
+ client().admin().indices().prepareCreate("test").setSettings(ImmutableSettings.builder().put("number_of_replicas", 0).put("number_of_shards", 1)).get();
client().prepareIndex("test", "foo").setSource("field", "foo").get();
refresh();
- assertTotalCompoundSegments(2, 2, "test");
+ assertTotalCompoundSegments(1, 1, "test");
client().admin().indices().prepareUpdateSettings("test")
.setSettings(ImmutableSettings.builder().put(RobinEngine.INDEX_COMPOUND_ON_FLUSH, false)).get();
client().prepareIndex("test", "foo").setSource("field", "foo").get();
refresh();
- assertTotalCompoundSegments(2, 4, "test");
+ assertTotalCompoundSegments(1, 2, "test");
client().admin().indices().prepareUpdateSettings("test")
.setSettings(ImmutableSettings.builder().put(RobinEngine.INDEX_COMPOUND_ON_FLUSH, true)).get();
client().prepareIndex("test", "foo").setSource("field", "foo").get();
refresh();
- assertTotalCompoundSegments(4, 6, "test");
+ assertTotalCompoundSegments(2, 3, "test");
}
@@ -61,11 +61,13 @@ private void assertTotalCompoundSegments(int i, int t, String index) {
int total = 0;
for (IndexShardSegments indexShardSegments : values) {
for (ShardSegments s : indexShardSegments) {
- for (Segment segment : s.getSegments()) {
- if (segment.isCompound()) {
- compounds++;
+ for (Segment segment : s) {
+ if (segment.isSearch() && segment.getNumDocs() > 0) {
+ if (segment.isCompound()) {
+ compounds++;
+ }
+ total++;
}
- total++;
}
}
}
|
cbde1cfa6eec6cc37025fea406deb33eb82c3509
|
intellij-community
|
don't suggest 'convert to groovy-style property- access' inside closure--
|
c
|
https://github.com/JetBrains/intellij-community
|
diff --git a/plugins/groovy/src/org/jetbrains/plugins/groovy/intentions/style/JavaStylePropertiesInvocationIntention.java b/plugins/groovy/src/org/jetbrains/plugins/groovy/intentions/style/JavaStylePropertiesInvocationIntention.java
index 0327535993267..22ed9c42e5349 100644
--- a/plugins/groovy/src/org/jetbrains/plugins/groovy/intentions/style/JavaStylePropertiesInvocationIntention.java
+++ b/plugins/groovy/src/org/jetbrains/plugins/groovy/intentions/style/JavaStylePropertiesInvocationIntention.java
@@ -22,16 +22,24 @@
import org.jetbrains.plugins.groovy.intentions.base.Intention;
import org.jetbrains.plugins.groovy.intentions.base.IntentionUtils;
import org.jetbrains.plugins.groovy.intentions.base.PsiElementPredicate;
+import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrStatement;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.arguments.GrArgumentList;
-import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.*;
+import org.jetbrains.plugins.groovy.lang.psi.api.statements.blocks.GrClosableBlock;
+import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrApplicationStatement;
+import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrCall;
+import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrExpression;
+import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrReferenceExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.path.GrMethodCallExpression;
-import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrStatement;
import static org.jetbrains.plugins.groovy.lang.psi.util.PsiElementUtil.*;
/**
* @author ilyas
*/
public class JavaStylePropertiesInvocationIntention extends Intention {
+ @Override
+ protected boolean isStopElement(PsiElement element) {
+ return super.isStopElement(element) || element instanceof GrClosableBlock;
+ }
protected void processIntention(@NotNull PsiElement element) throws IncorrectOperationException {
assert element instanceof GrMethodCallExpression || element instanceof GrApplicationStatement;
diff --git a/plugins/groovy/test/org/jetbrains/plugins/groovy/lang/GroovyFixesTest.groovy b/plugins/groovy/test/org/jetbrains/plugins/groovy/lang/GroovyFixesTest.groovy
index aa2674d0ea9b0..7224f84e78fce 100644
--- a/plugins/groovy/test/org/jetbrains/plugins/groovy/lang/GroovyFixesTest.groovy
+++ b/plugins/groovy/test/org/jetbrains/plugins/groovy/lang/GroovyFixesTest.groovy
@@ -6,7 +6,8 @@ package org.jetbrains.plugins.groovy.lang;
import com.intellij.testFramework.fixtures.LightCodeInsightFixtureTestCase
import org.jetbrains.plugins.groovy.codeInspection.control.GroovyConstantIfStatementInspection.ConstantIfStatementVisitor
-import org.jetbrains.plugins.groovy.codeInspection.control.GroovyConstantIfStatementInspection;
+import org.jetbrains.plugins.groovy.codeInspection.control.GroovyConstantIfStatementInspection
+import org.jetbrains.plugins.groovy.codeInspection.gpath.GroovySetterCallCanBePropertyAccessInspection;
/**
* @author peter
@@ -27,4 +28,19 @@ if (true) {
}"""
}
+ public void testShallowChangeToGroovyStylePropertyAccess() throws Throwable {
+ myFixture.enableInspections new GroovySetterCallCanBePropertyAccessInspection()
+ myFixture.configureByText "a.groovy", """class GroovyClasss {
+ def initializer
+ def foo() {
+ setInitializer({
+ <caret>println "hello"
+ })
+ }
+}
+
+"""
+ assertEmpty myFixture.filterAvailableIntentions("Change to Groovy-style property reference")
+ }
+
}
\ No newline at end of file
|
9eb8ef961e7ea615784bf60221ff8232b0b6c108
|
intellij-community
|
remember committed changes splitter proportions- (IDEADEV-16784)--
|
a
|
https://github.com/JetBrains/intellij-community
|
diff --git a/source/com/intellij/openapi/vcs/changes/committed/CommittedChangesPanel.java b/source/com/intellij/openapi/vcs/changes/committed/CommittedChangesPanel.java
index a765e07a0b57b..3bd7bb849c276 100644
--- a/source/com/intellij/openapi/vcs/changes/committed/CommittedChangesPanel.java
+++ b/source/com/intellij/openapi/vcs/changes/committed/CommittedChangesPanel.java
@@ -25,6 +25,7 @@
import com.intellij.openapi.vcs.changes.ChangeList;
import com.intellij.openapi.vcs.versionBrowser.ChangeBrowserSettings;
import com.intellij.openapi.vcs.versionBrowser.CommittedChangeList;
+import com.intellij.openapi.Disposable;
import com.intellij.ui.FilterComponent;
import com.intellij.util.Consumer;
import org.jetbrains.annotations.NotNull;
@@ -35,7 +36,7 @@
import java.util.Collection;
import java.util.List;
-public class CommittedChangesPanel extends JPanel implements TypeSafeDataProvider {
+public class CommittedChangesPanel extends JPanel implements TypeSafeDataProvider, Disposable {
private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.vcs.changes.committed.CommittedChangesPanel");
private CommittedChangesTreeBrowser myBrowser;
@@ -193,6 +194,10 @@ else if (key.equals(DataKeys.CHANGE_LISTS)) {
}
}
+ public void dispose() {
+ myBrowser.dispose();
+ }
+
private class MyFilterComponent extends FilterComponent {
public MyFilterComponent() {
super("COMMITTED_CHANGES_FILTER_HISTORY", 20);
diff --git a/source/com/intellij/openapi/vcs/changes/committed/CommittedChangesTreeBrowser.java b/source/com/intellij/openapi/vcs/changes/committed/CommittedChangesTreeBrowser.java
index 6c68c522d68d7..823cae3268c52 100644
--- a/source/com/intellij/openapi/vcs/changes/committed/CommittedChangesTreeBrowser.java
+++ b/source/com/intellij/openapi/vcs/changes/committed/CommittedChangesTreeBrowser.java
@@ -14,11 +14,14 @@
import com.intellij.openapi.vcs.changes.ui.ChangesBrowser;
import com.intellij.openapi.vcs.versionBrowser.CommittedChangeList;
import com.intellij.openapi.ui.Splitter;
+import com.intellij.openapi.ui.SplitterProportionsData;
+import com.intellij.openapi.Disposable;
import com.intellij.ui.ColoredTreeCellRenderer;
import com.intellij.ui.PopupHandler;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.util.ui.Tree;
import com.intellij.util.ui.tree.TreeUtil;
+import com.intellij.peer.PeerFactory;
import javax.swing.*;
import javax.swing.event.TreeSelectionEvent;
@@ -41,7 +44,7 @@
/**
* @author yole
*/
-public class CommittedChangesTreeBrowser extends JPanel implements TypeSafeDataProvider {
+public class CommittedChangesTreeBrowser extends JPanel implements TypeSafeDataProvider, Disposable {
private final Tree myChangesTree;
private final ChangesBrowser myChangesView;
private List<CommittedChangeList> myChangeLists;
@@ -51,10 +54,9 @@ public class CommittedChangesTreeBrowser extends JPanel implements TypeSafeDataP
private Splitter myFilterSplitter;
private JPanel myLeftPanel;
private CommittedChangeListRenderer myCellRenderer;
- private JScrollPane myChangesTreeScrollPane;
- private Splitter mySplitter;
private FilterChangeListener myFilterChangeListener = new FilterChangeListener();
private List<CommittedChangeList> myFilteredChangeLists;
+ private final SplitterProportionsData mySplitterProportionsData = PeerFactory.getInstance().getUIHelper().createSplitterProportionsData();
public CommittedChangesTreeBrowser(final Project project, final List<CommittedChangeList> changeLists) {
super(new BorderLayout());
@@ -95,15 +97,17 @@ public void mouseClicked(final MouseEvent e) {
});
myLeftPanel = new JPanel(new BorderLayout());
- myChangesTreeScrollPane = new JScrollPane(myChangesTree);
myFilterSplitter = new Splitter(false, 0.5f);
- myFilterSplitter.setSecondComponent(myChangesTreeScrollPane);
+ myFilterSplitter.setSecondComponent(new JScrollPane(myChangesTree));
myLeftPanel.add(myFilterSplitter, BorderLayout.CENTER);
- mySplitter = new Splitter(false, 0.7f);
- mySplitter.setFirstComponent(myLeftPanel);
- mySplitter.setSecondComponent(myChangesView);
+ final Splitter splitter = new Splitter(false, 0.7f);
+ splitter.setFirstComponent(myLeftPanel);
+ splitter.setSecondComponent(myChangesView);
- add(mySplitter, BorderLayout.CENTER);
+ add(splitter, BorderLayout.CENTER);
+
+ mySplitterProportionsData.externalizeFromDimensionService("CommittedChanges.SplitterProportions");
+ mySplitterProportionsData.restoreSplitterProportions(this);
updateBySelectionChange();
@@ -137,6 +141,8 @@ public void addToolBar(JComponent toolBar) {
}
public void dispose() {
+ mySplitterProportionsData.saveSplitterProportions(this);
+ mySplitterProportionsData.externalizeToDimensionService("CommittedChanges.SplitterProportions");
myChangesView.dispose();
}
diff --git a/source/com/intellij/openapi/vcs/changes/ui/ChangesViewContentManager.java b/source/com/intellij/openapi/vcs/changes/ui/ChangesViewContentManager.java
index 99ec1f66a9b16..c3eb37981076e 100644
--- a/source/com/intellij/openapi/vcs/changes/ui/ChangesViewContentManager.java
+++ b/source/com/intellij/openapi/vcs/changes/ui/ChangesViewContentManager.java
@@ -23,6 +23,7 @@
import com.intellij.openapi.wm.ToolWindow;
import com.intellij.openapi.wm.ToolWindowAnchor;
import com.intellij.openapi.wm.ToolWindowManager;
+import com.intellij.openapi.Disposable;
import com.intellij.peer.PeerFactory;
import com.intellij.ui.content.Content;
import com.intellij.ui.content.ContentManager;
@@ -237,7 +238,11 @@ public void selectionChanged(final ContentManagerEvent event) {
if (event.getContent().getComponent() instanceof ContentStub) {
ChangesViewContentEP ep = ((ContentStub) event.getContent().getComponent()).getEP();
ChangesViewContentProvider provider = ep.getInstance(myProject);
- event.getContent().setComponent(provider.initContent());
+ final JComponent contentComponent = provider.initContent();
+ event.getContent().setComponent(contentComponent);
+ if (contentComponent instanceof Disposable) {
+ event.getContent().setDisposer((Disposable) contentComponent);
+ }
}
}
}
diff --git a/ui/openapi/com/intellij/openapi/ui/SplitterProportionsData.java b/ui/openapi/com/intellij/openapi/ui/SplitterProportionsData.java
index d095e1ecaa7c5..a8b1249d43927 100644
--- a/ui/openapi/com/intellij/openapi/ui/SplitterProportionsData.java
+++ b/ui/openapi/com/intellij/openapi/ui/SplitterProportionsData.java
@@ -23,6 +23,7 @@
package com.intellij.openapi.ui;
import com.intellij.openapi.util.JDOMExternalizable;
+import org.jetbrains.annotations.NonNls;
import java.awt.*;
@@ -32,7 +33,7 @@ public interface SplitterProportionsData extends JDOMExternalizable {
void restoreSplitterProportions(Component root);
- void externalizeToDimensionService(String key);
+ void externalizeToDimensionService(@NonNls String key);
- void externalizeFromDimensionService(String key);
+ void externalizeFromDimensionService(@NonNls String key);
}
\ No newline at end of file
|
3e26fa32b15dfa1e16bddaae6b874a744358de75
|
ReactiveX-RxJava
|
Removed window between the two synchronized blocks.--
|
p
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/main/java/rx/observers/SerializedObserver.java b/rxjava-core/src/main/java/rx/observers/SerializedObserver.java
index 94a7845bcb..c9103ba954 100644
--- a/rxjava-core/src/main/java/rx/observers/SerializedObserver.java
+++ b/rxjava-core/src/main/java/rx/observers/SerializedObserver.java
@@ -128,6 +128,7 @@ public void onNext(T t) {
}
// we only get here if we won the right to emit, otherwise we returned in the if(emitting) block above
+ boolean skipFinal = false;
try {
int iter = MAX_DRAIN_ITERATION;
do {
@@ -141,27 +142,32 @@ public void onNext(T t) {
synchronized (this) {
list = queue;
queue = null;
- }
- if (list == null) {
- break;
+ if (list == null) {
+ emitting = false;
+ skipFinal = true;
+ return;
+ }
}
}
} while (iter > 0);
} finally {
- synchronized (this) {
- if (terminated) {
- list = queue;
- queue = null;
- } else {
- emitting = false;
- list = null;
+ if (!skipFinal) {
+ synchronized (this) {
+ if (terminated) {
+ list = queue;
+ queue = null;
+ } else {
+ emitting = false;
+ list = null;
+ }
}
}
- // this will only drain if terminated (done here outside of synchronized block)
- drainQueue(list);
}
+
+ // this will only drain if terminated (done here outside of synchronized block)
+ drainQueue(list);
}
-
+
void drainQueue(FastList list) {
if (list == null || list.size == 0) {
return;
|
43b5767f6213784c7e4cdfce2bebadd87108b33f
|
hbase
|
HBASE-10606 Bad timeout in- RpcRetryingCaller-callWithRetries w/o parameters--git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1572124 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hbase
|
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
index 8b2df053f67c..bf89fe276bd2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
@@ -33,7 +33,6 @@
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.RejectedExecutionException;
-import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
@@ -49,7 +48,6 @@
import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.apache.hadoop.hbase.util.Pair;
import org.cloudera.htrace.Trace;
import com.google.common.annotations.VisibleForTesting;
@@ -118,8 +116,6 @@ public static interface AsyncRequestFuture {
public void waitUntilDone() throws InterruptedIOException {}
};
-
- // TODO: many of the fields should be made private
protected final long id;
protected final ClusterConnection hConnection;
@@ -156,6 +152,7 @@ public void waitUntilDone() throws InterruptedIOException {}
protected final long pause;
protected int numTries;
protected int serverTrackerTimeout;
+ protected int operationTimeout;
// End configuration settings.
protected static class BatchErrors {
@@ -206,6 +203,8 @@ public AsyncProcess(ClusterConnection hc, Configuration conf, ExecutorService po
HConstants.DEFAULT_HBASE_CLIENT_PAUSE);
this.numTries = conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER,
HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER);
+ this.operationTimeout = conf.getInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT,
+ HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT);
this.maxTotalConcurrentTasks = conf.getInt(HConstants.HBASE_CLIENT_MAX_TOTAL_TASKS,
HConstants.DEFAULT_HBASE_CLIENT_MAX_TOTAL_TASKS);
@@ -303,14 +302,12 @@ public <CResult> AsyncRequestFuture submit(ExecutorService pool, TableName table
Iterator<? extends Row> it = rows.iterator();
while (it.hasNext()) {
Row r = it.next();
- HRegionLocation loc = null;
+ HRegionLocation loc;
try {
loc = findDestLocation(tableName, r);
} catch (IOException ex) {
- if (locationErrors == null) {
- locationErrors = new ArrayList<Exception>();
- locationErrorRows = new ArrayList<Integer>();
- }
+ locationErrors = new ArrayList<Exception>();
+ locationErrorRows = new ArrayList<Integer>();
LOG.error("Failed to get region location ", ex);
// This action failed before creating ars. Add it to retained but do not add to submit list.
// We will then add it to ars in an already-failed state.
@@ -600,7 +597,7 @@ public void run() {
try {
MultiServerCallable<Row> callable = createCallable(server, tableName, multiAction);
try {
- res = createCaller(callable).callWithoutRetries(callable);
+ res = createCaller(callable).callWithoutRetries(callable, operationTimeout);
} catch (IOException e) {
// The service itself failed . It may be an error coming from the communication
// layer, but, as well, a functional error raised by the server.
@@ -1010,7 +1007,7 @@ public boolean hasError() {
* failed operations themselves.
* @param failedRows an optional list into which the rows that failed since the last time
* {@link #waitForAllPreviousOpsAndReset(List)} was called, or AP was created, are saved.
- * @returns all the errors since the last time {@link #waitForAllPreviousOpsAndReset(List)}
+ * @return all the errors since the last time {@link #waitForAllPreviousOpsAndReset(List)}
* was called, or AP was created.
*/
public RetriesExhaustedWithDetailsException waitForAllPreviousOpsAndReset(
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
index 807975eb6c9c..0290dcac922a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
@@ -39,6 +39,7 @@
import org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos;
import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException;
import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ExceptionUtil;
/**
* Implements the scanner interface for the HBase client.
@@ -63,7 +64,7 @@ public class ClientScanner extends AbstractClientScanner {
protected final long maxScannerResultSize;
private final HConnection connection;
private final TableName tableName;
- private final int scannerTimeout;
+ protected final int scannerTimeout;
protected boolean scanMetricsPublished = false;
protected RpcRetryingCaller<Result []> caller;
@@ -224,7 +225,7 @@ protected boolean nextScanner(int nbRows, final boolean done)
// Close the previous scanner if it's open
if (this.callable != null) {
this.callable.setClose();
- this.caller.callWithRetries(callable);
+ this.caller.callWithRetries(callable, scannerTimeout);
this.callable = null;
}
@@ -261,7 +262,7 @@ protected boolean nextScanner(int nbRows, final boolean done)
callable = getScannerCallable(localStartKey, nbRows);
// Open a scanner on the region server starting at the
// beginning of the region
- this.caller.callWithRetries(callable);
+ this.caller.callWithRetries(callable, scannerTimeout);
this.currentRegion = callable.getHRegionInfo();
if (this.scanMetrics != null) {
this.scanMetrics.countOfRegions.incrementAndGet();
@@ -326,17 +327,17 @@ public Result next() throws IOException {
// Skip only the first row (which was the last row of the last
// already-processed batch).
callable.setCaching(1);
- values = this.caller.callWithRetries(callable);
+ values = this.caller.callWithRetries(callable, scannerTimeout);
callable.setCaching(this.caching);
skipFirst = false;
}
// Server returns a null values if scanning is to stop. Else,
// returns an empty array if scanning is to go on and we've just
// exhausted current region.
- values = this.caller.callWithRetries(callable);
+ values = this.caller.callWithRetries(callable, scannerTimeout);
if (skipFirst && values != null && values.length == 1) {
skipFirst = false; // Already skipped, unset it before scanning again
- values = this.caller.callWithRetries(callable);
+ values = this.caller.callWithRetries(callable, scannerTimeout);
}
retryAfterOutOfOrderException = true;
} catch (DoNotRetryIOException e) {
@@ -428,7 +429,7 @@ public void close() {
if (callable != null) {
callable.setClose();
try {
- this.caller.callWithRetries(callable);
+ this.caller.callWithRetries(callable, scannerTimeout);
} catch (UnknownScannerException e) {
// We used to catch this error, interpret, and rethrow. However, we
// have since decided that it's not nice for a scanner's close to
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientSmallScanner.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientSmallScanner.java
index 77fd2aa3186a..a980ec968f41 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientSmallScanner.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientSmallScanner.java
@@ -200,7 +200,7 @@ && nextScanner(countdown, values == null, currentRegionDone)) {
// Server returns a null values if scanning is to stop. Else,
// returns an empty array if scanning is to go on and we've just
// exhausted current region.
- values = this.caller.callWithRetries(smallScanCallable);
+ values = this.caller.callWithRetries(smallScanCallable, scannerTimeout);
this.currentRegion = smallScanCallable.getHRegionInfo();
long currentTime = System.currentTimeMillis();
if (this.scanMetrics != null) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index 2dc212a76686..27e973ba7af9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -175,6 +175,7 @@ public class HBaseAdmin implements Abortable, Closeable {
private boolean aborted;
private boolean cleanupConnectionOnClose = false; // close the connection in close()
private boolean closed = false;
+ private int operationTimeout;
private RpcRetryingCallerFactory rpcCallerFactory;
@@ -192,6 +193,11 @@ public HBaseAdmin(Configuration c)
this.cleanupConnectionOnClose = true;
}
+ public int getOperationTimeout() {
+ return operationTimeout;
+ }
+
+
/**
* Constructor for externally managed HConnections.
* The connection to master will be created when required by admin functions.
@@ -217,6 +223,9 @@ public HBaseAdmin(HConnection connection)
HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER);
this.retryLongerMultiplier = this.conf.getInt(
"hbase.client.retries.longer.multiplier", 10);
+ this.operationTimeout = this.conf.getInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT,
+ HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT);
+
this.rpcCallerFactory = RpcRetryingCallerFactory.instantiate(this.conf);
}
@@ -3315,7 +3324,7 @@ public long sleep(long pause, int tries) {
private <V> V executeCallable(MasterCallable<V> callable) throws IOException {
RpcRetryingCaller<V> caller = rpcCallerFactory.newCaller();
try {
- return caller.callWithRetries(callable);
+ return caller.callWithRetries(callable, operationTimeout);
} finally {
callable.close();
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaScanner.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaScanner.java
index 1a6ee4c59e2e..62fb9ccc73b4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaScanner.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaScanner.java
@@ -36,6 +36,7 @@
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ExceptionUtil;
/**
* Scanner class that contains the <code>hbase:meta</code> table scanning logic.
@@ -189,6 +190,7 @@ static void metaScan(Configuration configuration, ClusterConnection connection,
try {
scanner.close();
} catch (Throwable t) {
+ ExceptionUtil.rethrowIfInterrupt(t);
LOG.debug("Got exception in closing the result scanner", t);
}
}
@@ -196,6 +198,7 @@ static void metaScan(Configuration configuration, ClusterConnection connection,
try {
visitor.close();
} catch (Throwable t) {
+ ExceptionUtil.rethrowIfInterrupt(t);
LOG.debug("Got exception in closing the meta scanner visitor", t);
}
}
@@ -203,6 +206,7 @@ static void metaScan(Configuration configuration, ClusterConnection connection,
try {
metaTable.close();
} catch (Throwable t) {
+ ExceptionUtil.rethrowIfInterrupt(t);
LOG.debug("Got exception in closing the meta table", t);
}
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ReversedClientScanner.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ReversedClientScanner.java
index 618b3b38003b..470ffa132fc6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ReversedClientScanner.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ReversedClientScanner.java
@@ -60,7 +60,7 @@ protected boolean nextScanner(int nbRows, final boolean done)
// Close the previous scanner if it's open
if (this.callable != null) {
this.callable.setClose();
- this.caller.callWithRetries(callable);
+ this.caller.callWithRetries(callable, scannerTimeout);
this.callable = null;
}
@@ -108,7 +108,7 @@ protected boolean nextScanner(int nbRows, final boolean done)
callable = getScannerCallable(localStartKey, nbRows, locateStartRow);
// Open a scanner on the region server starting at the
// beginning of the region
- this.caller.callWithRetries(callable);
+ this.caller.callWithRetries(callable, scannerTimeout);
this.currentRegion = callable.getHRegionInfo();
if (this.scanMetrics != null) {
this.scanMetrics.countOfRegions.incrementAndGet();
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCaller.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCaller.java
index 68faba560ec9..2a9732522079 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCaller.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCaller.java
@@ -42,20 +42,14 @@
* Runs an rpc'ing {@link RetryingCallable}. Sets into rpc client
* threadlocal outstanding timeouts as so we don't persist too much.
* Dynamic rather than static so can set the generic appropriately.
+ *
+ * This object has a state. It should not be used by in parallel by different threads.
+ * Reusing it is possible however, even between multiple threads. However, the user will
+ * have to manage the synchronization on its side: there is no synchronization inside the class.
*/
@InterfaceAudience.Private
[email protected]
- (value = "IS2_INCONSISTENT_SYNC", justification = "na")
public class RpcRetryingCaller<T> {
static final Log LOG = LogFactory.getLog(RpcRetryingCaller.class);
- /**
- * Timeout for the call including retries
- */
- private int callTimeout;
- /**
- * The remaining time, for the call to come. Takes into account the tries already done.
- */
- private int remainingTime;
/**
* When we started making calls.
*/
@@ -70,18 +64,17 @@ public class RpcRetryingCaller<T> {
public RpcRetryingCaller(Configuration conf) {
this.pause = conf.getLong(HConstants.HBASE_CLIENT_PAUSE,
- HConstants.DEFAULT_HBASE_CLIENT_PAUSE);
+ HConstants.DEFAULT_HBASE_CLIENT_PAUSE);
this.retries =
conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER,
HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER);
- this.callTimeout = conf.getInt(
- HConstants.HBASE_CLIENT_OPERATION_TIMEOUT,
- HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT);
}
- private void beforeCall() {
- if (callTimeout > 0) {
- remainingTime = (int) (callTimeout -
+ private int getRemainingTime(int callTimeout) {
+ if (callTimeout <= 0) {
+ return 0;
+ } else {
+ int remainingTime = (int) (callTimeout -
(EnvironmentEdgeManager.currentTimeMillis() - this.globalStartTime));
if (remainingTime < MIN_RPC_TIMEOUT) {
// If there is no time left, we're trying anyway. It's too late.
@@ -89,17 +82,10 @@ private void beforeCall() {
// resetting to the minimum.
remainingTime = MIN_RPC_TIMEOUT;
}
- } else {
- remainingTime = 0;
+ return remainingTime;
}
}
-
- public synchronized T callWithRetries(RetryingCallable<T> callable) throws IOException,
- RuntimeException {
- return callWithRetries(callable, HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT);
- }
-
/**
* Retries if invocation fails.
* @param callTimeout Timeout for this call
@@ -108,11 +94,8 @@ public synchronized T callWithRetries(RetryingCallable<T> callable) throws IOExc
* @throws IOException if a remote or network exception occurs
* @throws RuntimeException other unspecified error
*/
- @edu.umd.cs.findbugs.annotations.SuppressWarnings
- (value = "SWL_SLEEP_WITH_LOCK_HELD", justification = "na")
- public synchronized T callWithRetries(RetryingCallable<T> callable, int callTimeout)
+ public T callWithRetries(RetryingCallable<T> callable, int callTimeout)
throws IOException, RuntimeException {
- this.callTimeout = callTimeout;
List<RetriesExhaustedException.ThrowableWithExtraContext> exceptions =
new ArrayList<RetriesExhaustedException.ThrowableWithExtraContext>();
this.globalStartTime = EnvironmentEdgeManager.currentTimeMillis();
@@ -120,8 +103,7 @@ public synchronized T callWithRetries(RetryingCallable<T> callable, int callTime
long expectedSleep;
try {
callable.prepare(tries != 0); // if called with false, check table status on ZK
- beforeCall();
- return callable.call(remainingTime);
+ return callable.call(getRemainingTime(callTimeout));
} catch (Throwable t) {
ExceptionUtil.rethrowIfInterrupt(t);
if (LOG.isTraceEnabled()) {
@@ -145,8 +127,8 @@ public synchronized T callWithRetries(RetryingCallable<T> callable, int callTime
// If, after the planned sleep, there won't be enough time left, we stop now.
long duration = singleCallDuration(expectedSleep);
- if (duration > this.callTimeout) {
- String msg = "callTimeout=" + this.callTimeout + ", callDuration=" + duration +
+ if (duration > callTimeout) {
+ String msg = "callTimeout=" + callTimeout + ", callDuration=" + duration +
": " + callable.getExceptionMessageAdditionalDetail();
throw (SocketTimeoutException)(new SocketTimeoutException(msg).initCause(t));
}
@@ -163,8 +145,7 @@ public synchronized T callWithRetries(RetryingCallable<T> callable, int callTime
* @return Calculate how long a single call took
*/
private long singleCallDuration(final long expectedSleep) {
- return (EnvironmentEdgeManager.currentTimeMillis() - this.globalStartTime)
- + MIN_RPC_TIMEOUT + expectedSleep;
+ return (EnvironmentEdgeManager.currentTimeMillis() - this.globalStartTime) + expectedSleep;
}
/**
@@ -176,7 +157,7 @@ private long singleCallDuration(final long expectedSleep) {
* @throws IOException if a remote or network exception occurs
* @throws RuntimeException other unspecified error
*/
- public T callWithoutRetries(RetryingCallable<T> callable)
+ public T callWithoutRetries(RetryingCallable<T> callable, int callTimeout)
throws IOException, RuntimeException {
// The code of this method should be shared with withRetries.
this.globalStartTime = EnvironmentEdgeManager.currentTimeMillis();
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java
index f28feac23eb2..e627662c34b2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java
@@ -24,6 +24,7 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.RegionServerCallable;
@@ -52,6 +53,7 @@ public class RegionCoprocessorRpcChannel extends CoprocessorRpcChannel{
private final TableName table;
private final byte[] row;
private byte[] lastRegion;
+ private int operationTimeout;
private RpcRetryingCallerFactory rpcFactory;
@@ -60,6 +62,9 @@ public RegionCoprocessorRpcChannel(HConnection conn, TableName table, byte[] row
this.table = table;
this.row = row;
this.rpcFactory = RpcRetryingCallerFactory.instantiate(conn.getConfiguration());
+ this.operationTimeout = conn.getConfiguration().getInt(
+ HConstants.HBASE_CLIENT_OPERATION_TIMEOUT,
+ HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT);
}
@Override
@@ -88,7 +93,7 @@ public CoprocessorServiceResponse call(int callTimeout) throws Exception {
}
};
CoprocessorServiceResponse result = rpcFactory.<CoprocessorServiceResponse> newCaller()
- .callWithRetries(callable);
+ .callWithRetries(callable, operationTimeout);
Message response = null;
if (result.getValue().hasValue()) {
response = responsePrototype.newBuilderForType()
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
index 66c51721f9a4..1ff1f01dd173 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
@@ -149,7 +149,8 @@ protected RpcRetryingCaller<MultiResponse> createCaller(MultiServerCallable<Row>
callable.getMulti(), nbMultiResponse, nbActions);
return new RpcRetryingCaller<MultiResponse>(conf) {
@Override
- public MultiResponse callWithoutRetries( RetryingCallable<MultiResponse> callable)
+ public MultiResponse callWithoutRetries(RetryingCallable<MultiResponse> callable,
+ int callTimeout)
throws IOException, RuntimeException {
try {
// sleep one second in order for threadpool to start another thread instead of reusing
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
index 43e167f0aaf1..d49146a6ed45 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
@@ -630,7 +630,7 @@ public Boolean call(int callTimeout) throws Exception {
List<LoadQueueItem> toRetry = new ArrayList<LoadQueueItem>();
Configuration conf = getConf();
boolean success = RpcRetryingCallerFactory.instantiate(conf).<Boolean> newCaller()
- .callWithRetries(svrCallable);
+ .callWithRetries(svrCallable, Integer.MAX_VALUE);
if (!success) {
LOG.warn("Attempt to bulk load region containing "
+ Bytes.toStringBinary(first) + " into table "
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin.java
index ea56574f52de..38194f970edd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin.java
@@ -1552,7 +1552,6 @@ private void setUpforLogRolling() {
TEST_UTIL.getConfiguration().setInt(
"hbase.regionserver.logroll.errors.tolerated", 2);
- TEST_UTIL.getConfiguration().setInt("ipc.socket.timeout", 10 * 1000);
TEST_UTIL.getConfiguration().setInt("hbase.rpc.timeout", 10 * 1000);
// For less frequently updated regions flush after every 2 flushes
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java
index 89ce874495fa..c1d93a2e788f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java
@@ -170,7 +170,7 @@ public Void call(int callTimeout) throws Exception {
};
RpcRetryingCallerFactory factory = new RpcRetryingCallerFactory(conf);
RpcRetryingCaller<Void> caller = factory.<Void> newCaller();
- caller.callWithRetries(callable);
+ caller.callWithRetries(callable, Integer.MAX_VALUE);
// Periodically do compaction to reduce the number of open file handles.
if (numBulkLoads.get() % 10 == 0) {
@@ -190,7 +190,7 @@ public Void call(int callTimeout) throws Exception {
return null;
}
};
- caller.callWithRetries(callable);
+ caller.callWithRetries(callable, Integer.MAX_VALUE);
}
}
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java
index 590481f15372..4a1b0f5f625e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java
@@ -60,7 +60,6 @@ public static void setUpBeforeClass() throws Exception {
// Tweak default timeout values down for faster recovery
TEST_UTIL.getConfiguration().setInt(
"hbase.regionserver.logroll.errors.tolerated", 2);
- TEST_UTIL.getConfiguration().setInt("ipc.socket.timeout", 10 * 1000);
TEST_UTIL.getConfiguration().setInt("hbase.rpc.timeout", 10 * 1000);
// Increase the amount of time between client retries
|
36f77aa32cbf76057b0f55be6e31711efb418412
|
camel
|
CAMEL-3114: Fixed rss component bug with uri- encoding.--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@998608 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/camel
|
diff --git a/components/camel-rss/src/main/java/org/apache/camel/component/rss/RssComponent.java b/components/camel-rss/src/main/java/org/apache/camel/component/rss/RssComponent.java
index 3f4c2a6a1626c..fbd870091ce85 100644
--- a/components/camel-rss/src/main/java/org/apache/camel/component/rss/RssComponent.java
+++ b/components/camel-rss/src/main/java/org/apache/camel/component/rss/RssComponent.java
@@ -16,13 +16,12 @@
*/
package org.apache.camel.component.rss;
-import java.net.URI;
+import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.camel.Endpoint;
import org.apache.camel.component.feed.FeedComponent;
import org.apache.camel.component.feed.FeedEndpoint;
-import org.apache.camel.util.CastUtils;
import org.apache.camel.util.URISupport;
/**
@@ -48,8 +47,9 @@ protected void afterConfiguration(String uri, String remaining, Endpoint endpoin
// for the http feed
String feedUri;
if (!parameters.isEmpty()) {
- URI remainingUri = URISupport.createRemainingURI(new URI(remaining), CastUtils.cast(parameters));
- feedUri = remainingUri.toString();
+ Map<Object, Object> options = new LinkedHashMap<Object, Object>(parameters);
+ String query = URISupport.createQueryString(options);
+ feedUri = remaining + "?" + query;
} else {
feedUri = remaining;
}
diff --git a/components/camel-rss/src/test/java/org/apache/camel/component/rss/RssUriEncodingIssueTest.java b/components/camel-rss/src/test/java/org/apache/camel/component/rss/RssUriEncodingIssueTest.java
new file mode 100644
index 0000000000000..2351a148b1775
--- /dev/null
+++ b/components/camel-rss/src/test/java/org/apache/camel/component/rss/RssUriEncodingIssueTest.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.rss;
+
+import org.apache.camel.Exchange;
+import org.apache.camel.PollingConsumer;
+import org.apache.camel.test.junit4.CamelTestSupport;
+import org.junit.Ignore;
+import org.junit.Test;
+
+/**
+ * @version $Revision$
+ */
+@Ignore("Must be online")
+public class RssUriEncodingIssueTest extends CamelTestSupport {
+
+ @Test
+ public void testUriIssue() throws Exception {
+ String uri = "rss:http://api.flickr.com/services/feeds/photos_public.gne?id=23353282@N05&tags=lowlands&lang=en-us&format=rss_200";
+
+ PollingConsumer consumer = context.getEndpoint(uri).createPollingConsumer();
+ consumer.start();
+ Exchange exchange = consumer.receive();
+ log.info("Receive " + exchange);
+ assertNotNull(exchange);
+ assertNotNull(exchange.getIn().getBody());
+ consumer.stop();
+ }
+
+}
|
89c5498bdfd50a213b8d8e46324266883f6209ea
|
camel
|
Added another test--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@921999 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/camel
|
diff --git a/camel-core/src/test/java/org/apache/camel/util/CaseInsensitiveMapTest.java b/camel-core/src/test/java/org/apache/camel/util/CaseInsensitiveMapTest.java
index 45e7a5eb2b788..dedc37c2a296d 100644
--- a/camel-core/src/test/java/org/apache/camel/util/CaseInsensitiveMapTest.java
+++ b/camel-core/src/test/java/org/apache/camel/util/CaseInsensitiveMapTest.java
@@ -279,7 +279,7 @@ public void testRomeksUsingRegularHashMap() {
assertEquals("cake", map.get("FOO"));
}
- public void testRomeksTransferedToHashMapAfterwards() {
+ public void testRomeksTransferredToHashMapAfterwards() {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("Foo", "cheese");
map.put("FOO", "cake");
@@ -349,4 +349,21 @@ public void testCopyToAnotherMapPreserveKeyCasePutAll() {
assertEquals(2, other.size());
}
+ public void testCopyToAnotherMapPreserveKeyCaseCtr() {
+ Map<String, Object> map = new CaseInsensitiveMap();
+ map.put("Foo", "cheese");
+ map.put("BAR", "cake");
+ assertEquals(2, map.size());
+ assertEquals(true, map.containsKey("foo"));
+ assertEquals(true, map.containsKey("bar"));
+
+ Map<String, Object> other = new HashMap<String, Object>(map);
+
+ assertEquals(false, other.containsKey("foo"));
+ assertEquals(true, other.containsKey("Foo"));
+ assertEquals(false, other.containsKey("bar"));
+ assertEquals(true, other.containsKey("BAR"));
+ assertEquals(2, other.size());
+ }
+
}
\ No newline at end of file
|
ec0e8cf1fb4f2b99d8677666df38234a621158d2
|
camel
|
Fixed test--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@962779 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/camel
|
diff --git a/camel-core/src/test/java/org/apache/camel/processor/ShutdownCompleteAllTasksTest.java b/camel-core/src/test/java/org/apache/camel/processor/ShutdownCompleteAllTasksTest.java
index d894f019a8a0d..511a8ad2eb90c 100644
--- a/camel-core/src/test/java/org/apache/camel/processor/ShutdownCompleteAllTasksTest.java
+++ b/camel-core/src/test/java/org/apache/camel/processor/ShutdownCompleteAllTasksTest.java
@@ -45,19 +45,18 @@ public void testShutdownCompleteAllTasks() throws Exception {
// give it 20 seconds to shutdown
context.getShutdownStrategy().setTimeout(20);
- // start route which will pickup the 5 files
- context.startRoute("route1");
-
MockEndpoint bar = getMockEndpoint("mock:bar");
bar.expectedMinimumMessageCount(1);
assertMockEndpointsSatisfied();
+ int batch = bar.getReceivedExchanges().get(0).getProperty(Exchange.BATCH_SIZE, int.class);
+
// shutdown during processing
context.stop();
// should route all 5
- assertEquals("Should complete all messages", 5, bar.getReceivedCounter());
+ assertEquals("Should complete all messages", batch, bar.getReceivedCounter());
}
@Override
@@ -66,7 +65,7 @@ protected RouteBuilder createRouteBuilder() throws Exception {
@Override
// START SNIPPET: e1
public void configure() throws Exception {
- from(url).routeId("route1").noAutoStartup()
+ from(url)
// let it complete all tasks during shutdown
.shutdownRunningTask(ShutdownRunningTask.CompleteAllTasks)
.delay(1000).to("seda:foo");
diff --git a/camel-core/src/test/java/org/apache/camel/processor/ShutdownCompleteCurrentTaskOnlyTest.java b/camel-core/src/test/java/org/apache/camel/processor/ShutdownCompleteCurrentTaskOnlyTest.java
index a06b8ada3ec05..880d61e4efa2c 100644
--- a/camel-core/src/test/java/org/apache/camel/processor/ShutdownCompleteCurrentTaskOnlyTest.java
+++ b/camel-core/src/test/java/org/apache/camel/processor/ShutdownCompleteCurrentTaskOnlyTest.java
@@ -45,9 +45,6 @@ public void testShutdownCompleteCurrentTaskOnly() throws Exception {
// give it 20 seconds to shutdown
context.getShutdownStrategy().setTimeout(20);
- // start route which will pickup the 5 files
- context.startRoute("route1");
-
MockEndpoint bar = getMockEndpoint("mock:bar");
bar.expectedMinimumMessageCount(1);
@@ -65,7 +62,7 @@ protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
- from(url).routeId("route1").noAutoStartup()
+ from(url)
// let it complete only current task so we shutdown faster
.shutdownRunningTask(ShutdownRunningTask.CompleteCurrentTaskOnly)
.delay(1000).to("seda:foo");
|
4cfc90590c1e54c88fc5a683b061258dd897da49
|
hadoop
|
YARN-2065 AM cannot create new containers after- restart--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1607440 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 872997c372050..f223854b88fc6 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -303,6 +303,9 @@ Release 2.5.0 - UNRELEASED
YARN-2216 TestRMApplicationHistoryWriter sometimes fails in trunk.
(Zhijie Shen via xgong)
+ YARN-2216 YARN-2065 AM cannot create new containers after restart
+ (Jian He via stevel)
+
Release 2.4.1 - 2014-06-23
INCOMPATIBLE CHANGES
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java
index ded2013bfc90b..1e155d27b84c9 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java
@@ -475,8 +475,8 @@ protected void authorizeStartRequest(NMTokenIdentifier nmTokenIdentifier,
boolean unauthorized = false;
StringBuilder messageBuilder =
new StringBuilder("Unauthorized request to start container. ");
- if (!nmTokenIdentifier.getApplicationAttemptId().equals(
- containerId.getApplicationAttemptId())) {
+ if (!nmTokenIdentifier.getApplicationAttemptId().getApplicationId().equals(
+ containerId.getApplicationAttemptId().getApplicationId())) {
unauthorized = true;
messageBuilder.append("\nNMToken for application attempt : ")
.append(nmTokenIdentifier.getApplicationAttemptId())
@@ -810,26 +810,24 @@ protected void authorizeGetAndStopContainerRequest(ContainerId containerId,
* belongs to the same application attempt (NMToken) which was used. (Note:-
* This will prevent user in knowing another application's containers).
*/
-
- if ((!identifier.getApplicationAttemptId().equals(
- containerId.getApplicationAttemptId()))
- || (container != null && !identifier.getApplicationAttemptId().equals(
- container.getContainerId().getApplicationAttemptId()))) {
+ ApplicationId nmTokenAppId =
+ identifier.getApplicationAttemptId().getApplicationId();
+ if ((!nmTokenAppId.equals(containerId.getApplicationAttemptId().getApplicationId()))
+ || (container != null && !nmTokenAppId.equals(container
+ .getContainerId().getApplicationAttemptId().getApplicationId()))) {
if (stopRequest) {
LOG.warn(identifier.getApplicationAttemptId()
+ " attempted to stop non-application container : "
- + container.getContainerId().toString());
+ + container.getContainerId());
NMAuditLogger.logFailure("UnknownUser", AuditConstants.STOP_CONTAINER,
"ContainerManagerImpl", "Trying to stop unknown container!",
- identifier.getApplicationAttemptId().getApplicationId(),
- container.getContainerId());
+ nmTokenAppId, container.getContainerId());
} else {
LOG.warn(identifier.getApplicationAttemptId()
+ " attempted to get status for non-application container : "
- + container.getContainerId().toString());
+ + container.getContainerId());
}
}
-
}
class ContainerEventDispatcher implements EventHandler<ContainerEvent> {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java
index d607079235cf0..6797165dfe09f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java
@@ -202,8 +202,6 @@ private void testNMTokens(Configuration conf) throws Exception {
ApplicationId appId = ApplicationId.newInstance(1, 1);
ApplicationAttemptId validAppAttemptId =
ApplicationAttemptId.newInstance(appId, 1);
- ApplicationAttemptId invalidAppAttemptId =
- ApplicationAttemptId.newInstance(appId, 2);
ContainerId validContainerId =
ContainerId.newInstance(validAppAttemptId, 0);
@@ -269,26 +267,14 @@ private void testNMTokens(Configuration conf) throws Exception {
testStartContainer(rpc, validAppAttemptId, validNode,
validContainerToken, invalidNMToken, true)));
- // using appAttempt-2 token for launching container for appAttempt-1.
- invalidNMToken =
- nmTokenSecretManagerRM.createNMToken(invalidAppAttemptId, validNode,
- user);
- sb = new StringBuilder("\nNMToken for application attempt : ");
- sb.append(invalidAppAttemptId.toString())
- .append(" was used for starting container with container token")
- .append(" issued for application attempt : ")
- .append(validAppAttemptId.toString());
- Assert.assertTrue(testStartContainer(rpc, validAppAttemptId, validNode,
- validContainerToken, invalidNMToken, true).contains(sb.toString()));
-
// using correct tokens. nmtoken for app attempt should get saved.
conf.setInt(YarnConfiguration.RM_CONTAINER_ALLOC_EXPIRY_INTERVAL_MS,
4 * 60 * 1000);
validContainerToken =
containerTokenSecretManager.createContainerToken(validContainerId,
validNode, user, r, Priority.newInstance(0), 0);
- testStartContainer(rpc, validAppAttemptId, validNode, validContainerToken,
- validNMToken, false);
+ Assert.assertTrue(testStartContainer(rpc, validAppAttemptId, validNode,
+ validContainerToken, validNMToken, false).isEmpty());
Assert.assertTrue(nmTokenSecretManagerNM
.isAppAttemptNMTokenKeyPresent(validAppAttemptId));
@@ -330,6 +316,18 @@ private void testNMTokens(Configuration conf) throws Exception {
Assert.assertTrue(testGetContainer(rpc, validAppAttemptId, validNode,
validContainerId, validNMToken, false).contains(sb.toString()));
+ // using appAttempt-1 NMtoken for launching container for appAttempt-2 should
+ // succeed.
+ ApplicationAttemptId attempt2 = ApplicationAttemptId.newInstance(appId, 2);
+ Token attempt1NMToken =
+ nmTokenSecretManagerRM
+ .createNMToken(validAppAttemptId, validNode, user);
+ org.apache.hadoop.yarn.api.records.Token newContainerToken =
+ containerTokenSecretManager.createContainerToken(
+ ContainerId.newInstance(attempt2, 1), validNode, user, r,
+ Priority.newInstance(0), 0);
+ Assert.assertTrue(testStartContainer(rpc, attempt2, validNode,
+ newContainerToken, attempt1NMToken, false).isEmpty());
}
private void waitForContainerToFinishOnNM(ContainerId containerId) {
|
f582aa449210110546147851f529e2fbb337aa0e
|
hadoop
|
HDFS-2154. In TestDFSShell, use TEST_ROOT_DIR and- fix some deprecated warnings.--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1147184 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/hadoop
|
diff --git a/hdfs/CHANGES.txt b/hdfs/CHANGES.txt
index efcf7b44b23f8..95f14a7789810 100644
--- a/hdfs/CHANGES.txt
+++ b/hdfs/CHANGES.txt
@@ -554,6 +554,9 @@ Trunk (unreleased changes)
HDFS-2140. Move Host2NodesMap to the blockmanagement package. (szetszwo)
+ HDFS-2154. In TestDFSShell, use TEST_ROOT_DIR and fix some deprecated
+ warnings. (szetszwo)
+
OPTIMIZATIONS
HDFS-1458. Improve checkpoint performance by avoiding unnecessary image
diff --git a/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSShell.java b/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSShell.java
index d99bf04fcb37b..bd4961afc0d2b 100644
--- a/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSShell.java
+++ b/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSShell.java
@@ -43,7 +43,6 @@
import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.fs.shell.Count;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.datanode.FSDataset;
@@ -701,10 +700,10 @@ public void testCount() throws Exception {
String root = createTree(dfs, "count");
// Verify the counts
- runCount(root, 2, 4, conf);
- runCount(root + "2", 2, 1, conf);
- runCount(root + "2/f1", 0, 1, conf);
- runCount(root + "2/sub", 1, 0, conf);
+ runCount(root, 2, 4, shell);
+ runCount(root + "2", 2, 1, shell);
+ runCount(root + "2/f1", 0, 1, shell);
+ runCount(root + "2/sub", 1, 0, shell);
final FileSystem localfs = FileSystem.getLocal(conf);
Path localpath = new Path(TEST_ROOT_DIR, "testcount");
@@ -714,8 +713,8 @@ public void testCount() throws Exception {
final String localstr = localpath.toString();
System.out.println("localstr=" + localstr);
- runCount(localstr, 1, 0, conf);
- assertEquals(0, new Count(new String[]{root, localstr}, 0, conf).runAll());
+ runCount(localstr, 1, 0, shell);
+ assertEquals(0, runCmd(shell, "-count", root, localstr));
} finally {
try {
dfs.close();
@@ -724,7 +723,7 @@ public void testCount() throws Exception {
cluster.shutdown();
}
}
- private void runCount(String path, long dirs, long files, Configuration conf
+ private static void runCount(String path, long dirs, long files, FsShell shell
) throws IOException {
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
PrintStream out = new PrintStream(bytes);
@@ -733,7 +732,7 @@ private void runCount(String path, long dirs, long files, Configuration conf
Scanner in = null;
String results = null;
try {
- new Count(new String[]{path}, 0, conf).runAll();
+ runCmd(shell, "-count", path);
results = bytes.toString();
in = new Scanner(results);
assertEquals(dirs, in.nextLong());
@@ -747,7 +746,7 @@ private void runCount(String path, long dirs, long files, Configuration conf
}
//throws IOException instead of Exception as shell.run() does.
- private int runCmd(FsShell shell, String... args) throws IOException {
+ private static int runCmd(FsShell shell, String... args) throws IOException {
StringBuilder cmdline = new StringBuilder("RUN:");
for (String arg : args) cmdline.append(" " + arg);
LOG.info(cmdline.toString());
@@ -1362,48 +1361,46 @@ public void testCopyCommandsWithForceOption() throws Exception {
.format(true).build();
FsShell shell = null;
FileSystem fs = null;
- File localFile = new File("testFileForPut");
- Path hdfsTestDir = new Path("ForceTestDir");
+ final File localFile = new File(TEST_ROOT_DIR, "testFileForPut");
+ final String localfilepath = localFile.getAbsolutePath();
+ final String testdir = TEST_ROOT_DIR + "/ForceTestDir";
+ final Path hdfsTestDir = new Path(testdir);
try {
fs = cluster.getFileSystem();
fs.mkdirs(hdfsTestDir);
localFile.createNewFile();
- writeFile(fs, new Path("testFileForPut"));
+ writeFile(fs, new Path(TEST_ROOT_DIR, "testFileForPut"));
shell = new FsShell();
// Tests for put
- String[] argv = new String[] { "-put", "-f", localFile.getName(),
- "ForceTestDir" };
+ String[] argv = new String[] { "-put", "-f", localfilepath, testdir };
int res = ToolRunner.run(shell, argv);
int SUCCESS = 0;
int ERROR = 1;
assertEquals("put -f is not working", SUCCESS, res);
- argv = new String[] { "-put", localFile.getName(), "ForceTestDir" };
+ argv = new String[] { "-put", localfilepath, testdir };
res = ToolRunner.run(shell, argv);
assertEquals("put command itself is able to overwrite the file", ERROR,
res);
// Tests for copyFromLocal
- argv = new String[] { "-copyFromLocal", "-f", localFile.getName(),
- "ForceTestDir" };
+ argv = new String[] { "-copyFromLocal", "-f", localfilepath, testdir };
res = ToolRunner.run(shell, argv);
assertEquals("copyFromLocal -f is not working", SUCCESS, res);
- argv = new String[] { "-copyFromLocal", localFile.getName(),
- "ForceTestDir" };
+ argv = new String[] { "-copyFromLocal", localfilepath, testdir };
res = ToolRunner.run(shell, argv);
assertEquals(
"copyFromLocal command itself is able to overwrite the file", ERROR,
res);
// Tests for cp
- argv = new String[] { "-cp", "-f", localFile.getName(), "ForceTestDir" };
+ argv = new String[] { "-cp", "-f", localfilepath, testdir };
res = ToolRunner.run(shell, argv);
assertEquals("cp -f is not working", SUCCESS, res);
- argv = new String[] { "-cp", localFile.getName(),
- "ForceTestDir" };
+ argv = new String[] { "-cp", localfilepath, testdir };
res = ToolRunner.run(shell, argv);
assertEquals("cp command itself is able to overwrite the file", ERROR,
res);
|
6d6e2da437fe7857251c6fc0f2058655e04ed400
|
restlet-framework-java
|
- Fixed latest issues. Submitted by Thierry- Boileau.--
|
c
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/module/com.noelios.restlet.ext.jdbc_3.0/src/com/noelios/restlet/ext/jdbc/RowSetRepresentation.java b/module/com.noelios.restlet.ext.jdbc_3.0/src/com/noelios/restlet/ext/jdbc/RowSetRepresentation.java
index 12a99ac375..21bce52839 100644
--- a/module/com.noelios.restlet.ext.jdbc_3.0/src/com/noelios/restlet/ext/jdbc/RowSetRepresentation.java
+++ b/module/com.noelios.restlet.ext.jdbc_3.0/src/com/noelios/restlet/ext/jdbc/RowSetRepresentation.java
@@ -64,8 +64,6 @@ private static WebRowSet create(ResultSet resultSet) throws SQLException {
result.populate(resultSet);
}
- result.release();
-
return result;
}
|
b9ab5a6392651b25554ed554c8b262b20e5a40c5
|
drools
|
-testRemovePackage now works.--git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@7100 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-
|
c
|
https://github.com/kiegroup/drools
|
diff --git a/drools-compiler/src/test/java/org/drools/Precondition.java b/drools-compiler/src/test/java/org/drools/Precondition.java
index 508c384227f..4772976fb17 100755
--- a/drools-compiler/src/test/java/org/drools/Precondition.java
+++ b/drools-compiler/src/test/java/org/drools/Precondition.java
@@ -4,6 +4,10 @@ public class Precondition {
private String code;
private String value;
+ public Precondition() {
+
+ }
+
public Precondition(String code, String value) {
super();
this.code = code;
|
9d3a1f0f0f6f7c1781e7f6b5785e23db6eb5703a
|
intellij-community
|
move statement should be enabled when moving to- the end of file--
|
c
|
https://github.com/JetBrains/intellij-community
|
diff --git a/platform/lang-impl/src/com/intellij/codeInsight/editorActions/moveUpDown/BaseMoveHandler.java b/platform/lang-impl/src/com/intellij/codeInsight/editorActions/moveUpDown/BaseMoveHandler.java
index 7670facb22bad..6c8cbefcecf7f 100644
--- a/platform/lang-impl/src/com/intellij/codeInsight/editorActions/moveUpDown/BaseMoveHandler.java
+++ b/platform/lang-impl/src/com/intellij/codeInsight/editorActions/moveUpDown/BaseMoveHandler.java
@@ -65,7 +65,7 @@ public boolean isEnabled(Editor editor, DataContext dataContext) {
final LineRange range = mover.getInfo().toMove;
if (range.startLine == 0 && !isDown) return false;
- return range.endLine < maxLine || !isDown;
+ return range.endLine <= maxLine || !isDown;
}
@Nullable
|
14bc62302f5db980c5c21bcb64af047ca68241b5
|
restlet-framework-java
|
- The CLAP connector didn't set the- expiration date based on its 'timeToLive' parameter. Reported by- Peter Becker.--
|
c
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/modules/org.restlet.example/src/org/restlet/example/jaxrs/GuardedExample.java b/modules/org.restlet.example/src/org/restlet/example/jaxrs/GuardedExample.java
index 845d842af6..5d88624c9b 100644
--- a/modules/org.restlet.example/src/org/restlet/example/jaxrs/GuardedExample.java
+++ b/modules/org.restlet.example/src/org/restlet/example/jaxrs/GuardedExample.java
@@ -32,14 +32,14 @@
import org.restlet.Component;
import org.restlet.Server;
-import org.restlet.security.ChallengeGuard;
-import org.restlet.security.MemoryRealm;
-import org.restlet.security.Organization;
-import org.restlet.security.User;
import org.restlet.data.ChallengeScheme;
import org.restlet.data.Protocol;
import org.restlet.ext.jaxrs.JaxRsApplication;
import org.restlet.ext.jaxrs.RoleChecker;
+import org.restlet.security.ChallengeGuard;
+import org.restlet.security.MemoryRealm;
+import org.restlet.security.Organization;
+import org.restlet.security.User;
/**
* <p>
@@ -61,6 +61,7 @@
* @see ExampleServer
* @see ExampleApplication
*/
+@SuppressWarnings("deprecation")
public class GuardedExample {
/**
diff --git a/modules/org.restlet/src/org/restlet/engine/local/ClapClientHelper.java b/modules/org.restlet/src/org/restlet/engine/local/ClapClientHelper.java
index 0a995072d9..282106ea99 100644
--- a/modules/org.restlet/src/org/restlet/engine/local/ClapClientHelper.java
+++ b/modules/org.restlet/src/org/restlet/engine/local/ClapClientHelper.java
@@ -168,6 +168,16 @@ protected void handleClassLoader(Request request, Response response,
output.setIdentifier(request.getResourceRef());
output.setModificationDate(modificationDate);
+ // Update the expiration date
+ long timeToLive = getTimeToLive();
+ if (timeToLive == 0) {
+ output.setExpirationDate(new Date());
+ } else if (timeToLive > 0) {
+ output.setExpirationDate(new Date(System
+ .currentTimeMillis()
+ + (1000L * timeToLive)));
+ }
+
// Update the metadata based on file extensions
final String name = path
.substring(path.lastIndexOf('/') + 1);
diff --git a/modules/org.restlet/src/org/restlet/engine/local/LocalClientHelper.java b/modules/org.restlet/src/org/restlet/engine/local/LocalClientHelper.java
index c91301f4de..7f5f217b66 100644
--- a/modules/org.restlet/src/org/restlet/engine/local/LocalClientHelper.java
+++ b/modules/org.restlet/src/org/restlet/engine/local/LocalClientHelper.java
@@ -39,7 +39,6 @@
import org.restlet.representation.Variant;
import org.restlet.service.MetadataService;
-
/**
* Connector to the local resources accessible via file system, class loaders
* and similar mechanisms. Here is the list of parameters that are supported:
@@ -54,7 +53,8 @@
* <td>timeToLive</td>
* <td>int</td>
* <td>600</td>
- * <td>Time to live for a file representation before it expires (in seconds).</td>
+ * <td>Time to live for a representation before it expires (in seconds). If you
+ * set the value to '0', the representation will never expire.</td>
* </tr>
* <tr>
* <td>defaultLanguage</td>
|
65a5e2cc46b2c04591aa59b7a85751479a7dbd0b
|
hadoop
|
YARN-1936. Added security support for the Timeline- Client. Contributed by Zhijie Shen. svn merge --ignore-ancestry -c 1597153- ../../trunk/--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1597154 13f79535-47bb-0310-9956-ffa450edef68-
|
a
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 32c042622ceb3..02b922616b362 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -78,6 +78,9 @@ Release 2.5.0 - UNRELEASED
YARN-2049. Added delegation-token support for the Timeline Server. (Zhijie
Shen via vinodkv)
+ YARN-1936. Added security support for the Timeline Client. (Zhijie Shen via
+ vinodkv)
+
OPTIMIZATIONS
BUG FIXES
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java
index a2ed3e70a51c9..de1d3e2ae53ff 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java
@@ -23,11 +23,13 @@
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
import org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl;
import org.apache.hadoop.yarn.exceptions.YarnException;
+import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier;
/**
* A client library that can be used to post some information in terms of a
@@ -65,4 +67,22 @@ protected TimelineClient(String name) {
public abstract TimelinePutResponse putEntities(
TimelineEntity... entities) throws IOException, YarnException;
+ /**
+ * <p>
+ * Get a delegation token so as to be able to talk to the timeline server in a
+ * secure way.
+ * </p>
+ *
+ * @param renewer
+ * Address of the renewer who can renew these tokens when needed by
+ * securely talking to the timeline server
+ * @return a delegation token ({@link Token}) that can be used to talk to the
+ * timeline server
+ * @throws IOException
+ * @throws YarnException
+ */
+ @Public
+ public abstract Token<TimelineDelegationTokenIdentifier> getDelegationToken(
+ String renewer) throws IOException, YarnException;
+
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
index 64cc041aaea54..5ffe17a24a6fb 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
@@ -18,24 +18,43 @@
package org.apache.hadoop.yarn.client.api.impl;
+import java.io.File;
import java.io.IOException;
+import java.net.HttpURLConnection;
import java.net.URI;
+import java.net.URL;
import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
import javax.ws.rs.core.MediaType;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Options;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
+import org.apache.hadoop.security.authentication.client.AuthenticationException;
+import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
import org.apache.hadoop.yarn.client.api.TimelineClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
+import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
+import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier;
+import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenSelector;
+import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider;
+import org.codehaus.jackson.map.ObjectMapper;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
@@ -44,6 +63,8 @@
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.api.client.config.ClientConfig;
import com.sun.jersey.api.client.config.DefaultClientConfig;
+import com.sun.jersey.client.urlconnection.HttpURLConnectionFactory;
+import com.sun.jersey.client.urlconnection.URLConnectionClientHandler;
@Private
@Unstable
@@ -52,16 +73,29 @@ public class TimelineClientImpl extends TimelineClient {
private static final Log LOG = LogFactory.getLog(TimelineClientImpl.class);
private static final String RESOURCE_URI_STR = "/ws/v1/timeline/";
private static final Joiner JOINER = Joiner.on("");
+ private static Options opts;
+ static {
+ opts = new Options();
+ opts.addOption("put", true, "Put the TimelineEntities in a JSON file");
+ opts.getOption("put").setArgName("Path to the JSON file");
+ opts.addOption("help", false, "Print usage");
+ }
private Client client;
private URI resURI;
private boolean isEnabled;
+ private TimelineAuthenticatedURLConnectionFactory urlFactory;
public TimelineClientImpl() {
super(TimelineClientImpl.class.getName());
ClientConfig cc = new DefaultClientConfig();
cc.getClasses().add(YarnJacksonJaxbJsonProvider.class);
- client = Client.create(cc);
+ if (UserGroupInformation.isSecurityEnabled()) {
+ urlFactory = new TimelineAuthenticatedURLConnectionFactory();
+ client = new Client(new URLConnectionClientHandler(urlFactory), cc);
+ } else {
+ client = Client.create(cc);
+ }
}
protected void serviceInit(Configuration conf) throws Exception {
@@ -83,6 +117,9 @@ protected void serviceInit(Configuration conf) throws Exception {
YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WEBAPP_ADDRESS),
RESOURCE_URI_STR));
}
+ if (UserGroupInformation.isSecurityEnabled()) {
+ urlFactory.setService(TimelineUtils.buildTimelineTokenService(conf));
+ }
LOG.info("Timeline service address: " + resURI);
}
super.serviceInit(conf);
@@ -124,6 +161,13 @@ public TimelinePutResponse putEntities(
return resp.getEntity(TimelinePutResponse.class);
}
+ @Override
+ public Token<TimelineDelegationTokenIdentifier> getDelegationToken(
+ String renewer) throws IOException, YarnException {
+ return TimelineAuthenticator.getDelegationToken(resURI.toURL(),
+ urlFactory.token, renewer);
+ }
+
@Private
@VisibleForTesting
public ClientResponse doPostingEntities(TimelineEntities entities) {
@@ -133,4 +177,138 @@ public ClientResponse doPostingEntities(TimelineEntities entities) {
.post(ClientResponse.class, entities);
}
+ private static class TimelineAuthenticatedURLConnectionFactory
+ implements HttpURLConnectionFactory {
+
+ private AuthenticatedURL.Token token;
+ private TimelineAuthenticator authenticator;
+ private Token<TimelineDelegationTokenIdentifier> dToken;
+ private Text service;
+
+ public TimelineAuthenticatedURLConnectionFactory() {
+ token = new AuthenticatedURL.Token();
+ authenticator = new TimelineAuthenticator();
+ }
+
+ @Override
+ public HttpURLConnection getHttpURLConnection(URL url) throws IOException {
+ try {
+ if (dToken == null) {
+ //TODO: need to take care of the renew case
+ dToken = selectToken();
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Timeline delegation token: " + dToken.toString());
+ }
+ }
+ if (dToken != null) {
+ Map<String, String> params = new HashMap<String, String>();
+ TimelineAuthenticator.injectDelegationToken(params, dToken);
+ url = TimelineAuthenticator.appendParams(url, params);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("URL with delegation token: " + url);
+ }
+ }
+ return new AuthenticatedURL(authenticator).openConnection(url, token);
+ } catch (AuthenticationException e) {
+ LOG.error("Authentication failed when openning connection [" + url
+ + "] with token [" + token + "].", e);
+ throw new IOException(e);
+ }
+ }
+
+ private Token<TimelineDelegationTokenIdentifier> selectToken() {
+ UserGroupInformation ugi;
+ try {
+ ugi = UserGroupInformation.getCurrentUser();
+ } catch (IOException e) {
+ String msg = "Error when getting the current user";
+ LOG.error(msg, e);
+ throw new YarnRuntimeException(msg, e);
+ }
+ TimelineDelegationTokenSelector tokenSelector =
+ new TimelineDelegationTokenSelector();
+ return tokenSelector.selectToken(
+ service, ugi.getCredentials().getAllTokens());
+ }
+
+ public void setService(Text service) {
+ this.service = service;
+ }
+
+ }
+
+ public static void main(String[] argv) throws Exception {
+ CommandLine cliParser = new GnuParser().parse(opts, argv);
+ if (cliParser.hasOption("put")) {
+ String path = cliParser.getOptionValue("put");
+ if (path != null && path.length() > 0) {
+ putTimelineEntitiesInJSONFile(path);
+ return;
+ }
+ }
+ printUsage();
+ }
+
+ /**
+ * Put timeline data in a JSON file via command line.
+ *
+ * @param path
+ * path to the {@link TimelineEntities} JSON file
+ */
+ private static void putTimelineEntitiesInJSONFile(String path) {
+ File jsonFile = new File(path);
+ if (!jsonFile.exists()) {
+ System.out.println("Error: File [" + jsonFile.getAbsolutePath()
+ + "] doesn't exist");
+ return;
+ }
+ ObjectMapper mapper = new ObjectMapper();
+ YarnJacksonJaxbJsonProvider.configObjectMapper(mapper);
+ TimelineEntities entities = null;
+ try {
+ entities = mapper.readValue(jsonFile, TimelineEntities.class);
+ } catch (Exception e) {
+ System.err.println("Error: " + e.getMessage());
+ e.printStackTrace(System.err);
+ return;
+ }
+ Configuration conf = new YarnConfiguration();
+ TimelineClient client = TimelineClient.createTimelineClient();
+ client.init(conf);
+ client.start();
+ try {
+ if (UserGroupInformation.isSecurityEnabled()
+ && conf.getBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, false)) {
+ Token<TimelineDelegationTokenIdentifier> token =
+ client.getDelegationToken(
+ UserGroupInformation.getCurrentUser().getUserName());
+ UserGroupInformation.getCurrentUser().addToken(token);
+ }
+ TimelinePutResponse response = client.putEntities(
+ entities.getEntities().toArray(
+ new TimelineEntity[entities.getEntities().size()]));
+ if (response.getErrors().size() == 0) {
+ System.out.println("Timeline data is successfully put");
+ } else {
+ for (TimelinePutResponse.TimelinePutError error : response.getErrors()) {
+ System.out.println("TimelineEntity [" + error.getEntityType() + ":" +
+ error.getEntityId() + "] is not successfully put. Error code: " +
+ error.getErrorCode());
+ }
+ }
+ } catch (Exception e) {
+ System.err.println("Error: " + e.getMessage());
+ e.printStackTrace(System.err);
+ } finally {
+ client.stop();
+ }
+ }
+
+ /**
+ * Helper function to print out usage
+ */
+ private static void printUsage() {
+ new HelpFormatter().printHelp("TimelineClient", opts);
+ }
+
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/YarnClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/YarnClientImpl.java
index 8a0348b336842..f1a3b6eeceaf3 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/YarnClientImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/YarnClientImpl.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.yarn.client.api.impl;
import java.io.IOException;
+import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
@@ -29,8 +30,13 @@
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.DataInputByteBuffer;
+import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportResponse;
@@ -64,6 +70,7 @@
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.ContainerReport;
import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.api.records.NodeState;
@@ -74,6 +81,7 @@
import org.apache.hadoop.yarn.api.records.YarnClusterMetrics;
import org.apache.hadoop.yarn.client.ClientRMProxy;
import org.apache.hadoop.yarn.client.api.AHSClient;
+import org.apache.hadoop.yarn.client.api.TimelineClient;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.client.api.YarnClientApplication;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
@@ -82,8 +90,10 @@
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.security.AMRMTokenIdentifier;
+import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.util.Records;
+import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
import com.google.common.annotations.VisibleForTesting;
@@ -97,8 +107,11 @@ public class YarnClientImpl extends YarnClient {
protected long submitPollIntervalMillis;
private long asyncApiPollIntervalMillis;
private long asyncApiPollTimeoutMillis;
- protected AHSClient historyClient;
+ private AHSClient historyClient;
private boolean historyServiceEnabled;
+ protected TimelineClient timelineClient;
+ protected Text timelineService;
+ protected boolean timelineServiceEnabled;
private static final String ROOT = "root";
@@ -126,10 +139,17 @@ protected void serviceInit(Configuration conf) throws Exception {
if (conf.getBoolean(YarnConfiguration.APPLICATION_HISTORY_ENABLED,
YarnConfiguration.DEFAULT_APPLICATION_HISTORY_ENABLED)) {
historyServiceEnabled = true;
- historyClient = AHSClientImpl.createAHSClient();
- historyClient.init(getConfig());
+ historyClient = AHSClient.createAHSClient();
+ historyClient.init(conf);
}
+ if (conf.getBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED,
+ YarnConfiguration.DEFAULT_TIMELINE_SERVICE_ENABLED)) {
+ timelineServiceEnabled = true;
+ timelineClient = TimelineClient.createTimelineClient();
+ timelineClient.init(conf);
+ timelineService = TimelineUtils.buildTimelineTokenService(conf);
+ }
super.serviceInit(conf);
}
@@ -141,6 +161,9 @@ protected void serviceStart() throws Exception {
if (historyServiceEnabled) {
historyClient.start();
}
+ if (timelineServiceEnabled) {
+ timelineClient.start();
+ }
} catch (IOException e) {
throw new YarnRuntimeException(e);
}
@@ -155,6 +178,9 @@ protected void serviceStop() throws Exception {
if (historyServiceEnabled) {
historyClient.stop();
}
+ if (timelineServiceEnabled) {
+ timelineClient.stop();
+ }
super.serviceStop();
}
@@ -189,6 +215,12 @@ public YarnClientApplication createApplication()
Records.newRecord(SubmitApplicationRequest.class);
request.setApplicationSubmissionContext(appContext);
+ // Automatically add the timeline DT into the CLC
+ // Only when the security and the timeline service are both enabled
+ if (isSecurityEnabled() && timelineServiceEnabled) {
+ addTimelineDelegationToken(appContext.getAMContainerSpec());
+ }
+
//TODO: YARN-1763:Handle RM failovers during the submitApplication call.
rmClient.submitApplication(request);
@@ -238,6 +270,48 @@ public YarnClientApplication createApplication()
return applicationId;
}
+ private void addTimelineDelegationToken(
+ ContainerLaunchContext clc) throws YarnException, IOException {
+ org.apache.hadoop.security.token.Token<TimelineDelegationTokenIdentifier> timelineDelegationToken =
+ timelineClient.getDelegationToken(
+ UserGroupInformation.getCurrentUser().getUserName());
+ if (timelineDelegationToken == null) {
+ return;
+ }
+ Credentials credentials = new Credentials();
+ DataInputByteBuffer dibb = new DataInputByteBuffer();
+ ByteBuffer tokens = clc.getTokens();
+ if (tokens != null) {
+ dibb.reset(tokens);
+ credentials.readTokenStorageStream(dibb);
+ tokens.rewind();
+ }
+ // If the timeline delegation token is already in the CLC, no need to add
+ // one more
+ for (org.apache.hadoop.security.token.Token<? extends TokenIdentifier> token : credentials
+ .getAllTokens()) {
+ TokenIdentifier tokenIdentifier = token.decodeIdentifier();
+ if (tokenIdentifier instanceof TimelineDelegationTokenIdentifier) {
+ return;
+ }
+ }
+ credentials.addToken(timelineService, timelineDelegationToken);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Add timline delegation token into credentials: "
+ + timelineDelegationToken);
+ }
+ DataOutputBuffer dob = new DataOutputBuffer();
+ credentials.writeTokenStorageToStream(dob);
+ tokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
+ clc.setTokens(tokens);
+ }
+
+ @Private
+ @VisibleForTesting
+ protected boolean isSecurityEnabled() {
+ return UserGroupInformation.isSecurityEnabled();
+ }
+
@Override
public void killApplication(ApplicationId applicationId)
throws YarnException, IOException {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestYarnClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestYarnClient.java
index cfee6f78d0c86..6407f7a1089e8 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestYarnClient.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestYarnClient.java
@@ -25,19 +25,26 @@
import static org.mockito.Mockito.when;
import java.io.IOException;
+import java.nio.ByteBuffer;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
+import java.util.Collection;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
-import org.junit.Assert;
-
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.DataInputByteBuffer;
+import org.apache.hadoop.io.DataOutputBuffer;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportResponse;
@@ -69,19 +76,23 @@
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
+import org.apache.hadoop.yarn.client.api.TimelineClient;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.client.api.YarnClientApplication;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.ApplicationIdNotProvidedException;
import org.apache.hadoop.yarn.exceptions.YarnException;
+import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier;
import org.apache.hadoop.yarn.server.MiniYARNCluster;
import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.util.Records;
+import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
+import org.junit.Assert;
import org.junit.Test;
public class TestYarnClient {
@@ -725,4 +736,80 @@ private void testAsyncAPIPollTimeoutHelper(Long valueForTimeout,
IOUtils.closeQuietly(client);
}
}
+
+ @Test
+ public void testAutomaticTimelineDelegationTokenLoading()
+ throws Exception {
+ Configuration conf = new YarnConfiguration();
+ conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
+ SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, conf);
+ final Token<TimelineDelegationTokenIdentifier> dToken =
+ new Token<TimelineDelegationTokenIdentifier>();
+ // crate a mock client
+ YarnClientImpl client = new YarnClientImpl() {
+ @Override
+ protected void serviceInit(Configuration conf) throws Exception {
+ if (getConfig().getBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED,
+ YarnConfiguration.DEFAULT_TIMELINE_SERVICE_ENABLED)) {
+ timelineServiceEnabled = true;
+ timelineClient = mock(TimelineClient.class);
+ when(timelineClient.getDelegationToken(any(String.class)))
+ .thenReturn(dToken);
+ timelineClient.init(getConfig());
+ timelineService = TimelineUtils.buildTimelineTokenService(getConfig());
+ }
+ this.setConfig(conf);
+ }
+
+ @Override
+ protected void serviceStart() throws Exception {
+ rmClient = mock(ApplicationClientProtocol.class);
+ }
+
+ @Override
+ protected void serviceStop() throws Exception {
+ }
+
+ @Override
+ public ApplicationReport getApplicationReport(ApplicationId appId) {
+ ApplicationReport report = mock(ApplicationReport.class);
+ when(report.getYarnApplicationState())
+ .thenReturn(YarnApplicationState.SUBMITTED);
+ return report;
+ }
+
+ @Override
+ public boolean isSecurityEnabled() {
+ return true;
+ }
+ };
+ client.init(conf);
+ client.start();
+ ApplicationSubmissionContext context =
+ mock(ApplicationSubmissionContext.class);
+ ApplicationId applicationId = ApplicationId.newInstance(0, 1);
+ when(context.getApplicationId()).thenReturn(applicationId);
+ DataOutputBuffer dob = new DataOutputBuffer();
+ Credentials credentials = new Credentials();
+ credentials.writeTokenStorageToStream(dob);
+ ByteBuffer tokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
+ ContainerLaunchContext clc = ContainerLaunchContext.newInstance(
+ null, null, null, null, tokens, null);
+ when(context.getAMContainerSpec()).thenReturn(clc);
+ client.submitApplication(context);
+ // Check whether token is added or not
+ credentials = new Credentials();
+ DataInputByteBuffer dibb = new DataInputByteBuffer();
+ tokens = clc.getTokens();
+ if (tokens != null) {
+ dibb.reset(tokens);
+ credentials.readTokenStorageStream(dibb);
+ tokens.rewind();
+ }
+ Collection<Token<? extends TokenIdentifier>> dTokens =
+ credentials.getAllTokens();
+ Assert.assertEquals(1, dTokens.size());
+ Assert.assertEquals(dToken, dTokens.iterator().next());
+ client.stop();
+ }
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java
index a62ed4869da47..02b5eb4eabdd9 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java
@@ -19,9 +19,14 @@
package org.apache.hadoop.yarn.util.timeline;
import java.io.IOException;
+import java.net.InetSocketAddress;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider;
import org.codehaus.jackson.JsonGenerationException;
import org.codehaus.jackson.map.JsonMappingException;
@@ -78,4 +83,26 @@ public static String dumpTimelineRecordtoJSON(Object o, boolean pretty)
}
}
+ public static InetSocketAddress getTimelineTokenServiceAddress(
+ Configuration conf) {
+ InetSocketAddress timelineServiceAddr = null;
+ if (YarnConfiguration.useHttps(conf)) {
+ timelineServiceAddr = conf.getSocketAddr(
+ YarnConfiguration.TIMELINE_SERVICE_WEBAPP_HTTPS_ADDRESS,
+ YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WEBAPP_HTTPS_ADDRESS,
+ YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WEBAPP_HTTPS_PORT);
+ } else {
+ timelineServiceAddr = conf.getSocketAddr(
+ YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
+ YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WEBAPP_ADDRESS,
+ YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WEBAPP_PORT);
+ }
+ return timelineServiceAddr;
+ }
+
+ public static Text buildTimelineTokenService(Configuration conf) {
+ InetSocketAddress timelineServiceAddr =
+ getTimelineTokenServiceAddress(conf);
+ return SecurityUtil.buildTokenService(timelineServiceAddr);
+ }
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineDelegationTokenSecretManagerService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineDelegationTokenSecretManagerService.java
index fee9eb41cd80a..2808dac60dae6 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineDelegationTokenSecretManagerService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineDelegationTokenSecretManagerService.java
@@ -34,6 +34,7 @@
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier;
+import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
/**
* The service wrapper of {@link TimelineDelegationTokenSecretManager}
@@ -65,17 +66,7 @@ protected void serviceInit(Configuration conf) throws Exception {
3600000);
secretManager.startThreads();
- if (YarnConfiguration.useHttps(getConfig())) {
- serviceAddr = getConfig().getSocketAddr(
- YarnConfiguration.TIMELINE_SERVICE_WEBAPP_HTTPS_ADDRESS,
- YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WEBAPP_HTTPS_ADDRESS,
- YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WEBAPP_HTTPS_PORT);
- } else {
- serviceAddr = getConfig().getSocketAddr(
- YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
- YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WEBAPP_ADDRESS,
- YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WEBAPP_PORT);
- }
+ serviceAddr = TimelineUtils.getTimelineTokenServiceAddress(getConfig());
super.init(conf);
}
|
672a790148381c4a91b8a862959d97096a7bb513
|
intellij-community
|
NPE--
|
c
|
https://github.com/JetBrains/intellij-community
|
diff --git a/lang-api/src/com/intellij/psi/search/PsiSearchScopeUtil.java b/lang-api/src/com/intellij/psi/search/PsiSearchScopeUtil.java
index da5b8a1fe907e..9a7ebb9d0278d 100644
--- a/lang-api/src/com/intellij/psi/search/PsiSearchScopeUtil.java
+++ b/lang-api/src/com/intellij/psi/search/PsiSearchScopeUtil.java
@@ -73,14 +73,14 @@ public static boolean isInScope(SearchScope scope, PsiElement element) {
return false;
}
else {
- GlobalSearchScope _scope = (GlobalSearchScope)scope;
-
+ GlobalSearchScope globalScope = (GlobalSearchScope)scope;
PsiFile file = element.getContainingFile();
if (file != null) {
final PsiElement context = file.getContext();
if (context != null) file = context.getContainingFile();
- if (file.getVirtualFile() == null) return true; //?
- return _scope.contains(file.getVirtualFile());
+ if (file == null) return false;
+ VirtualFile virtualFile = file.getVirtualFile();
+ return virtualFile == null || globalScope.contains(file.getVirtualFile());
}
else {
return true;
|
61466809552f96a83aa19446d4d59cecd0d2cad5
|
hadoop
|
YARN-3094. Reset timer for liveness monitors after- RM recovery. Contributed by Jun Gong (cherry picked from commit- 0af6a99a3fcfa4b47d3bcba5e5cc5fe7b312a152)--
|
c
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index bdddbf36b194d..f3bcb8edee02f 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -482,6 +482,9 @@ Release 2.7.0 - UNRELEASED
YARN-3143. RM Apps REST API can return NPE or entries missing id and other
fields (jlowe)
+ YARN-3094. Reset timer for liveness monitors after RM recovery. (Jun Gong
+ via jianhe)
+
Release 2.6.0 - 2014-11-18
INCOMPATIBLE CHANGES
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/AbstractLivelinessMonitor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/AbstractLivelinessMonitor.java
index c1825319a734b..4f587b348cf11 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/AbstractLivelinessMonitor.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/AbstractLivelinessMonitor.java
@@ -59,6 +59,7 @@ public AbstractLivelinessMonitor(String name, Clock clock) {
@Override
protected void serviceStart() throws Exception {
assert !stopped : "starting when already stopped";
+ resetTimer();
checkerThread = new Thread(new PingChecker());
checkerThread.setName("Ping Checker");
checkerThread.start();
@@ -99,6 +100,13 @@ public synchronized void unregister(O ob) {
running.remove(ob);
}
+ public synchronized void resetTimer() {
+ long time = clock.getTime();
+ for (O ob : running.keySet()) {
+ running.put(ob, time);
+ }
+ }
+
private class PingChecker implements Runnable {
@Override
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java
index 4f242e93ae4e2..a93372a72fe72 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java
@@ -564,12 +564,14 @@ protected void serviceStart() throws Exception {
if(recoveryEnabled) {
try {
+ LOG.info("Recovery started");
rmStore.checkVersion();
if (rmContext.isWorkPreservingRecoveryEnabled()) {
rmContext.setEpoch(rmStore.getAndIncrementEpoch());
}
RMState state = rmStore.loadState();
recover(state);
+ LOG.info("Recovery ended");
} catch (Exception e) {
// the Exception from loadState() needs to be handled for
// HA and we need to give up master status if we got fenced
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/AMLivelinessMonitor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/AMLivelinessMonitor.java
index 2c1f7f1f03e3d..76331bf7fec41 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/AMLivelinessMonitor.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/AMLivelinessMonitor.java
@@ -24,6 +24,7 @@
import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.util.AbstractLivelinessMonitor;
+import org.apache.hadoop.yarn.util.Clock;
import org.apache.hadoop.yarn.util.SystemClock;
public class AMLivelinessMonitor extends AbstractLivelinessMonitor<ApplicationAttemptId> {
@@ -35,6 +36,11 @@ public AMLivelinessMonitor(Dispatcher d) {
this.dispatcher = d.getEventHandler();
}
+ public AMLivelinessMonitor(Dispatcher d, Clock clock) {
+ super("AMLivelinessMonitor", clock);
+ this.dispatcher = d.getEventHandler();
+ }
+
public void serviceInit(Configuration conf) throws Exception {
super.serviceInit(conf);
int expireIntvl = conf.getInt(YarnConfiguration.RM_AM_EXPIRY_INTERVAL_MS,
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/TestAMLivelinessMonitor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/TestAMLivelinessMonitor.java
new file mode 100644
index 0000000000000..e0e6aee022862
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/TestAMLivelinessMonitor.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt;
+
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.service.Service;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.event.Dispatcher;
+import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
+import org.apache.hadoop.yarn.server.resourcemanager.recovery.MemoryRMStateStore;
+import org.apache.hadoop.yarn.util.ControlledClock;
+import org.apache.hadoop.yarn.util.SystemClock;
+import org.junit.Assert;
+import org.junit.Test;
+
+import static org.mockito.Mockito.mock;
+
+public class TestAMLivelinessMonitor {
+
+ @Test(timeout = 10000)
+ public void testResetTimer() throws Exception {
+ YarnConfiguration conf = new YarnConfiguration();
+ UserGroupInformation.setConfiguration(conf);
+ conf.set(YarnConfiguration.RECOVERY_ENABLED, "true");
+ conf.set(YarnConfiguration.RM_STORE, MemoryRMStateStore.class.getName());
+ conf.setBoolean(YarnConfiguration.RM_WORK_PRESERVING_RECOVERY_ENABLED, true);
+ conf.setInt(YarnConfiguration.RM_AM_EXPIRY_INTERVAL_MS, 6000);
+ final ControlledClock clock = new ControlledClock(new SystemClock());
+ clock.setTime(0);
+ MemoryRMStateStore memStore = new MemoryRMStateStore() {
+ @Override
+ public synchronized RMState loadState() throws Exception {
+ clock.setTime(8000);
+ return super.loadState();
+ }
+ };
+ memStore.init(conf);
+ final ApplicationAttemptId attemptId = mock(ApplicationAttemptId.class);
+ final Dispatcher dispatcher = mock(Dispatcher.class);
+ final boolean[] expired = new boolean[]{false};
+ final AMLivelinessMonitor monitor = new AMLivelinessMonitor(
+ dispatcher, clock) {
+ @Override
+ protected void expire(ApplicationAttemptId id) {
+ Assert.assertEquals(id, attemptId);
+ expired[0] = true;
+ }
+ };
+ monitor.register(attemptId);
+ MockRM rm = new MockRM(conf, memStore) {
+ @Override
+ protected AMLivelinessMonitor createAMLivelinessMonitor() {
+ return monitor;
+ }
+ };
+ rm.start();
+ // make sure that monitor has started
+ while (monitor.getServiceState() != Service.STATE.STARTED) {
+ Thread.sleep(100);
+ }
+ // expired[0] would be set to true without resetTimer
+ Assert.assertFalse(expired[0]);
+ rm.stop();
+ }
+}
|
885de91ea45d194f0473fa9c3098243f18d3f65f
|
ReactiveX-RxJava
|
Make Functions.from typesafe--
|
p
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/main/java/rx/subscriptions/Subscriptions.java b/rxjava-core/src/main/java/rx/subscriptions/Subscriptions.java
index f3f1dd46c7..788942e99a 100644
--- a/rxjava-core/src/main/java/rx/subscriptions/Subscriptions.java
+++ b/rxjava-core/src/main/java/rx/subscriptions/Subscriptions.java
@@ -20,6 +20,7 @@
import rx.Subscription;
import rx.util.functions.Action0;
import rx.util.functions.FuncN;
+import rx.util.functions.Function;
import rx.util.functions.Functions;
/**
@@ -83,23 +84,6 @@ public static CompositeSubscription create(Subscription... subscriptions) {
return new CompositeSubscription(subscriptions);
}
- /**
- * A {@link Subscription} implemented via an anonymous function (such as closures from other languages).
- *
- * @return {@link Subscription}
- */
- public static Subscription create(final Object unsubscribe) {
- final FuncN<?> f = Functions.from(unsubscribe);
- return new Subscription() {
-
- @Override
- public void unsubscribe() {
- f.call();
- }
-
- };
- }
-
/**
* A {@link Subscription} that does nothing when its unsubscribe method is called.
*/
diff --git a/rxjava-core/src/main/java/rx/util/functions/Action.java b/rxjava-core/src/main/java/rx/util/functions/Action.java
index c1d43eede6..27d781e957 100644
--- a/rxjava-core/src/main/java/rx/util/functions/Action.java
+++ b/rxjava-core/src/main/java/rx/util/functions/Action.java
@@ -5,6 +5,6 @@
* <p>
* Marker interface to allow instanceof checks.
*/
-public interface Action {
+public interface Action extends Function {
}
diff --git a/rxjava-core/src/main/java/rx/util/functions/Functions.java b/rxjava-core/src/main/java/rx/util/functions/Functions.java
index bae9c466ca..c66ca837e4 100644
--- a/rxjava-core/src/main/java/rx/util/functions/Functions.java
+++ b/rxjava-core/src/main/java/rx/util/functions/Functions.java
@@ -15,9 +15,6 @@
*/
package rx.util.functions;
-import java.util.Collection;
-import java.util.concurrent.ConcurrentHashMap;
-
public class Functions {
/**
@@ -26,17 +23,11 @@ public class Functions {
* @param function
*/
@SuppressWarnings({ "rawtypes" })
- public static FuncN from(final Object function) {
+ public static FuncN from(final Function function) {
if (function == null) {
throw new RuntimeException("function is null. Can't send arguments to null function.");
}
-
- /* check for typed Rx Function implementation first */
- if (function instanceof Function) {
- return fromFunction((Function) function);
- }
- // no support found
- throw new RuntimeException("Unsupported closure type: " + function.getClass().getSimpleName());
+ return fromFunction(function);
}
@SuppressWarnings({ "unchecked", "rawtypes" })
|
9304ec103d2d104e11c9f96c87864d6f7a026b64
|
kotlin
|
wrongly added test removed (correct one was added- before by Zhenja)--
|
p
|
https://github.com/JetBrains/kotlin
|
diff --git a/compiler/android-tests/tests/org/jetbrains/jet/compiler/android/SpecialFiles.java b/compiler/android-tests/tests/org/jetbrains/jet/compiler/android/SpecialFiles.java
index 97335a21b9bba..9b160e88e0373 100644
--- a/compiler/android-tests/tests/org/jetbrains/jet/compiler/android/SpecialFiles.java
+++ b/compiler/android-tests/tests/org/jetbrains/jet/compiler/android/SpecialFiles.java
@@ -122,8 +122,6 @@ private static void fillExcludedFiles() {
excludedFiles.add("kt1779.kt"); // Bug KT-2202 - private fun tryToComputeNext() in AbstractIterator.kt
excludedFiles.add("kt344.jet"); // Bug KT-2251
excludedFiles.add("kt529.kt"); // Bug
-
- excludedFiles.add("kt2981.kt"); // with java
}
private SpecialFiles() {
|
cbaaca7d5bbc93d4d5dab31dfa07c7523a7d1a38
|
restlet-framework-java
|
prevented primitive type from being returned as- models in API Declarations (https://github.com/restlet/apispark/issues/1168)--
|
c
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/modules/org.restlet.ext.swagger/src/org/restlet/ext/swagger/internal/RWADefToSwaggerConverter.java b/modules/org.restlet.ext.swagger/src/org/restlet/ext/swagger/internal/RWADefToSwaggerConverter.java
index 1f3e8ea715..fa1b407e47 100644
--- a/modules/org.restlet.ext.swagger/src/org/restlet/ext/swagger/internal/RWADefToSwaggerConverter.java
+++ b/modules/org.restlet.ext.swagger/src/org/restlet/ext/swagger/internal/RWADefToSwaggerConverter.java
@@ -251,7 +251,7 @@ public ApiDeclaration getApiDeclaration(String category, Definition def) {
String model = iterator.next();
Representation repr = getRepresentationByName(model,
def.getContract());
- if (repr == null) {
+ if (repr == null || isPrimitiveType(model)) {
continue;
}
ModelDeclaration md = new ModelDeclaration();
|
6950c38a0436ec937797f01fba8d7d95e6d6225f
|
elasticsearch
|
Tests: Improve test coverage.--Close -7428-
|
p
|
https://github.com/elastic/elasticsearch
|
diff --git a/rest-api-spec/test/cat.indices/10_basic.yml b/rest-api-spec/test/cat.indices/10_basic.yml
new file mode 100644
index 0000000000000..b3025a25ace5e
--- /dev/null
+++ b/rest-api-spec/test/cat.indices/10_basic.yml
@@ -0,0 +1,26 @@
+---
+"Test cat indices output":
+
+ - do:
+ cat.indices: {}
+
+ - match:
+ $body: |
+ /^$/
+
+ - do:
+ indices.create:
+ index: index1
+ body:
+ settings:
+ number_of_shards: "1"
+ number_of_replicas: "0"
+ - do:
+ cluster.health:
+ wait_for_status: yellow
+ - do:
+ cat.indices: {}
+
+ - match:
+ $body: |
+ /^(green \s+ index1 \s+ 1 \s+ 0 \s+ 0 \s+ 0 \s+ (\d+|\d+[.]\d+)(kb|b) \s+ (\d+|\d+[.]\d+)(kb|b))$/
diff --git a/src/main/java/org/elasticsearch/common/Booleans.java b/src/main/java/org/elasticsearch/common/Booleans.java
index 830e10ea823a3..dc0816cf4d42c 100644
--- a/src/main/java/org/elasticsearch/common/Booleans.java
+++ b/src/main/java/org/elasticsearch/common/Booleans.java
@@ -24,6 +24,11 @@
*/
public class Booleans {
+ /**
+ * Returns <code>true</code> iff the sequence is neither of the following:
+ * <tt>false</tt>, <tt>0</tt>, <tt>off</tt>, <tt>no</tt>,
+ * otherwise <code>false</code>
+ */
public static boolean parseBoolean(char[] text, int offset, int length, boolean defaultValue) {
if (text == null || length == 0) {
return defaultValue;
@@ -73,27 +78,40 @@ public static boolean isBoolean(char[] text, int offset, int length) {
return false;
}
-
- public static boolean parseBoolean(String value, boolean defaultValue) {
- if (value == null) {
+ public static Boolean parseBoolean(String value, Boolean defaultValue) {
+ if (value == null) { // only for the null case we do that here!
return defaultValue;
}
- return !(value.equals("false") || value.equals("0") || value.equals("off") || value.equals("no"));
+ return parseBoolean(value, false);
}
-
- public static Boolean parseBoolean(String value, Boolean defaultValue) {
+ /**
+ * Returns <code>true</code> iff the value is neither of the following:
+ * <tt>false</tt>, <tt>0</tt>, <tt>off</tt>, <tt>no</tt>
+ * otherwise <code>false</code>
+ */
+ public static boolean parseBoolean(String value, boolean defaultValue) {
if (value == null) {
return defaultValue;
}
return !(value.equals("false") || value.equals("0") || value.equals("off") || value.equals("no"));
}
+ /**
+ * Returns <code>true</code> iff the value is either of the following:
+ * <tt>false</tt>, <tt>0</tt>, <tt>off</tt>, <tt>no</tt>
+ * otherwise <code>false</code>
+ */
public static boolean isExplicitFalse(String value) {
- return (value.equals("false") || value.equals("0") || value.equals("off") || value.equals("no"));
+ return value != null && (value.equals("false") || value.equals("0") || value.equals("off") || value.equals("no"));
}
+ /**
+ * Returns <code>true</code> iff the value is either of the following:
+ * <tt>true</tt>, <tt>1</tt>, <tt>on</tt>, <tt>yes</tt>
+ * otherwise <code>false</code>
+ */
public static boolean isExplicitTrue(String value) {
- return (value.equals("true") || value.equals("1") || value.equals("on") || value.equals("yes"));
+ return value != null && (value.equals("true") || value.equals("1") || value.equals("on") || value.equals("yes"));
}
}
diff --git a/src/main/java/org/elasticsearch/common/Preconditions.java b/src/main/java/org/elasticsearch/common/Preconditions.java
index bc6aef92b779a..045d24d761701 100644
--- a/src/main/java/org/elasticsearch/common/Preconditions.java
+++ b/src/main/java/org/elasticsearch/common/Preconditions.java
@@ -23,7 +23,6 @@
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.ElasticsearchNullPointerException;
-import java.util.Collection;
import java.util.NoSuchElementException;
/**
@@ -246,204 +245,6 @@ public static <T> T checkNotNull(T reference, String errorMessageTemplate,
return reference;
}
- /**
- * Ensures that an {@code Iterable} object passed as a parameter to the
- * calling method is not null and contains no null elements.
- *
- * @param iterable the iterable to check the contents of
- * @return the non-null {@code iterable} reference just validated
- * @throws org.elasticsearch.ElasticsearchNullPointerException
- * if {@code iterable} is null or contains at
- * least one null element
- */
- public static <T extends Iterable<?>> T checkContentsNotNull(T iterable) {
- if (containsOrIsNull(iterable)) {
- throw new ElasticsearchNullPointerException();
- }
- return iterable;
- }
-
- /**
- * Ensures that an {@code Iterable} object passed as a parameter to the
- * calling method is not null and contains no null elements.
- *
- * @param iterable the iterable to check the contents of
- * @param errorMessage the exception message to use if the check fails; will
- * be converted to a string using {@link String#valueOf(Object)}
- * @return the non-null {@code iterable} reference just validated
- * @throws org.elasticsearch.ElasticsearchNullPointerException
- * if {@code iterable} is null or contains at
- * least one null element
- */
- public static <T extends Iterable<?>> T checkContentsNotNull(
- T iterable, Object errorMessage) {
- if (containsOrIsNull(iterable)) {
- throw new ElasticsearchNullPointerException(String.valueOf(errorMessage));
- }
- return iterable;
- }
-
- /**
- * Ensures that an {@code Iterable} object passed as a parameter to the
- * calling method is not null and contains no null elements.
- *
- * @param iterable the iterable to check the contents of
- * @param errorMessageTemplate a template for the exception message should the
- * check fail. The message is formed by replacing each {@code %s}
- * placeholder in the template with an argument. These are matched by
- * position - the first {@code %s} gets {@code errorMessageArgs[0]}, etc.
- * Unmatched arguments will be appended to the formatted message in square
- * braces. Unmatched placeholders will be left as-is.
- * @param errorMessageArgs the arguments to be substituted into the message
- * template. Arguments are converted to strings using
- * {@link String#valueOf(Object)}.
- * @return the non-null {@code iterable} reference just validated
- * @throws org.elasticsearch.ElasticsearchNullPointerException
- * if {@code iterable} is null or contains at
- * least one null element
- */
- public static <T extends Iterable<?>> T checkContentsNotNull(T iterable,
- String errorMessageTemplate, Object... errorMessageArgs) {
- if (containsOrIsNull(iterable)) {
- throw new ElasticsearchNullPointerException(
- format(errorMessageTemplate, errorMessageArgs));
- }
- return iterable;
- }
-
- private static boolean containsOrIsNull(Iterable<?> iterable) {
- if (iterable == null) {
- return true;
- }
-
- if (iterable instanceof Collection) {
- Collection<?> collection = (Collection<?>) iterable;
- try {
- return collection.contains(null);
- } catch (ElasticsearchNullPointerException e) {
- // A NPE implies that the collection doesn't contain null.
- return false;
- }
- } else {
- for (Object element : iterable) {
- if (element == null) {
- return true;
- }
- }
- return false;
- }
- }
-
- /**
- * Ensures that {@code index} specifies a valid <i>element</i> in an array,
- * list or string of size {@code size}. An element index may range from zero,
- * inclusive, to {@code size}, exclusive.
- *
- * @param index a user-supplied index identifying an element of an array, list
- * or string
- * @param size the size of that array, list or string
- * @throws IndexOutOfBoundsException if {@code index} is negative or is not
- * less than {@code size}
- * @throws org.elasticsearch.ElasticsearchIllegalArgumentException
- * if {@code size} is negative
- */
- public static void checkElementIndex(int index, int size) {
- checkElementIndex(index, size, "index");
- }
-
- /**
- * Ensures that {@code index} specifies a valid <i>element</i> in an array,
- * list or string of size {@code size}. An element index may range from zero,
- * inclusive, to {@code size}, exclusive.
- *
- * @param index a user-supplied index identifying an element of an array, list
- * or string
- * @param size the size of that array, list or string
- * @param desc the text to use to describe this index in an error message
- * @throws IndexOutOfBoundsException if {@code index} is negative or is not
- * less than {@code size}
- * @throws org.elasticsearch.ElasticsearchIllegalArgumentException
- * if {@code size} is negative
- */
- public static void checkElementIndex(int index, int size, String desc) {
- checkArgument(size >= 0, "negative size: %s", size);
- if (index < 0) {
- throw new IndexOutOfBoundsException(
- format("%s (%s) must not be negative", desc, index));
- }
- if (index >= size) {
- throw new IndexOutOfBoundsException(
- format("%s (%s) must be less than size (%s)", desc, index, size));
- }
- }
-
- /**
- * Ensures that {@code index} specifies a valid <i>position</i> in an array,
- * list or string of size {@code size}. A position index may range from zero
- * to {@code size}, inclusive.
- *
- * @param index a user-supplied index identifying a position in an array, list
- * or string
- * @param size the size of that array, list or string
- * @throws IndexOutOfBoundsException if {@code index} is negative or is
- * greater than {@code size}
- * @throws org.elasticsearch.ElasticsearchIllegalArgumentException
- * if {@code size} is negative
- */
- public static void checkPositionIndex(int index, int size) {
- checkPositionIndex(index, size, "index");
- }
-
- /**
- * Ensures that {@code index} specifies a valid <i>position</i> in an array,
- * list or string of size {@code size}. A position index may range from zero
- * to {@code size}, inclusive.
- *
- * @param index a user-supplied index identifying a position in an array, list
- * or string
- * @param size the size of that array, list or string
- * @param desc the text to use to describe this index in an error message
- * @throws IndexOutOfBoundsException if {@code index} is negative or is
- * greater than {@code size}
- * @throws org.elasticsearch.ElasticsearchIllegalArgumentException
- * if {@code size} is negative
- */
- public static void checkPositionIndex(int index, int size, String desc) {
- checkArgument(size >= 0, "negative size: %s", size);
- if (index < 0) {
- throw new IndexOutOfBoundsException(format(
- "%s (%s) must not be negative", desc, index));
- }
- if (index > size) {
- throw new IndexOutOfBoundsException(format(
- "%s (%s) must not be greater than size (%s)", desc, index, size));
- }
- }
-
- /**
- * Ensures that {@code start} and {@code end} specify a valid <i>positions</i>
- * in an array, list or string of size {@code size}, and are in order. A
- * position index may range from zero to {@code size}, inclusive.
- *
- * @param start a user-supplied index identifying a starting position in an
- * array, list or string
- * @param end a user-supplied index identifying a ending position in an array,
- * list or string
- * @param size the size of that array, list or string
- * @throws IndexOutOfBoundsException if either index is negative or is
- * greater than {@code size}, or if {@code end} is less than {@code start}
- * @throws org.elasticsearch.ElasticsearchIllegalArgumentException
- * if {@code size} is negative
- */
- public static void checkPositionIndexes(int start, int end, int size) {
- checkPositionIndex(start, size, "start index");
- checkPositionIndex(end, size, "end index");
- if (end < start) {
- throw new IndexOutOfBoundsException(format(
- "end index (%s) must not be less than start index (%s)", end, start));
- }
- }
-
/**
* Substitutes each {@code %s} in {@code template} with an argument. These
* are matched by position - the first {@code %s} gets {@code args[0]}, etc.
diff --git a/src/main/java/org/elasticsearch/common/Strings.java b/src/main/java/org/elasticsearch/common/Strings.java
index adbc19fd81581..debfaf5c2e077 100644
--- a/src/main/java/org/elasticsearch/common/Strings.java
+++ b/src/main/java/org/elasticsearch/common/Strings.java
@@ -49,20 +49,6 @@ public class Strings {
private static final String CURRENT_PATH = ".";
- private static final char EXTENSION_SEPARATOR = '.';
-
- public static void tabify(int tabs, String from, StringBuilder to) throws Exception {
- try (BufferedReader reader = new BufferedReader(new FastStringReader(from))) {
- String line;
- while ((line = reader.readLine()) != null) {
- for (int i = 0; i < tabs; i++) {
- to.append('\t');
- }
- to.append(line).append('\n');
- }
- }
- }
-
public static void spaceify(int spaces, String from, StringBuilder to) throws Exception {
try (BufferedReader reader = new BufferedReader(new FastStringReader(from))) {
String line;
@@ -137,55 +123,6 @@ public static List<String> splitSmart(String s, String separator, boolean decode
}
- public static List<String> splitWS(String s, boolean decode) {
- ArrayList<String> lst = new ArrayList<>(2);
- StringBuilder sb = new StringBuilder();
- int pos = 0, end = s.length();
- while (pos < end) {
- char ch = s.charAt(pos++);
- if (Character.isWhitespace(ch)) {
- if (sb.length() > 0) {
- lst.add(sb.toString());
- sb = new StringBuilder();
- }
- continue;
- }
-
- if (ch == '\\') {
- if (!decode) sb.append(ch);
- if (pos >= end) break; // ERROR, or let it go?
- ch = s.charAt(pos++);
- if (decode) {
- switch (ch) {
- case 'n':
- ch = '\n';
- break;
- case 't':
- ch = '\t';
- break;
- case 'r':
- ch = '\r';
- break;
- case 'b':
- ch = '\b';
- break;
- case 'f':
- ch = '\f';
- break;
- }
- }
- }
-
- sb.append(ch);
- }
-
- if (sb.length() > 0) {
- lst.add(sb.toString());
- }
-
- return lst;
- }
-
//---------------------------------------------------------------------
// General convenience methods for working with Strings
//---------------------------------------------------------------------
@@ -316,63 +253,6 @@ public static boolean containsWhitespace(CharSequence str) {
return false;
}
- /**
- * Check whether the given String contains any whitespace characters.
- *
- * @param str the String to check (may be <code>null</code>)
- * @return <code>true</code> if the String is not empty and
- * contains at least 1 whitespace character
- * @see #containsWhitespace(CharSequence)
- */
- public static boolean containsWhitespace(String str) {
- return containsWhitespace((CharSequence) str);
- }
-
- /**
- * Trim leading and trailing whitespace from the given String.
- *
- * @param str the String to check
- * @return the trimmed String
- * @see java.lang.Character#isWhitespace
- */
- public static String trimWhitespace(String str) {
- if (!hasLength(str)) {
- return str;
- }
- StringBuilder sb = new StringBuilder(str);
- while (sb.length() > 0 && Character.isWhitespace(sb.charAt(0))) {
- sb.deleteCharAt(0);
- }
- while (sb.length() > 0 && Character.isWhitespace(sb.charAt(sb.length() - 1))) {
- sb.deleteCharAt(sb.length() - 1);
- }
- return sb.toString();
- }
-
- /**
- * Trim <i>all</i> whitespace from the given String:
- * leading, trailing, and inbetween characters.
- *
- * @param str the String to check
- * @return the trimmed String
- * @see java.lang.Character#isWhitespace
- */
- public static String trimAllWhitespace(String str) {
- if (!hasLength(str)) {
- return str;
- }
- StringBuilder sb = new StringBuilder(str);
- int index = 0;
- while (sb.length() > index) {
- if (Character.isWhitespace(sb.charAt(index))) {
- sb.deleteCharAt(index);
- } else {
- index++;
- }
- }
- return sb.toString();
- }
-
/**
* Trim leading whitespace from the given String.
*
@@ -391,24 +271,6 @@ public static String trimLeadingWhitespace(String str) {
return sb.toString();
}
- /**
- * Trim trailing whitespace from the given String.
- *
- * @param str the String to check
- * @return the trimmed String
- * @see java.lang.Character#isWhitespace
- */
- public static String trimTrailingWhitespace(String str) {
- if (!hasLength(str)) {
- return str;
- }
- StringBuilder sb = new StringBuilder(str);
- while (sb.length() > 0 && Character.isWhitespace(sb.charAt(sb.length() - 1))) {
- sb.deleteCharAt(sb.length() - 1);
- }
- return sb.toString();
- }
-
/**
* Trim all occurences of the supplied leading character from the given String.
*
@@ -427,72 +289,6 @@ public static String trimLeadingCharacter(String str, char leadingCharacter) {
return sb.toString();
}
- /**
- * Trim all occurences of the supplied trailing character from the given String.
- *
- * @param str the String to check
- * @param trailingCharacter the trailing character to be trimmed
- * @return the trimmed String
- */
- public static String trimTrailingCharacter(String str, char trailingCharacter) {
- if (!hasLength(str)) {
- return str;
- }
- StringBuilder sb = new StringBuilder(str);
- while (sb.length() > 0 && sb.charAt(sb.length() - 1) == trailingCharacter) {
- sb.deleteCharAt(sb.length() - 1);
- }
- return sb.toString();
- }
-
-
- /**
- * Test if the given String starts with the specified prefix,
- * ignoring upper/lower case.
- *
- * @param str the String to check
- * @param prefix the prefix to look for
- * @see java.lang.String#startsWith
- */
- public static boolean startsWithIgnoreCase(String str, String prefix) {
- if (str == null || prefix == null) {
- return false;
- }
- if (str.startsWith(prefix)) {
- return true;
- }
- if (str.length() < prefix.length()) {
- return false;
- }
- String lcStr = str.substring(0, prefix.length()).toLowerCase(Locale.ROOT);
- String lcPrefix = prefix.toLowerCase(Locale.ROOT);
- return lcStr.equals(lcPrefix);
- }
-
- /**
- * Test if the given String ends with the specified suffix,
- * ignoring upper/lower case.
- *
- * @param str the String to check
- * @param suffix the suffix to look for
- * @see java.lang.String#endsWith
- */
- public static boolean endsWithIgnoreCase(String str, String suffix) {
- if (str == null || suffix == null) {
- return false;
- }
- if (str.endsWith(suffix)) {
- return true;
- }
- if (str.length() < suffix.length()) {
- return false;
- }
-
- String lcStr = str.substring(str.length() - suffix.length()).toLowerCase(Locale.ROOT);
- String lcSuffix = suffix.toLowerCase(Locale.ROOT);
- return lcStr.equals(lcSuffix);
- }
-
/**
* Test whether the given string matches the given substring
* at the given index.
@@ -609,28 +405,6 @@ public static String quote(String str) {
return (str != null ? "'" + str + "'" : null);
}
- /**
- * Turn the given Object into a String with single quotes
- * if it is a String; keeping the Object as-is else.
- *
- * @param obj the input Object (e.g. "myString")
- * @return the quoted String (e.g. "'myString'"),
- * or the input object as-is if not a String
- */
- public static Object quoteIfString(Object obj) {
- return (obj instanceof String ? quote((String) obj) : obj);
- }
-
- /**
- * Unqualify a string qualified by a '.' dot character. For example,
- * "this.name.is.qualified", returns "qualified".
- *
- * @param qualifiedName the qualified name
- */
- public static String unqualify(String qualifiedName) {
- return unqualify(qualifiedName, '.');
- }
-
/**
* Unqualify a string qualified by a separator character. For example,
* "this:name:is:qualified" returns "qualified" if using a ':' separator.
@@ -654,18 +428,6 @@ public static String capitalize(String str) {
return changeFirstCharacterCase(str, true);
}
- /**
- * Uncapitalize a <code>String</code>, changing the first letter to
- * lower case as per {@link Character#toLowerCase(char)}.
- * No other letters are changed.
- *
- * @param str the String to uncapitalize, may be <code>null</code>
- * @return the uncapitalized String, <code>null</code> if null
- */
- public static String uncapitalize(String str) {
- return changeFirstCharacterCase(str, false);
- }
-
private static String changeFirstCharacterCase(String str, boolean capitalize) {
if (str == null || str.length() == 0) {
return str;
@@ -702,74 +464,6 @@ public static boolean validFileNameExcludingAstrix(String fileName) {
return true;
}
- /**
- * Extract the filename from the given path,
- * e.g. "mypath/myfile.txt" -> "myfile.txt".
- *
- * @param path the file path (may be <code>null</code>)
- * @return the extracted filename, or <code>null</code> if none
- */
- public static String getFilename(String path) {
- if (path == null) {
- return null;
- }
- int separatorIndex = path.lastIndexOf(FOLDER_SEPARATOR);
- return (separatorIndex != -1 ? path.substring(separatorIndex + 1) : path);
- }
-
- /**
- * Extract the filename extension from the given path,
- * e.g. "mypath/myfile.txt" -> "txt".
- *
- * @param path the file path (may be <code>null</code>)
- * @return the extracted filename extension, or <code>null</code> if none
- */
- public static String getFilenameExtension(String path) {
- if (path == null) {
- return null;
- }
- int sepIndex = path.lastIndexOf(EXTENSION_SEPARATOR);
- return (sepIndex != -1 ? path.substring(sepIndex + 1) : null);
- }
-
- /**
- * Strip the filename extension from the given path,
- * e.g. "mypath/myfile.txt" -> "mypath/myfile".
- *
- * @param path the file path (may be <code>null</code>)
- * @return the path with stripped filename extension,
- * or <code>null</code> if none
- */
- public static String stripFilenameExtension(String path) {
- if (path == null) {
- return null;
- }
- int sepIndex = path.lastIndexOf(EXTENSION_SEPARATOR);
- return (sepIndex != -1 ? path.substring(0, sepIndex) : path);
- }
-
- /**
- * Apply the given relative path to the given path,
- * assuming standard Java folder separation (i.e. "/" separators);
- *
- * @param path the path to start from (usually a full file path)
- * @param relativePath the relative path to apply
- * (relative to the full file path above)
- * @return the full file path that results from applying the relative path
- */
- public static String applyRelativePath(String path, String relativePath) {
- int separatorIndex = path.lastIndexOf(FOLDER_SEPARATOR);
- if (separatorIndex != -1) {
- String newPath = path.substring(0, separatorIndex);
- if (!relativePath.startsWith(FOLDER_SEPARATOR)) {
- newPath += FOLDER_SEPARATOR;
- }
- return newPath + relativePath;
- } else {
- return relativePath;
- }
- }
-
/**
* Normalize the path by suppressing sequences like "path/.." and
* inner simple dots.
@@ -830,142 +524,6 @@ public static String cleanPath(String path) {
return prefix + collectionToDelimitedString(pathElements, FOLDER_SEPARATOR);
}
- /**
- * Compare two paths after normalization of them.
- *
- * @param path1 first path for comparison
- * @param path2 second path for comparison
- * @return whether the two paths are equivalent after normalization
- */
- public static boolean pathEquals(String path1, String path2) {
- return cleanPath(path1).equals(cleanPath(path2));
- }
-
- /**
- * Parse the given <code>localeString</code> into a {@link Locale}.
- * <p>This is the inverse operation of {@link Locale#toString Locale's toString}.
- *
- * @param localeString the locale string, following <code>Locale's</code>
- * <code>toString()</code> format ("en", "en_UK", etc);
- * also accepts spaces as separators, as an alternative to underscores
- * @return a corresponding <code>Locale</code> instance
- */
- public static Locale parseLocaleString(String localeString) {
- String[] parts = tokenizeToStringArray(localeString, "_ ", false, false);
- String language = (parts.length != 0 ? parts[0] : "");
- String country = (parts.length > 1 ? parts[1] : "");
- String variant = "";
- if (parts.length >= 2) {
- // There is definitely a variant, and it is everything after the country
- // code sans the separator between the country code and the variant.
- int endIndexOfCountryCode = localeString.indexOf(country) + country.length();
- // Strip off any leading '_' and whitespace, what's left is the variant.
- variant = trimLeadingWhitespace(localeString.substring(endIndexOfCountryCode));
- if (variant.startsWith("_")) {
- variant = trimLeadingCharacter(variant, '_');
- }
- }
- return (language.length() > 0 ? new Locale(language, country, variant) : null);
- }
-
- /**
- * Determine the RFC 3066 compliant language tag,
- * as used for the HTTP "Accept-Language" header.
- *
- * @param locale the Locale to transform to a language tag
- * @return the RFC 3066 compliant language tag as String
- */
- public static String toLanguageTag(Locale locale) {
- return locale.getLanguage() + (hasText(locale.getCountry()) ? "-" + locale.getCountry() : "");
- }
-
-
- //---------------------------------------------------------------------
- // Convenience methods for working with String arrays
- //---------------------------------------------------------------------
-
- /**
- * Append the given String to the given String array, returning a new array
- * consisting of the input array contents plus the given String.
- *
- * @param array the array to append to (can be <code>null</code>)
- * @param str the String to append
- * @return the new array (never <code>null</code>)
- */
- public static String[] addStringToArray(String[] array, String str) {
- if (isEmpty(array)) {
- return new String[]{str};
- }
- String[] newArr = new String[array.length + 1];
- System.arraycopy(array, 0, newArr, 0, array.length);
- newArr[array.length] = str;
- return newArr;
- }
-
- /**
- * Concatenate the given String arrays into one,
- * with overlapping array elements included twice.
- * <p>The order of elements in the original arrays is preserved.
- *
- * @param array1 the first array (can be <code>null</code>)
- * @param array2 the second array (can be <code>null</code>)
- * @return the new array (<code>null</code> if both given arrays were <code>null</code>)
- */
- public static String[] concatenateStringArrays(String[] array1, String[] array2) {
- if (isEmpty(array1)) {
- return array2;
- }
- if (isEmpty(array2)) {
- return array1;
- }
- String[] newArr = new String[array1.length + array2.length];
- System.arraycopy(array1, 0, newArr, 0, array1.length);
- System.arraycopy(array2, 0, newArr, array1.length, array2.length);
- return newArr;
- }
-
- /**
- * Merge the given String arrays into one, with overlapping
- * array elements only included once.
- * <p>The order of elements in the original arrays is preserved
- * (with the exception of overlapping elements, which are only
- * included on their first occurence).
- *
- * @param array1 the first array (can be <code>null</code>)
- * @param array2 the second array (can be <code>null</code>)
- * @return the new array (<code>null</code> if both given arrays were <code>null</code>)
- */
- public static String[] mergeStringArrays(String[] array1, String[] array2) {
- if (isEmpty(array1)) {
- return array2;
- }
- if (isEmpty(array2)) {
- return array1;
- }
- List<String> result = new ArrayList<>();
- result.addAll(Arrays.asList(array1));
- for (String str : array2) {
- if (!result.contains(str)) {
- result.add(str);
- }
- }
- return toStringArray(result);
- }
-
- /**
- * Turn given source String array into sorted array.
- *
- * @param array the source array
- * @return the sorted array (never <code>null</code>)
- */
- public static String[] sortStringArray(String[] array) {
- if (isEmpty(array)) {
- return new String[0];
- }
- Arrays.sort(array);
- return array;
- }
-
/**
* Copy the given Collection into a String array.
* The Collection must contain String elements only.
@@ -981,57 +539,6 @@ public static String[] toStringArray(Collection<String> collection) {
return collection.toArray(new String[collection.size()]);
}
- /**
- * Copy the given Enumeration into a String array.
- * The Enumeration must contain String elements only.
- *
- * @param enumeration the Enumeration to copy
- * @return the String array (<code>null</code> if the passed-in
- * Enumeration was <code>null</code>)
- */
- public static String[] toStringArray(Enumeration<String> enumeration) {
- if (enumeration == null) {
- return null;
- }
- List<String> list = Collections.list(enumeration);
- return list.toArray(new String[list.size()]);
- }
-
- /**
- * Trim the elements of the given String array,
- * calling <code>String.trim()</code> on each of them.
- *
- * @param array the original String array
- * @return the resulting array (of the same size) with trimmed elements
- */
- public static String[] trimArrayElements(String[] array) {
- if (isEmpty(array)) {
- return new String[0];
- }
- String[] result = new String[array.length];
- for (int i = 0; i < array.length; i++) {
- String element = array[i];
- result[i] = (element != null ? element.trim() : null);
- }
- return result;
- }
-
- /**
- * Remove duplicate Strings from the given array.
- * Also sorts the array, as it uses a TreeSet.
- *
- * @param array the String array
- * @return an array without duplicates, in natural sort order
- */
- public static String[] removeDuplicateStrings(String[] array) {
- if (isEmpty(array)) {
- return array;
- }
- Set<String> set = new TreeSet<>();
- set.addAll(Arrays.asList(array));
- return toStringArray(set);
- }
-
public static Set<String> splitStringByCommaToSet(final String s) {
return splitStringToSet(s, ',');
}
@@ -1128,22 +635,6 @@ public static String[] split(String toSplit, String delimiter) {
return new String[]{beforeDelimiter, afterDelimiter};
}
- /**
- * Take an array Strings and split each element based on the given delimiter.
- * A <code>Properties</code> instance is then generated, with the left of the
- * delimiter providing the key, and the right of the delimiter providing the value.
- * <p>Will trim both the key and value before adding them to the
- * <code>Properties</code> instance.
- *
- * @param array the array to process
- * @param delimiter to split each element using (typically the equals symbol)
- * @return a <code>Properties</code> instance representing the array contents,
- * or <code>null</code> if the array to process was null or empty
- */
- public static Properties splitArrayElementsIntoProperties(String[] array, String delimiter) {
- return splitArrayElementsIntoProperties(array, delimiter, null);
- }
-
/**
* Take an array Strings and split each element based on the given delimiter.
* A <code>Properties</code> instance is then generated, with the left of the
@@ -1603,5 +1094,4 @@ public static boolean isAllOrWildcard(String[] data) {
return CollectionUtils.isEmpty(data) ||
data.length == 1 && ("_all".equals(data[0]) || "*".equals(data[0]));
}
-
-}
+}
\ No newline at end of file
diff --git a/src/main/java/org/elasticsearch/common/geo/GeoHashUtils.java b/src/main/java/org/elasticsearch/common/geo/GeoHashUtils.java
index a3b98ccbfc629..15aca20e23af8 100644
--- a/src/main/java/org/elasticsearch/common/geo/GeoHashUtils.java
+++ b/src/main/java/org/elasticsearch/common/geo/GeoHashUtils.java
@@ -21,7 +21,6 @@
import java.util.ArrayList;
import java.util.Collection;
-import java.util.Iterator;
/**
@@ -120,23 +119,6 @@ private static final char encode(int x, int y) {
public static Collection<? extends CharSequence> neighbors(String geohash) {
return addNeighbors(geohash, geohash.length(), new ArrayList<CharSequence>(8));
}
-
- /**
- * Create an {@link Iterable} which allows to iterate over the cells that
- * contain a given geohash
- *
- * @param geohash Geohash of a cell
- *
- * @return {@link Iterable} of path
- */
- public static Iterable<String> path(final String geohash) {
- return new Iterable<String>() {
- @Override
- public Iterator<String> iterator() {
- return new GeohashPathIterator(geohash);
- }
- };
- }
/**
* Calculate the geohash of a neighbor of a geohash
@@ -331,19 +313,6 @@ public static GeoPoint decode(String geohash, GeoPoint ret) {
return ret.reset((interval[0] + interval[1]) / 2D, (interval[2] + interval[3]) / 2D);
}
- /**
- * Decodes the given geohash into a geohash cell defined by the points nothWest and southEast
- *
- * @param geohash Geohash to deocde
- * @param northWest the point north/west of the cell
- * @param southEast the point south/east of the cell
- */
- public static void decodeCell(String geohash, GeoPoint northWest, GeoPoint southEast) {
- double[] interval = decodeCell(geohash);
- northWest.reset(interval[1], interval[2]);
- southEast.reset(interval[0], interval[3]);
- }
-
private static double[] decodeCell(String geohash) {
double[] interval = {-90.0, 90.0, -180.0, 180.0};
boolean isEven = true;
diff --git a/src/main/java/org/elasticsearch/common/geo/GeoPoint.java b/src/main/java/org/elasticsearch/common/geo/GeoPoint.java
index 8278c9ca7347c..0e1d6961de52e 100644
--- a/src/main/java/org/elasticsearch/common/geo/GeoPoint.java
+++ b/src/main/java/org/elasticsearch/common/geo/GeoPoint.java
@@ -78,11 +78,6 @@ public GeoPoint resetFromGeoHash(String hash) {
return this;
}
- void latlon(double lat, double lon) {
- this.lat = lat;
- this.lon = lon;
- }
-
public final double lat() {
return this.lat;
}
diff --git a/src/main/java/org/elasticsearch/common/geo/GeoUtils.java b/src/main/java/org/elasticsearch/common/geo/GeoUtils.java
index c1b9cca01d380..c4bc51d7bfb82 100644
--- a/src/main/java/org/elasticsearch/common/geo/GeoUtils.java
+++ b/src/main/java/org/elasticsearch/common/geo/GeoUtils.java
@@ -421,4 +421,7 @@ public static GeoPoint parseGeoPoint(XContentParser parser, GeoPoint point) thro
throw new ElasticsearchParseException("geo_point expected");
}
}
+
+ private GeoUtils() {
+ }
}
diff --git a/src/main/java/org/elasticsearch/common/geo/GeohashPathIterator.java b/src/main/java/org/elasticsearch/common/geo/GeohashPathIterator.java
deleted file mode 100644
index 6e6821ee81b96..0000000000000
--- a/src/main/java/org/elasticsearch/common/geo/GeohashPathIterator.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.common.geo;
-
-import java.util.Iterator;
-
-/**
- * This class iterates over the cells of a given geohash. Assume geohashes
- * form a tree, this iterator traverses this tree form a leaf (actual gehash)
- * to the root (geohash of length 1).
- */
-public final class GeohashPathIterator implements Iterator<String> {
-
- private final String geohash;
- private int currentLength;
-
- /**
- * Create a new {@link GeohashPathIterator} for a given geohash
- * @param geohash The geohash to traverse
- */
- public GeohashPathIterator(String geohash) {
- this.geohash = geohash;
- this.currentLength = geohash.length();
- }
-
- @Override
- public boolean hasNext() {
- return currentLength > 0;
- }
-
- @Override
- public String next() {
- String result = geohash.substring(0, currentLength);
- currentLength--;
- return result;
- }
-
- @Override
- public void remove() {
- throw new UnsupportedOperationException("unable to remove a geohash from this path");
- }
-}
\ No newline at end of file
diff --git a/src/main/java/org/elasticsearch/common/geo/SpatialStrategy.java b/src/main/java/org/elasticsearch/common/geo/SpatialStrategy.java
index 02ee8a774e114..a83f29156a1f3 100644
--- a/src/main/java/org/elasticsearch/common/geo/SpatialStrategy.java
+++ b/src/main/java/org/elasticsearch/common/geo/SpatialStrategy.java
@@ -18,12 +18,6 @@
*/
package org.elasticsearch.common.geo;
-import org.apache.lucene.spatial.prefix.PrefixTreeStrategy;
-import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
-import org.apache.lucene.spatial.prefix.TermQueryPrefixTreeStrategy;
-import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree;
-
-import java.lang.String;
/**
*
@@ -42,11 +36,4 @@ private SpatialStrategy(String strategyName) {
public String getStrategyName() {
return strategyName;
}
-
- public PrefixTreeStrategy create(SpatialPrefixTree grid, String fieldName) {
- if (this == TERM) {
- return new TermQueryPrefixTreeStrategy(grid, fieldName);
- }
- return new RecursivePrefixTreeStrategy(grid, fieldName);
- }
}
diff --git a/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataCache.java b/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataCache.java
index 6921840f5dd13..9e1fceff5251f 100644
--- a/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataCache.java
+++ b/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataCache.java
@@ -29,7 +29,6 @@
import org.apache.lucene.util.Accountable;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.SegmentReaderUtils;
-import org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalsIndexFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.service.IndexService;
import org.elasticsearch.index.shard.ShardId;
@@ -143,7 +142,7 @@ public void onRemoval(RemovalNotification<Key, Accountable> notification) {
public <FD extends AtomicFieldData, IFD extends IndexFieldData<FD>> FD load(final AtomicReaderContext context, final IFD indexFieldData) throws Exception {
final Key key = new Key(context.reader().getCoreCacheKey());
//noinspection unchecked
- Accountable fd = cache.get(key, new Callable<FD>() {
+ final Accountable accountable = cache.get(key, new Callable<FD>() {
@Override
public FD call() throws Exception {
SegmentReaderUtils.registerCoreListener(context.reader(), FieldBased.this);
@@ -157,7 +156,6 @@ public FD call() throws Exception {
}
}
final FD fieldData = indexFieldData.loadDirect(context);
- key.sizeInBytes = fieldData.ramBytesUsed();
for (Listener listener : key.listeners) {
try {
listener.onLoad(fieldNames, fieldDataType, fieldData);
@@ -166,20 +164,20 @@ public FD call() throws Exception {
logger.error("Failed to call listener on atomic field data loading", e);
}
}
+ key.sizeInBytes = fieldData.ramBytesUsed();
return fieldData;
}
});
- return (FD) fd;
+ return (FD) accountable;
}
public <FD extends AtomicFieldData, IFD extends IndexFieldData.Global<FD>> IFD load(final IndexReader indexReader, final IFD indexFieldData) throws Exception {
final Key key = new Key(indexReader.getCoreCacheKey());
//noinspection unchecked
- Accountable ifd = cache.get(key, new Callable<Accountable>() {
+ final Accountable accountable = cache.get(key, new Callable<Accountable>() {
@Override
- public GlobalOrdinalsIndexFieldData call() throws Exception {
+ public Accountable call() throws Exception {
indexReader.addReaderClosedListener(FieldBased.this);
-
key.listeners.add(indicesFieldDataCacheListener);
final ShardId shardId = ShardUtils.extractShardId(indexReader);
if (shardId != null) {
@@ -188,8 +186,7 @@ public GlobalOrdinalsIndexFieldData call() throws Exception {
key.listeners.add(shard.fieldData());
}
}
- GlobalOrdinalsIndexFieldData ifd = (GlobalOrdinalsIndexFieldData) indexFieldData.localGlobalDirect(indexReader);
- key.sizeInBytes = ifd.ramBytesUsed();
+ final Accountable ifd = (Accountable) indexFieldData.localGlobalDirect(indexReader);
for (Listener listener : key.listeners) {
try {
listener.onLoad(fieldNames, fieldDataType, ifd);
@@ -198,11 +195,11 @@ public GlobalOrdinalsIndexFieldData call() throws Exception {
logger.error("Failed to call listener on global ordinals loading", e);
}
}
-
+ key.sizeInBytes = ifd.ramBytesUsed();
return ifd;
}
});
- return (IFD) ifd;
+ return (IFD) accountable;
}
@Override
diff --git a/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java b/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java
index 3d289dcdb707f..077088b2c5a07 100644
--- a/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java
+++ b/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java
@@ -53,6 +53,11 @@
*/
public class IndexFieldDataService extends AbstractIndexComponent {
+ public static final String FIELDDATA_CACHE_KEY = "index.fielddata.cache";
+ public static final String FIELDDATA_CACHE_VALUE_SOFT = "soft";
+ public static final String FIELDDATA_CACHE_VALUE_NODE = "node";
+ public static final String FIELDDATA_CACHE_VALUE_RESIDENT = "resident";
+
private static final String DISABLED_FORMAT = "disabled";
private static final String DOC_VALUES_FORMAT = "doc_values";
private static final String ARRAY_FORMAT = "array";
@@ -273,12 +278,12 @@ public <IFD extends IndexFieldData<?>> IFD getForField(FieldMapper<?> mapper) {
if (cache == null) {
// we default to node level cache, which in turn defaults to be unbounded
// this means changing the node level settings is simple, just set the bounds there
- String cacheType = type.getSettings().get("cache", indexSettings.get("index.fielddata.cache", "node"));
- if ("resident".equals(cacheType)) {
+ String cacheType = type.getSettings().get("cache", indexSettings.get(FIELDDATA_CACHE_KEY, FIELDDATA_CACHE_VALUE_NODE));
+ if (FIELDDATA_CACHE_VALUE_RESIDENT.equals(cacheType)) {
cache = new IndexFieldDataCache.Resident(logger, indexService, fieldNames, type, indicesFieldDataCacheListener);
- } else if ("soft".equals(cacheType)) {
+ } else if (FIELDDATA_CACHE_VALUE_SOFT.equals(cacheType)) {
cache = new IndexFieldDataCache.Soft(logger, indexService, fieldNames, type, indicesFieldDataCacheListener);
- } else if ("node".equals(cacheType)) {
+ } else if (FIELDDATA_CACHE_VALUE_NODE.equals(cacheType)) {
cache = indicesFieldDataCache.buildIndexFieldDataCache(indexService, index, fieldNames, type);
} else if ("none".equals(cacheType)){
cache = new IndexFieldDataCache.None();
diff --git a/src/main/java/org/elasticsearch/index/gateway/local/LocalIndexShardGateway.java b/src/main/java/org/elasticsearch/index/gateway/local/LocalIndexShardGateway.java
index 4a9e99fec16f8..2dd45562bd8d3 100644
--- a/src/main/java/org/elasticsearch/index/gateway/local/LocalIndexShardGateway.java
+++ b/src/main/java/org/elasticsearch/index/gateway/local/LocalIndexShardGateway.java
@@ -127,7 +127,7 @@ public void recover(boolean indexShouldExists, RecoveryState recoveryState) thro
} catch (Throwable e1) {
files += " (failure=" + ExceptionsHelper.detailedMessage(e1) + ")";
}
- if (indexShouldExists && indexShard.store().indexStore().persistent()) {
+ if (indexShouldExists && indexShard.indexService().store().persistent()) {
throw new IndexShardGatewayRecoveryException(shardId(), "shard allocated for local recovery (post api), should exist, but doesn't, current files: " + files, e);
}
}
diff --git a/src/main/java/org/elasticsearch/index/query/TermsFilterParser.java b/src/main/java/org/elasticsearch/index/query/TermsFilterParser.java
index 421d699c3153f..af2a7d6bff356 100644
--- a/src/main/java/org/elasticsearch/index/query/TermsFilterParser.java
+++ b/src/main/java/org/elasticsearch/index/query/TermsFilterParser.java
@@ -52,6 +52,16 @@ public class TermsFilterParser implements FilterParser {
public static final String NAME = "terms";
private IndicesTermsFilterCache termsFilterCache;
+ public static final String EXECUTION_KEY = "execution";
+ public static final String EXECUTION_VALUE_PLAIN = "plain";
+ public static final String EXECUTION_VALUE_FIELDDATA = "fielddata";
+ public static final String EXECUTION_VALUE_BOOL = "bool";
+ public static final String EXECUTION_VALUE_BOOL_NOCACHE = "bool_nocache";
+ public static final String EXECUTION_VALUE_AND = "and";
+ public static final String EXECUTION_VALUE_AND_NOCACHE = "and_nocache";
+ public static final String EXECUTION_VALUE_OR = "or";
+ public static final String EXECUTION_VALUE_OR_NOCACHE = "or_nocache";
+
@Inject
public TermsFilterParser() {
}
@@ -84,7 +94,7 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar
CacheKeyFilter.Key cacheKey = null;
XContentParser.Token token;
- String execution = "plain";
+ String execution = EXECUTION_VALUE_PLAIN;
List<Object> terms = Lists.newArrayList();
String fieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@@ -133,7 +143,7 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar
throw new QueryParsingException(parseContext.index(), "[terms] filter lookup element requires specifying the path");
}
} else if (token.isValue()) {
- if ("execution".equals(currentFieldName)) {
+ if (EXECUTION_KEY.equals(currentFieldName)) {
execution = parser.text();
} else if ("_name".equals(currentFieldName)) {
filterName = parser.text();
@@ -193,7 +203,7 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar
try {
Filter filter;
- if ("plain".equals(execution)) {
+ if (EXECUTION_VALUE_PLAIN.equals(execution)) {
if (fieldMapper != null) {
filter = fieldMapper.termsFilter(terms, parseContext);
} else {
@@ -207,7 +217,7 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar
if (cache == null || cache) {
filter = parseContext.cacheFilter(filter, cacheKey);
}
- } else if ("fielddata".equals(execution)) {
+ } else if (EXECUTION_VALUE_FIELDDATA.equals(execution)) {
// if there are no mappings, then nothing has been indexing yet against this shard, so we can return
// no match (but not cached!), since the FieldDataTermsFilter relies on a mapping...
if (fieldMapper == null) {
@@ -218,7 +228,7 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar
if (cache != null && cache) {
filter = parseContext.cacheFilter(filter, cacheKey);
}
- } else if ("bool".equals(execution)) {
+ } else if (EXECUTION_VALUE_BOOL.equals(execution)) {
XBooleanFilter boolFiler = new XBooleanFilter();
if (fieldMapper != null) {
for (Object term : terms) {
@@ -234,7 +244,7 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar
if (cache != null && cache) {
filter = parseContext.cacheFilter(filter, cacheKey);
}
- } else if ("bool_nocache".equals(execution)) {
+ } else if (EXECUTION_VALUE_BOOL_NOCACHE.equals(execution)) {
XBooleanFilter boolFiler = new XBooleanFilter();
if (fieldMapper != null) {
for (Object term : terms) {
@@ -250,7 +260,7 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar
if (cache == null || cache) {
filter = parseContext.cacheFilter(filter, cacheKey);
}
- } else if ("and".equals(execution)) {
+ } else if (EXECUTION_VALUE_AND.equals(execution)) {
List<Filter> filters = Lists.newArrayList();
if (fieldMapper != null) {
for (Object term : terms) {
@@ -266,7 +276,7 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar
if (cache != null && cache) {
filter = parseContext.cacheFilter(filter, cacheKey);
}
- } else if ("and_nocache".equals(execution)) {
+ } else if (EXECUTION_VALUE_AND_NOCACHE.equals(execution)) {
List<Filter> filters = Lists.newArrayList();
if (fieldMapper != null) {
for (Object term : terms) {
@@ -282,7 +292,7 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar
if (cache == null || cache) {
filter = parseContext.cacheFilter(filter, cacheKey);
}
- } else if ("or".equals(execution)) {
+ } else if (EXECUTION_VALUE_OR.equals(execution)) {
List<Filter> filters = Lists.newArrayList();
if (fieldMapper != null) {
for (Object term : terms) {
@@ -298,7 +308,7 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar
if (cache != null && cache) {
filter = parseContext.cacheFilter(filter, cacheKey);
}
- } else if ("or_nocache".equals(execution)) {
+ } else if (EXECUTION_VALUE_OR_NOCACHE.equals(execution)) {
List<Filter> filters = Lists.newArrayList();
if (fieldMapper != null) {
for (Object term : terms) {
diff --git a/src/main/java/org/elasticsearch/index/store/IndexStore.java b/src/main/java/org/elasticsearch/index/store/IndexStore.java
index 743a25607bfc4..6f51f1ad0d2f8 100644
--- a/src/main/java/org/elasticsearch/index/store/IndexStore.java
+++ b/src/main/java/org/elasticsearch/index/store/IndexStore.java
@@ -20,7 +20,6 @@
package org.elasticsearch.index.store;
import org.apache.lucene.store.StoreRateLimiting;
-import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.index.CloseableIndexComponent;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.store.IndicesStore;
@@ -50,16 +49,6 @@ public interface IndexStore extends CloseableIndexComponent {
*/
Class<? extends DirectoryService> shardDirectory();
- /**
- * Returns the backing store total space. Return <tt>-1</tt> if not available.
- */
- ByteSizeValue backingStoreTotalSpace();
-
- /**
- * Returns the backing store free space. Return <tt>-1</tt> if not available.
- */
- ByteSizeValue backingStoreFreeSpace();
-
/**
* Returns <tt>true</tt> if this shard is allocated on this node. Allocated means
* that it has storage files that can be deleted using {@link #deleteUnallocated(org.elasticsearch.index.shard.ShardId)}.
diff --git a/src/main/java/org/elasticsearch/index/store/Store.java b/src/main/java/org/elasticsearch/index/store/Store.java
index 969a6c11d5fd5..9fac24e0fc093 100644
--- a/src/main/java/org/elasticsearch/index/store/Store.java
+++ b/src/main/java/org/elasticsearch/index/store/Store.java
@@ -83,7 +83,6 @@ public class Store extends AbstractIndexShardComponent implements CloseableIndex
private final AtomicBoolean isClosed = new AtomicBoolean(false);
private final AtomicInteger refCount = new AtomicInteger(1);
- private final IndexStore indexStore;
private final CodecService codecService;
private final DirectoryService directoryService;
private final StoreDirectory directory;
@@ -91,9 +90,8 @@ public class Store extends AbstractIndexShardComponent implements CloseableIndex
private final DistributorDirectory distributorDirectory;
@Inject
- public Store(ShardId shardId, @IndexSettings Settings indexSettings, IndexStore indexStore, CodecService codecService, DirectoryService directoryService, Distributor distributor) throws IOException {
+ public Store(ShardId shardId, @IndexSettings Settings indexSettings, CodecService codecService, DirectoryService directoryService, Distributor distributor) throws IOException {
super(shardId, indexSettings);
- this.indexStore = indexStore;
this.codecService = codecService;
this.directoryService = directoryService;
this.sync = componentSettings.getAsBoolean("sync", true);
@@ -101,10 +99,6 @@ public Store(ShardId shardId, @IndexSettings Settings indexSettings, IndexStore
this.directory = new StoreDirectory(distributorDirectory);
}
- public IndexStore indexStore() {
- ensureOpen();
- return this.indexStore;
- }
public Directory directory() {
ensureOpen();
@@ -132,7 +126,7 @@ private static SegmentInfos readSegmentsInfo(IndexCommit commit, Directory direc
}
}
- private final void ensureOpen() {
+ final void ensureOpen() { // for testing
if (this.refCount.get() <= 0) {
throw new AlreadyClosedException("Store is already closed");
}
diff --git a/src/main/java/org/elasticsearch/index/store/fs/FsIndexStore.java b/src/main/java/org/elasticsearch/index/store/fs/FsIndexStore.java
index d4ab729397cac..ad552b862292f 100644
--- a/src/main/java/org/elasticsearch/index/store/fs/FsIndexStore.java
+++ b/src/main/java/org/elasticsearch/index/store/fs/FsIndexStore.java
@@ -22,7 +22,6 @@
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.service.IndexService;
@@ -58,30 +57,6 @@ public boolean persistent() {
return true;
}
- @Override
- public ByteSizeValue backingStoreTotalSpace() {
- if (locations == null) {
- return new ByteSizeValue(0);
- }
- long totalSpace = 0;
- for (File location : locations) {
- totalSpace += location.getTotalSpace();
- }
- return new ByteSizeValue(totalSpace);
- }
-
- @Override
- public ByteSizeValue backingStoreFreeSpace() {
- if (locations == null) {
- return new ByteSizeValue(0);
- }
- long usableSpace = 0;
- for (File location : locations) {
- usableSpace += location.getUsableSpace();
- }
- return new ByteSizeValue(usableSpace);
- }
-
@Override
public boolean canDeleteUnallocated(ShardId shardId) {
if (locations == null) {
diff --git a/src/main/java/org/elasticsearch/index/store/ram/RamIndexStore.java b/src/main/java/org/elasticsearch/index/store/ram/RamIndexStore.java
index 96bb5da5ed07f..c39242f9f777c 100644
--- a/src/main/java/org/elasticsearch/index/store/ram/RamIndexStore.java
+++ b/src/main/java/org/elasticsearch/index/store/ram/RamIndexStore.java
@@ -21,15 +21,12 @@
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.service.IndexService;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.index.store.DirectoryService;
import org.elasticsearch.index.store.support.AbstractIndexStore;
import org.elasticsearch.indices.store.IndicesStore;
-import org.elasticsearch.monitor.jvm.JvmInfo;
-import org.elasticsearch.monitor.jvm.JvmStats;
/**
*
@@ -50,14 +47,4 @@ public boolean persistent() {
public Class<? extends DirectoryService> shardDirectory() {
return RamDirectoryService.class;
}
-
- @Override
- public ByteSizeValue backingStoreTotalSpace() {
- return JvmInfo.jvmInfo().getMem().heapMax();
- }
-
- @Override
- public ByteSizeValue backingStoreFreeSpace() {
- return JvmStats.jvmStats().getMem().heapUsed();
- }
}
diff --git a/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java b/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java
index 9764c1056ec9f..dd54b817e9e5e 100644
--- a/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java
+++ b/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java
@@ -32,7 +32,10 @@
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.Index;
-import org.elasticsearch.index.fielddata.*;
+import org.elasticsearch.index.fielddata.AtomicFieldData;
+import org.elasticsearch.index.fielddata.FieldDataType;
+import org.elasticsearch.index.fielddata.IndexFieldData;
+import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.service.IndexService;
import org.elasticsearch.index.shard.ShardId;
@@ -110,10 +113,7 @@ public void onRemoval(RemovalNotification<Key, Accountable> notification) {
IndexFieldCache indexCache = key.indexCache;
long sizeInBytes = key.sizeInBytes;
final Accountable value = notification.getValue();
- assert sizeInBytes >= 0 || value != null : "Expected size [" + sizeInBytes + "] to be positive or value [" + value + "] to be non-null";
- if (sizeInBytes == -1 && value != null) {
- sizeInBytes = value.ramBytesUsed();
- }
+ assert value == null || sizeInBytes > 0 && sizeInBytes == value.ramBytesUsed() : "Expected size [" + sizeInBytes + "] to be positive or value [" + value + "] to be non-null";
for (IndexFieldDataCache.Listener listener : key.listeners) {
try {
listener.onUnload(indexCache.fieldNames, indexCache.fieldDataType, notification.wasEvicted(), sizeInBytes);
@@ -198,7 +198,6 @@ public AtomicFieldData call() throws Exception {
public <FD extends AtomicFieldData, IFD extends IndexFieldData.Global<FD>> IFD load(final IndexReader indexReader, final IFD indexFieldData) throws Exception {
final Key key = new Key(this, indexReader.getCoreCacheKey());
-
//noinspection unchecked
final Accountable accountable = cache.get(key, new Callable<Accountable>() {
@Override
@@ -221,6 +220,7 @@ public Accountable call() throws Exception {
logger.error("Failed to call listener on global ordinals loading", e);
}
}
+ key.sizeInBytes = ifd.ramBytesUsed();
return ifd;
}
});
diff --git a/src/main/java/org/elasticsearch/script/ScriptService.java b/src/main/java/org/elasticsearch/script/ScriptService.java
index 4674498dac7e3..3f6a2ee1fabc0 100644
--- a/src/main/java/org/elasticsearch/script/ScriptService.java
+++ b/src/main/java/org/elasticsearch/script/ScriptService.java
@@ -24,7 +24,6 @@
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
-
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.delete.DeleteRequest;
@@ -53,8 +52,6 @@
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.VersionType;
-import org.elasticsearch.index.fielddata.IndexFieldDataService;
-import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.TemplateQueryParser;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.watcher.FileChangesListener;
@@ -78,6 +75,8 @@ public class ScriptService extends AbstractComponent {
public static final String DEFAULT_SCRIPTING_LANGUAGE_SETTING = "script.default_lang";
public static final String DISABLE_DYNAMIC_SCRIPTING_SETTING = "script.disable_dynamic";
+ public static final String SCRIPT_CACHE_SIZE_SETTING = "script.cache.max_size";
+ public static final String SCRIPT_CACHE_EXPIRE_SETTING = "script.cache.expire";
public static final String DISABLE_DYNAMIC_SCRIPTING_DEFAULT = "sandbox";
public static final String SCRIPT_INDEX = ".scripts";
@@ -219,8 +218,8 @@ public ScriptService(Settings settings, Environment env, Set<ScriptEngineService
ResourceWatcherService resourceWatcherService) {
super(settings);
- int cacheMaxSize = componentSettings.getAsInt("cache.max_size", 500);
- TimeValue cacheExpire = componentSettings.getAsTime("cache.expire", null);
+ int cacheMaxSize = settings.getAsInt(SCRIPT_CACHE_SIZE_SETTING, 500);
+ TimeValue cacheExpire = settings.getAsTime(SCRIPT_CACHE_EXPIRE_SETTING, null);
logger.debug("using script cache with max_size [{}], expire [{}]", cacheMaxSize, cacheExpire);
this.defaultLang = settings.get(DEFAULT_SCRIPTING_LANGUAGE_SETTING, "groovy");
@@ -347,9 +346,6 @@ public CompiledScript compile(String lang, String script, ScriptType scriptType
throw new ScriptException("dynamic scripting for [" + lang + "] disabled");
}
- if (cacheKey == null) {
- cacheKey = new CacheKey(lang, script);
- }
// not the end of the world if we compile it twice...
compiled = getCompiledScript(lang, script);
//Since the cache key is the script content itself we don't need to
@@ -509,18 +505,6 @@ public SearchScript search(SearchLookup lookup, String lang, String script, Scri
return search(compile(lang, script, scriptType), lookup, vars);
}
- public SearchScript search(MapperService mapperService, IndexFieldDataService fieldDataService, String lang, String script, ScriptType scriptType, @Nullable Map<String, Object> vars) {
- return search(compile(lang, script), new SearchLookup(mapperService, fieldDataService, null), vars);
- }
-
- public Object execute(CompiledScript compiledScript, Map vars) {
- return scriptEngines.get(compiledScript.lang()).execute(compiledScript.compiled(), vars);
- }
-
- public void clear() {
- cache.invalidateAll();
- }
-
private boolean dynamicScriptEnabled(String lang) {
ScriptEngineService service = scriptEngines.get(lang);
if (service == null) {
diff --git a/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalOrder.java b/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalOrder.java
index 485c51c65f60d..8413ac05edb95 100644
--- a/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalOrder.java
+++ b/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalOrder.java
@@ -152,7 +152,7 @@ protected Comparator<Terms.Bucket> comparator(Aggregator termsAggregator) {
// attached to the order will still be used in the reduce phase of the Aggregation.
OrderPath path = path();
- final Aggregator aggregator = path.resolveAggregator(termsAggregator, false);
+ final Aggregator aggregator = path.resolveAggregator(termsAggregator);
final String key = path.tokens[path.tokens.length - 1].key;
if (aggregator instanceof SingleBucketAggregator) {
diff --git a/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregator.java b/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregator.java
index 93afda694d874..e669a46e6bd97 100644
--- a/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregator.java
+++ b/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregator.java
@@ -139,7 +139,7 @@ public TermsAggregator(String name, BucketAggregationMode bucketAggregationMode,
// Don't defer any child agg if we are dependent on it for pruning results
if (order instanceof Aggregation){
OrderPath path = ((Aggregation) order).path();
- aggUsedForSorting = path.resolveTopmostAggregator(this, false);
+ aggUsedForSorting = path.resolveTopmostAggregator(this);
}
}
diff --git a/src/main/java/org/elasticsearch/search/aggregations/support/OrderPath.java b/src/main/java/org/elasticsearch/search/aggregations/support/OrderPath.java
index 29a2abfe8d7cb..94eedb36db4c3 100644
--- a/src/main/java/org/elasticsearch/search/aggregations/support/OrderPath.java
+++ b/src/main/java/org/elasticsearch/search/aggregations/support/OrderPath.java
@@ -236,13 +236,9 @@ public double resolveValue(HasAggregations root) {
* Resolves the aggregator pointed by this path using the given root as a point of reference.
*
* @param root The point of reference of this path
- * @param validate Indicates whether the path should be validated first over the given root aggregator
* @return The aggregator pointed by this path starting from the given aggregator as a point of reference
*/
- public Aggregator resolveAggregator(Aggregator root, boolean validate) {
- if (validate) {
- validate(root);
- }
+ public Aggregator resolveAggregator(Aggregator root) {
Aggregator aggregator = root;
for (int i = 0; i < tokens.length; i++) {
OrderPath.Token token = tokens[i];
@@ -258,14 +254,9 @@ public Aggregator resolveAggregator(Aggregator root, boolean validate) {
* Resolves the topmost aggregator pointed by this path using the given root as a point of reference.
*
* @param root The point of reference of this path
- * @param validate Indicates whether the path should be validated first over the given root aggregator
* @return The first child aggregator of the root pointed by this path
*/
- public Aggregator resolveTopmostAggregator(Aggregator root, boolean validate) {
- if (validate) {
- validate(root);
- }
-
+ public Aggregator resolveTopmostAggregator(Aggregator root) {
OrderPath.Token token = tokens[0];
Aggregator aggregator = root.subAggregator(token.name);
assert (aggregator instanceof SingleBucketAggregator )
diff --git a/src/test/java/org/elasticsearch/common/BooleansTests.java b/src/test/java/org/elasticsearch/common/BooleansTests.java
index 7440d8cba6d46..252baaba25770 100644
--- a/src/test/java/org/elasticsearch/common/BooleansTests.java
+++ b/src/test/java/org/elasticsearch/common/BooleansTests.java
@@ -23,12 +23,18 @@
import org.hamcrest.Matchers;
import org.junit.Test;
+import java.util.Locale;
+
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.nullValue;
+
public class BooleansTests extends ElasticsearchTestCase {
@Test
public void testIsBoolean() {
String[] booleans = new String[]{"true", "false", "on", "off", "yes", "no", "0", "1"};
String[] notBooleans = new String[]{"11", "00", "sdfsdfsf", "F", "T"};
+ assertThat(Booleans.isBoolean(null, 0, 1), is(false));
for (String b : booleans) {
String t = "prefix" + b + "suffix";
@@ -40,4 +46,33 @@ public void testIsBoolean() {
assertThat("recognized [" + nb + "] as boolean", Booleans.isBoolean(t.toCharArray(), "prefix".length(), nb.length()), Matchers.equalTo(false));
}
}
+ @Test
+ public void parseBoolean() {
+ assertThat(Booleans.parseBoolean(randomFrom("true", "on", "yes", "1"), randomBoolean()), is(true));
+ assertThat(Booleans.parseBoolean(randomFrom("false", "off", "no", "0"), randomBoolean()), is(false));
+ assertThat(Booleans.parseBoolean(randomFrom("true", "on", "yes").toUpperCase(Locale.ROOT), randomBoolean()), is(true));
+ assertThat(Booleans.parseBoolean(null, false), is(false));
+ assertThat(Booleans.parseBoolean(null, true), is(true));
+
+ assertThat(Booleans.parseBoolean(randomFrom("true", "on", "yes", "1"), randomFrom(null, Boolean.TRUE, Boolean.FALSE)), is(true));
+ assertThat(Booleans.parseBoolean(randomFrom("false", "off", "no", "0"), randomFrom(null, Boolean.TRUE, Boolean.FALSE)), is(false));
+ assertThat(Booleans.parseBoolean(randomFrom("true", "on", "yes").toUpperCase(Locale.ROOT),randomFrom(null, Boolean.TRUE, Boolean.FALSE)), is(true));
+ assertThat(Booleans.parseBoolean(null, Boolean.FALSE), is(false));
+ assertThat(Booleans.parseBoolean(null, Boolean.TRUE), is(true));
+ assertThat(Booleans.parseBoolean(null, null), nullValue());
+
+ char[] chars = randomFrom("true", "on", "yes", "1").toCharArray();
+ assertThat(Booleans.parseBoolean(chars, 0, chars.length, randomBoolean()), is(true));
+ chars = randomFrom("false", "off", "no", "0").toCharArray();
+ assertThat(Booleans.parseBoolean(chars,0, chars.length, randomBoolean()), is(false));
+ chars = randomFrom("true", "on", "yes").toUpperCase(Locale.ROOT).toCharArray();
+ assertThat(Booleans.parseBoolean(chars,0, chars.length, randomBoolean()), is(true));
+ }
+
+ public void testIsExplict() {
+ assertThat(Booleans.isExplicitFalse(randomFrom("true", "on", "yes", "1", "foo", null)), is(false));
+ assertThat(Booleans.isExplicitFalse(randomFrom("false", "off", "no", "0")), is(true));
+ assertThat(Booleans.isExplicitTrue(randomFrom("true", "on", "yes", "1")), is(true));
+ assertThat(Booleans.isExplicitTrue(randomFrom("false", "off", "no", "0", "foo", null)), is(false));
+ }
}
diff --git a/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java b/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java
index 1214ffcc38d33..211f7a54b6258 100644
--- a/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java
+++ b/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java
@@ -19,6 +19,8 @@
package org.elasticsearch.common.geo;
+import com.spatial4j.core.shape.Circle;
+import com.spatial4j.core.shape.Rectangle;
import com.spatial4j.core.shape.Shape;
import com.spatial4j.core.shape.ShapeCollection;
import com.spatial4j.core.shape.jts.JtsGeometry;
@@ -103,6 +105,30 @@ public void testParse_multiLineString() throws IOException {
assertGeometryEquals(jtsGeom(expected), multilinesGeoJson);
}
+ @Test
+ public void testParse_circle() throws IOException {
+ String multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "circle")
+ .startArray("coordinates").value(100.0).value(0.0).endArray()
+ .field("radius", "100m")
+ .endObject().string();
+
+ Circle expected = SPATIAL_CONTEXT.makeCircle(100.0, 0.0, 360 * 100 / GeoUtils.EARTH_EQUATOR);
+ assertGeometryEquals(expected, multilinesGeoJson);
+ }
+
+ @Test
+ public void testParse_envelope() throws IOException {
+ String multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "envelope")
+ .startArray("coordinates")
+ .startArray().value(-50).value(30).endArray()
+ .startArray().value(50).value(-30).endArray()
+ .endArray()
+ .endObject().string();
+
+ Rectangle expected = SPATIAL_CONTEXT.makeRectangle(-50, 50, -30, 30);
+ assertGeometryEquals(expected, multilinesGeoJson);
+ }
+
@Test
public void testParse_polygonNoHoles() throws IOException {
String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
diff --git a/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java b/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java
index 060288178a947..4c6703d3ef3fe 100644
--- a/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java
+++ b/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java
@@ -69,6 +69,34 @@ public void testNewPolygon() {
assertEquals(exterior.getCoordinateN(2), new Coordinate(45, -30));
assertEquals(exterior.getCoordinateN(3), new Coordinate(-45, -30));
}
+
+ @Test
+ public void testNewPolygon_coordinate() {
+ Polygon polygon = ShapeBuilder.newPolygon()
+ .point(new Coordinate(-45, 30))
+ .point(new Coordinate(45, 30))
+ .point(new Coordinate(45, -30))
+ .point(new Coordinate(-45, -30))
+ .point(new Coordinate(-45, 30)).toPolygon();
+
+ LineString exterior = polygon.getExteriorRing();
+ assertEquals(exterior.getCoordinateN(0), new Coordinate(-45, 30));
+ assertEquals(exterior.getCoordinateN(1), new Coordinate(45, 30));
+ assertEquals(exterior.getCoordinateN(2), new Coordinate(45, -30));
+ assertEquals(exterior.getCoordinateN(3), new Coordinate(-45, -30));
+ }
+
+ @Test
+ public void testNewPolygon_coordinates() {
+ Polygon polygon = ShapeBuilder.newPolygon()
+ .points(new Coordinate(-45, 30), new Coordinate(45, 30), new Coordinate(45, -30), new Coordinate(-45, -30), new Coordinate(-45, 30)).toPolygon();
+
+ LineString exterior = polygon.getExteriorRing();
+ assertEquals(exterior.getCoordinateN(0), new Coordinate(-45, 30));
+ assertEquals(exterior.getCoordinateN(1), new Coordinate(45, 30));
+ assertEquals(exterior.getCoordinateN(2), new Coordinate(45, -30));
+ assertEquals(exterior.getCoordinateN(3), new Coordinate(-45, -30));
+ }
@Test
public void testLineStringBuilder() {
diff --git a/src/test/java/org/elasticsearch/index/TermsFilterIntegrationTests.java b/src/test/java/org/elasticsearch/index/TermsFilterIntegrationTests.java
new file mode 100644
index 0000000000000..25e96f7fedded
--- /dev/null
+++ b/src/test/java/org/elasticsearch/index/TermsFilterIntegrationTests.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index;
+
+import org.elasticsearch.common.logging.ESLogger;
+import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.index.query.FilterBuilders;
+import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.test.ElasticsearchIntegrationTest;
+
+import java.util.Arrays;
+
+import static org.elasticsearch.index.query.TermsFilterParser.*;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
+
+public class TermsFilterIntegrationTests extends ElasticsearchIntegrationTest {
+
+ private final ESLogger logger = Loggers.getLogger(TermsFilterIntegrationTests.class);
+
+ public void testExecution() throws Exception {
+ assertAcked(prepareCreate("test").addMapping("type", "f", "type=string"));
+ ensureYellow();
+ indexRandom(true,
+ client().prepareIndex("test", "type").setSource("f", new String[] {"a", "b", "c"}),
+ client().prepareIndex("test", "type").setSource("f", "b"));
+
+ for (boolean cache : new boolean[] {false, true}) {
+ logger.info("cache=" + cache);
+ for (String execution : Arrays.asList(
+ EXECUTION_VALUE_PLAIN,
+ EXECUTION_VALUE_FIELDDATA,
+ EXECUTION_VALUE_BOOL,
+ EXECUTION_VALUE_BOOL_NOCACHE,
+ EXECUTION_VALUE_OR,
+ EXECUTION_VALUE_OR_NOCACHE)) {
+ logger.info("Execution=" + execution);
+ assertHitCount(client().prepareCount("test").setQuery(
+ QueryBuilders.filteredQuery(QueryBuilders.matchAllQuery(),
+ FilterBuilders.termsFilter("f", "a", "b").execution(execution).cache(cache))).get(), 2L);
+ }
+
+ for (String execution : Arrays.asList(
+ EXECUTION_VALUE_AND,
+ EXECUTION_VALUE_AND_NOCACHE)) {
+ logger.info("Execution=" + execution);
+ assertHitCount(client().prepareCount("test").setQuery(
+ QueryBuilders.filteredQuery(QueryBuilders.matchAllQuery(),
+ FilterBuilders.termsFilter("f", "a", "b").execution(execution).cache(cache))).get(), 1L);
+ }
+ }
+ }
+
+}
diff --git a/src/test/java/org/elasticsearch/index/engine/internal/InternalEngineTests.java b/src/test/java/org/elasticsearch/index/engine/internal/InternalEngineTests.java
index f92561c3e632b..612274ba3b3ea 100644
--- a/src/test/java/org/elasticsearch/index/engine/internal/InternalEngineTests.java
+++ b/src/test/java/org/elasticsearch/index/engine/internal/InternalEngineTests.java
@@ -170,12 +170,12 @@ private ParsedDocument testParsedDocument(String uid, String id, String type, St
protected Store createStore() throws IOException {
DirectoryService directoryService = new RamDirectoryService(shardId, EMPTY_SETTINGS);
- return new Store(shardId, EMPTY_SETTINGS, null, null, directoryService, new LeastUsedDistributor(directoryService));
+ return new Store(shardId, EMPTY_SETTINGS, null, directoryService, new LeastUsedDistributor(directoryService));
}
protected Store createStoreReplica() throws IOException {
DirectoryService directoryService = new RamDirectoryService(shardId, EMPTY_SETTINGS);
- return new Store(shardId, EMPTY_SETTINGS, null, null, directoryService, new LeastUsedDistributor(directoryService));
+ return new Store(shardId, EMPTY_SETTINGS, null, directoryService, new LeastUsedDistributor(directoryService));
}
protected Translog createTranslog() {
diff --git a/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesTests.java b/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesTests.java
new file mode 100644
index 0000000000000..a51af09f76edb
--- /dev/null
+++ b/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesTests.java
@@ -0,0 +1,111 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.fielddata;
+
+import org.elasticsearch.common.geo.GeoDistance;
+import org.elasticsearch.common.geo.GeoPoint;
+import org.elasticsearch.common.unit.DistanceUnit;
+import org.elasticsearch.test.ElasticsearchTestCase;
+
+import java.util.Arrays;
+
+public class ScriptDocValuesTests extends ElasticsearchTestCase {
+
+ private static MultiGeoPointValues wrap(final GeoPoint... points) {
+ return new MultiGeoPointValues() {
+ int docID = -1;
+
+ @Override
+ public GeoPoint valueAt(int i) {
+ if (docID != 0) {
+ fail();
+ }
+ return points[i];
+ }
+
+ @Override
+ public void setDocument(int docId) {
+ this.docID = docId;
+ }
+
+ @Override
+ public int count() {
+ if (docID != 0) {
+ return 0;
+ }
+ return points.length;
+ }
+ };
+ }
+
+ private static double randomLat() {
+ return randomDouble() * 180 - 90;
+ }
+
+ private static double randomLon() {
+ return randomDouble() * 360 - 180;
+ }
+
+ public void testGeoGetLatLon() {
+ final double lat1 = randomLat();
+ final double lat2 = randomLat();
+ final double lon1 = randomLon();
+ final double lon2 = randomLon();
+ final MultiGeoPointValues values = wrap(new GeoPoint(lat1, lon1), new GeoPoint(lat2, lon2));
+ final ScriptDocValues.GeoPoints script = new ScriptDocValues.GeoPoints(values);
+ script.setNextDocId(1);
+ assertEquals(true, script.isEmpty());
+ script.setNextDocId(0);
+ assertEquals(false, script.isEmpty());
+ assertEquals(new GeoPoint(lat1, lon1), script.getValue());
+ assertEquals(Arrays.asList(new GeoPoint(lat1, lon1), new GeoPoint(lat2, lon2)), script.getValues());
+ assertEquals(lat1, script.getLat(), 0);
+ assertEquals(lon1, script.getLon(), 0);
+ assertTrue(Arrays.equals(new double[] {lat1, lat2}, script.getLats()));
+ assertTrue(Arrays.equals(new double[] {lon1, lon2}, script.getLons()));
+ }
+
+ public void testGeoDistance() {
+ final double lat = randomLat();
+ final double lon = randomLon();
+ final MultiGeoPointValues values = wrap(new GeoPoint(lat, lon));
+ final ScriptDocValues.GeoPoints script = new ScriptDocValues.GeoPoints(values);
+ script.setNextDocId(0);
+
+ final ScriptDocValues.GeoPoints emptyScript = new ScriptDocValues.GeoPoints(wrap());
+ emptyScript.setNextDocId(0);
+
+ final double otherLat = randomLat();
+ final double otherLon = randomLon();
+
+ assertEquals(GeoDistance.ARC.calculate(lat, lon, otherLat, otherLon, DistanceUnit.KILOMETERS),
+ script.arcDistanceInKm(otherLat, otherLon), 0.01);
+ assertEquals(GeoDistance.ARC.calculate(lat, lon, otherLat, otherLon, DistanceUnit.KILOMETERS),
+ script.arcDistanceInKmWithDefault(otherLat, otherLon, 42), 0.01);
+ assertEquals(42, emptyScript.arcDistanceInKmWithDefault(otherLat, otherLon, 42), 0);
+
+ assertEquals(GeoDistance.PLANE.calculate(lat, lon, otherLat, otherLon, DistanceUnit.KILOMETERS),
+ script.distanceInKm(otherLat, otherLon), 0.01);
+ assertEquals(GeoDistance.PLANE.calculate(lat, lon, otherLat, otherLon, DistanceUnit.KILOMETERS),
+ script.distanceInKmWithDefault(otherLat, otherLon, 42), 0.01);
+ assertEquals(42, emptyScript.distanceInKmWithDefault(otherLat, otherLon, 42), 0);
+ }
+
+}
diff --git a/src/test/java/org/elasticsearch/index/mapper/FileBasedMappingsTests.java b/src/test/java/org/elasticsearch/index/mapper/FileBasedMappingsTests.java
new file mode 100644
index 0000000000000..2bc209daa0275
--- /dev/null
+++ b/src/test/java/org/elasticsearch/index/mapper/FileBasedMappingsTests.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.mapper;
+
+import com.google.common.io.Files;
+import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse;
+import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetaData;
+import org.elasticsearch.cluster.ClusterName;
+import org.elasticsearch.common.io.FileSystemUtils;
+import org.elasticsearch.common.logging.ESLogger;
+import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.settings.ImmutableSettings;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.json.JsonXContent;
+import org.elasticsearch.node.Node;
+import org.elasticsearch.node.NodeBuilder;
+import org.elasticsearch.test.ElasticsearchTestCase;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.util.Arrays;
+
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+
+public class FileBasedMappingsTests extends ElasticsearchTestCase {
+
+ private static final String NAME = FileBasedMappingsTests.class.getSimpleName();
+
+ private final ESLogger logger = Loggers.getLogger(FileBasedMappingsTests.class);
+
+ public void testFileBasedMappings() throws Exception {
+ File configDir = Files.createTempDir();
+ File mappingsDir = new File(configDir, "mappings");
+ File indexMappings = new File(new File(mappingsDir, "index"), "type.json");
+ File defaultMappings = new File(new File(mappingsDir, "_default"), "type.json");
+ try {
+ indexMappings.getParentFile().mkdirs();
+ defaultMappings.getParentFile().mkdirs();
+
+ try (XContentBuilder builder = new XContentBuilder(JsonXContent.jsonXContent, new FileOutputStream(indexMappings))) {
+ builder.startObject()
+ .startObject("type")
+ .startObject("properties")
+ .startObject("f")
+ .field("type", "string")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject();
+ }
+
+ try (XContentBuilder builder = new XContentBuilder(JsonXContent.jsonXContent, new FileOutputStream(defaultMappings))) {
+ builder.startObject()
+ .startObject("type")
+ .startObject("properties")
+ .startObject("g")
+ .field("type", "string")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject();
+ }
+
+ Settings settings = ImmutableSettings.builder()
+ .put(ClusterName.SETTING, NAME)
+ .put("node.name", NAME)
+ .put("path.conf", configDir.getAbsolutePath())
+ .put("http.enabled", false)
+ .put("index.store.type", "ram")
+ .put("gateway.type", "none")
+ .build();
+
+ try (Node node = NodeBuilder.nodeBuilder().local(true).data(true).settings(settings).build()) {
+ node.start();
+
+ assertAcked(node.client().admin().indices().prepareCreate("index").addMapping("type", "h", "type=string").get());
+ final GetFieldMappingsResponse response = node.client().admin().indices().prepareGetFieldMappings("index").setTypes("type").setFields("f", "g", "h").get();
+ System.out.println(response.mappings());
+ for (String field : Arrays.asList("f", "g", "h")) {
+ logger.info("Checking field " + field);
+ FieldMappingMetaData fMappings = response.fieldMappings("index", "type", field);
+ assertNotNull(fMappings);
+ assertEquals(field, fMappings.fullName());
+ }
+ }
+ } finally {
+ FileSystemUtils.deleteRecursively(configDir);
+ }
+ }
+
+}
diff --git a/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalRootMapper.java b/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalRootMapper.java
new file mode 100644
index 0000000000000..cb9596917c92c
--- /dev/null
+++ b/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalRootMapper.java
@@ -0,0 +1,105 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.mapper.externalvalues;
+
+import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.document.StringField;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.index.mapper.*;
+
+import java.io.IOException;
+import java.util.Map;
+
+public class ExternalRootMapper implements RootMapper {
+
+ static final String CONTENT_TYPE = "_external_root";
+ static final String FIELD_NAME = "_is_external";
+ static final String FIELD_VALUE = "true";
+
+ @Override
+ public String name() {
+ return CONTENT_TYPE;
+ }
+
+ @Override
+ public void parse(ParseContext context) throws IOException {
+ }
+
+ @Override
+ public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
+ if (!(mergeWith instanceof ExternalRootMapper)) {
+ mergeContext.addConflict("Trying to merge " + mergeWith + " with " + this);
+ }
+ }
+
+ @Override
+ public void traverse(FieldMapperListener fieldMapperListener) {
+ }
+
+ @Override
+ public void traverse(ObjectMapperListener objectMapperListener) {
+ }
+
+ @Override
+ public void close() {
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ return builder.startObject(CONTENT_TYPE).endObject();
+ }
+
+ @Override
+ public void preParse(ParseContext context) throws IOException {
+ }
+
+ @Override
+ public void postParse(ParseContext context) throws IOException {
+ context.doc().add(new StringField(FIELD_NAME, FIELD_VALUE, Store.YES));
+ }
+
+ @Override
+ public boolean includeInObject() {
+ return false;
+ }
+
+ public static class Builder extends Mapper.Builder<Builder, ExternalRootMapper> {
+
+ protected Builder() {
+ super(CONTENT_TYPE);
+ }
+
+ @Override
+ public ExternalRootMapper build(BuilderContext context) {
+ return new ExternalRootMapper();
+ }
+
+ }
+
+ public static class TypeParser implements Mapper.TypeParser {
+
+ @Override
+ public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
+ return new Builder();
+ }
+
+ }
+
+}
diff --git a/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationTests.java b/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationTests.java
index 5d61f770909a5..d92c5a0fa6961 100644
--- a/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationTests.java
+++ b/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationTests.java
@@ -49,6 +49,8 @@ protected Settings nodeSettings(int nodeOrdinal) {
public void testExternalValues() throws Exception {
prepareCreate("test-idx").addMapping("type",
XContentFactory.jsonBuilder().startObject().startObject("type")
+ .startObject(ExternalRootMapper.CONTENT_TYPE)
+ .endObject()
.startObject("properties")
.startObject("field").field("type", RegisterExternalTypes.EXTERNAL).endObject()
.endObject()
diff --git a/src/test/java/org/elasticsearch/index/mapper/externalvalues/RegisterExternalTypes.java b/src/test/java/org/elasticsearch/index/mapper/externalvalues/RegisterExternalTypes.java
index 8a9ed9cac8e55..5cd8110a37166 100755
--- a/src/test/java/org/elasticsearch/index/mapper/externalvalues/RegisterExternalTypes.java
+++ b/src/test/java/org/elasticsearch/index/mapper/externalvalues/RegisterExternalTypes.java
@@ -35,6 +35,7 @@ public class RegisterExternalTypes extends AbstractIndexComponent {
public RegisterExternalTypes(Index index, @IndexSettings Settings indexSettings, MapperService mapperService) {
super(index, indexSettings);
+ mapperService.documentMapperParser().putRootTypeParser(ExternalRootMapper.CONTENT_TYPE, new ExternalRootMapper.TypeParser());
mapperService.documentMapperParser().putTypeParser(EXTERNAL, new ExternalMapper.TypeParser(EXTERNAL, "foo"));
mapperService.documentMapperParser().putTypeParser(EXTERNAL_BIS, new ExternalMapper.TypeParser(EXTERNAL_BIS, "bar"));
mapperService.documentMapperParser().putTypeParser(EXTERNAL_UPPER, new ExternalMapper.TypeParser(EXTERNAL_UPPER, "FOO BAR"));
diff --git a/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java b/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java
index 504bf4f4a368a..d9765dcb96b13 100644
--- a/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java
+++ b/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java
@@ -36,11 +36,15 @@ public class SimpleExternalMappingTests extends ElasticsearchSingleNodeLuceneTes
@Test
public void testExternalValues() throws Exception {
MapperService mapperService = createIndex("test").mapperService();
+ mapperService.documentMapperParser().putRootTypeParser(ExternalRootMapper.CONTENT_TYPE,
+ new ExternalRootMapper.TypeParser());
mapperService.documentMapperParser().putTypeParser(RegisterExternalTypes.EXTERNAL,
new ExternalMapper.TypeParser(RegisterExternalTypes.EXTERNAL, "foo"));
DocumentMapper documentMapper = mapperService.documentMapperParser().parse(
XContentFactory.jsonBuilder().startObject().startObject("type")
+ .startObject(ExternalRootMapper.CONTENT_TYPE)
+ .endObject()
.startObject("properties")
.startObject("field").field("type", "external").endObject()
.endObject()
@@ -64,6 +68,7 @@ public void testExternalValues() throws Exception {
assertThat(doc.rootDoc().getField("field.field"), notNullValue());
assertThat(doc.rootDoc().getField("field.field").stringValue(), is("foo"));
+ assertThat(doc.rootDoc().getField(ExternalRootMapper.FIELD_NAME).stringValue(), is(ExternalRootMapper.FIELD_VALUE));
}
diff --git a/src/test/java/org/elasticsearch/index/merge/policy/MergePolicySettingsTest.java b/src/test/java/org/elasticsearch/index/merge/policy/MergePolicySettingsTest.java
index e62aca50b1d5c..46d55f996887f 100644
--- a/src/test/java/org/elasticsearch/index/merge/policy/MergePolicySettingsTest.java
+++ b/src/test/java/org/elasticsearch/index/merge/policy/MergePolicySettingsTest.java
@@ -34,9 +34,7 @@
import java.io.IOException;
import static org.elasticsearch.common.settings.ImmutableSettings.Builder.EMPTY_SETTINGS;
-import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.is;
public class MergePolicySettingsTest extends ElasticsearchTestCase {
@@ -174,7 +172,7 @@ public Settings build(boolean value) {
protected Store createStore(Settings settings) throws IOException {
DirectoryService directoryService = new RamDirectoryService(shardId, EMPTY_SETTINGS);
- return new Store(shardId, settings, null, null, directoryService, new LeastUsedDistributor(directoryService));
+ return new Store(shardId, settings, null, directoryService, new LeastUsedDistributor(directoryService));
}
}
diff --git a/src/test/java/org/elasticsearch/index/store/StoreTest.java b/src/test/java/org/elasticsearch/index/store/StoreTest.java
index 2d31689c5b655..292dd0d9c621e 100644
--- a/src/test/java/org/elasticsearch/index/store/StoreTest.java
+++ b/src/test/java/org/elasticsearch/index/store/StoreTest.java
@@ -40,12 +40,65 @@
import java.nio.file.NoSuchFileException;
import java.util.*;
+import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean;
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomInt;
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomIntBetween;
import static org.hamcrest.Matchers.*;
public class StoreTest extends ElasticsearchLuceneTestCase {
+ @Test
+ public void testRefCount() throws IOException {
+ final ShardId shardId = new ShardId(new Index("index"), 1);
+ DirectoryService directoryService = new LuceneManagedDirectoryService(random());
+ Store store = new Store(shardId, ImmutableSettings.EMPTY, null, directoryService, randomDistributor(directoryService));
+ int incs = randomIntBetween(1, 100);
+ for (int i = 0; i < incs; i++) {
+ if (randomBoolean()) {
+ store.incRef();
+ } else {
+ assertTrue(store.tryIncRef());
+ }
+ store.ensureOpen();
+ }
+
+ for (int i = 0; i < incs; i++) {
+ store.decRef();
+ store.ensureOpen();
+ }
+
+ store.incRef();
+ store.close();
+ for (int i = 0; i < incs; i++) {
+ if (randomBoolean()) {
+ store.incRef();
+ } else {
+ assertTrue(store.tryIncRef());
+ }
+ store.ensureOpen();
+ }
+
+ for (int i = 0; i < incs; i++) {
+ store.decRef();
+ store.ensureOpen();
+ }
+
+ store.decRef();
+ assertFalse(store.tryIncRef());
+ try {
+ store.incRef();
+ fail(" expected exception");
+ } catch (AlreadyClosedException ex) {
+
+ }
+ try {
+ store.ensureOpen();
+ fail(" expected exception");
+ } catch (AlreadyClosedException ex) {
+
+ }
+ }
+
@Test
public void testVerifyingIndexOutput() throws IOException {
Directory dir = newDirectory();
@@ -106,7 +159,7 @@ public void testVerifyingIndexOutputWithBogusInput() throws IOException {
public void testWriteLegacyChecksums() throws IOException {
final ShardId shardId = new ShardId(new Index("index"), 1);
DirectoryService directoryService = new LuceneManagedDirectoryService(random());
- Store store = new Store(shardId, ImmutableSettings.EMPTY, null, null, directoryService, randomDistributor(directoryService));
+ Store store = new Store(shardId, ImmutableSettings.EMPTY, null, directoryService, randomDistributor(directoryService));
// set default codec - all segments need checksums
IndexWriter writer = new IndexWriter(store.directory(), newIndexWriterConfig(random(), TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(actualDefaultCodec()));
int docs = 1 + random().nextInt(100);
@@ -169,7 +222,7 @@ public void testWriteLegacyChecksums() throws IOException {
public void testNewChecksums() throws IOException {
final ShardId shardId = new ShardId(new Index("index"), 1);
DirectoryService directoryService = new LuceneManagedDirectoryService(random());
- Store store = new Store(shardId, ImmutableSettings.EMPTY, null, null, directoryService, randomDistributor(directoryService));
+ Store store = new Store(shardId, ImmutableSettings.EMPTY, null, directoryService, randomDistributor(directoryService));
// set default codec - all segments need checksums
IndexWriter writer = new IndexWriter(store.directory(), newIndexWriterConfig(random(), TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(actualDefaultCodec()));
int docs = 1 + random().nextInt(100);
@@ -224,7 +277,7 @@ public void testNewChecksums() throws IOException {
public void testMixedChecksums() throws IOException {
final ShardId shardId = new ShardId(new Index("index"), 1);
DirectoryService directoryService = new LuceneManagedDirectoryService(random());
- Store store = new Store(shardId, ImmutableSettings.EMPTY, null, null, directoryService, randomDistributor(directoryService));
+ Store store = new Store(shardId, ImmutableSettings.EMPTY, null, directoryService, randomDistributor(directoryService));
// this time random codec....
IndexWriter writer = new IndexWriter(store.directory(), newIndexWriterConfig(random(), TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(actualDefaultCodec()));
int docs = 1 + random().nextInt(100);
@@ -310,7 +363,7 @@ public void testMixedChecksums() throws IOException {
public void testRenameFile() throws IOException {
final ShardId shardId = new ShardId(new Index("index"), 1);
DirectoryService directoryService = new LuceneManagedDirectoryService(random(), false);
- Store store = new Store(shardId, ImmutableSettings.EMPTY, null, null, directoryService, randomDistributor(directoryService));
+ Store store = new Store(shardId, ImmutableSettings.EMPTY, null, directoryService, randomDistributor(directoryService));
{
IndexOutput output = store.directory().createOutput("foo.bar", IOContext.DEFAULT);
int iters = scaledRandomIntBetween(10, 100);
@@ -528,7 +581,7 @@ public void testRecoveryDiff() throws IOException, InterruptedException {
iwc.setMaxThreadStates(1);
final ShardId shardId = new ShardId(new Index("index"), 1);
DirectoryService directoryService = new LuceneManagedDirectoryService(random);
- Store store = new Store(shardId, ImmutableSettings.EMPTY, null, null, directoryService, randomDistributor(random, directoryService));
+ Store store = new Store(shardId, ImmutableSettings.EMPTY, null, directoryService, randomDistributor(random, directoryService));
IndexWriter writer = new IndexWriter(store.directory(), iwc);
final boolean lotsOfSegments = rarely(random);
for (Document d : docs) {
@@ -558,7 +611,7 @@ public void testRecoveryDiff() throws IOException, InterruptedException {
iwc.setMaxThreadStates(1);
final ShardId shardId = new ShardId(new Index("index"), 1);
DirectoryService directoryService = new LuceneManagedDirectoryService(random);
- store = new Store(shardId, ImmutableSettings.EMPTY, null, null, directoryService, randomDistributor(random, directoryService));
+ store = new Store(shardId, ImmutableSettings.EMPTY, null, directoryService, randomDistributor(random, directoryService));
IndexWriter writer = new IndexWriter(store.directory(), iwc);
final boolean lotsOfSegments = rarely(random);
for (Document d : docs) {
diff --git a/src/test/java/org/elasticsearch/script/IndexedScriptTests.java b/src/test/java/org/elasticsearch/script/IndexedScriptTests.java
index e879ccb035f11..3024b0ec6dc8f 100644
--- a/src/test/java/org/elasticsearch/script/IndexedScriptTests.java
+++ b/src/test/java/org/elasticsearch/script/IndexedScriptTests.java
@@ -22,6 +22,7 @@
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.junit.Test;
@@ -40,7 +41,7 @@ public void testFieldIndexedScript() throws ExecutionException, InterruptedExce
List<IndexRequestBuilder> builders = new ArrayList();
builders.add(client().prepareIndex(ScriptService.SCRIPT_INDEX, "groovy", "script1").setSource("{" +
"\"script\":\"2\""+
- "}"));
+ "}").setTimeout(TimeValue.timeValueSeconds(randomIntBetween(2,10))));
builders.add(client().prepareIndex(ScriptService.SCRIPT_INDEX, "groovy", "script2").setSource("{" +
"\"script\":\"factor*2\""+
diff --git a/src/test/java/org/elasticsearch/search/geo/GeoPolygonTests.java b/src/test/java/org/elasticsearch/search/geo/GeoPolygonTests.java
new file mode 100644
index 0000000000000..d370041a24a13
--- /dev/null
+++ b/src/test/java/org/elasticsearch/search/geo/GeoPolygonTests.java
@@ -0,0 +1,104 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.geo;
+
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.test.ElasticsearchIntegrationTest;
+import org.junit.Test;
+
+import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.elasticsearch.index.query.FilterBuilders.geoPolygonFilter;
+import static org.elasticsearch.index.query.QueryBuilders.filteredQuery;
+import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
+import static org.hamcrest.Matchers.anyOf;
+import static org.hamcrest.Matchers.equalTo;
+
[email protected]
+public class GeoPolygonTests extends ElasticsearchIntegrationTest {
+
+ @Override
+ protected void setupSuiteScopeCluster() throws Exception {
+ XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")
+ .startObject("properties").startObject("location").field("type", "geo_point").field("lat_lon", true)
+ .startObject("fielddata").field("format", randomNumericFieldDataFormat()).endObject().endObject().endObject()
+ .endObject().endObject();
+ assertAcked(prepareCreate("test").addMapping("type1", xContentBuilder));
+ ensureGreen();
+
+ indexRandom(true, client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
+ .field("name", "New York")
+ .startObject("location").field("lat", 40.714).field("lon", -74.006).endObject()
+ .endObject()),
+ // to NY: 5.286 km
+ client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject()
+ .field("name", "Times Square")
+ .startObject("location").field("lat", 40.759).field("lon", -73.984).endObject()
+ .endObject()),
+ // to NY: 0.4621 km
+ client().prepareIndex("test", "type1", "3").setSource(jsonBuilder().startObject()
+ .field("name", "Tribeca")
+ .startObject("location").field("lat", 40.718).field("lon", -74.008).endObject()
+ .endObject()),
+ // to NY: 1.055 km
+ client().prepareIndex("test", "type1", "4").setSource(jsonBuilder().startObject()
+ .field("name", "Wall Street")
+ .startObject("location").field("lat", 40.705).field("lon", -74.009).endObject()
+ .endObject()),
+ // to NY: 1.258 km
+ client().prepareIndex("test", "type1", "5").setSource(jsonBuilder().startObject()
+ .field("name", "Soho")
+ .startObject("location").field("lat", 40.725).field("lon", -74).endObject()
+ .endObject()),
+ // to NY: 2.029 km
+ client().prepareIndex("test", "type1", "6").setSource(jsonBuilder().startObject()
+ .field("name", "Greenwich Village")
+ .startObject("location").field("lat", 40.731).field("lon", -73.996).endObject()
+ .endObject()),
+ // to NY: 8.572 km
+ client().prepareIndex("test", "type1", "7").setSource(jsonBuilder().startObject()
+ .field("name", "Brooklyn")
+ .startObject("location").field("lat", 40.65).field("lon", -73.95).endObject()
+ .endObject()));
+ ensureSearchable("test");
+ }
+
+ @Test
+ public void simplePolygonTest() throws Exception {
+
+ SearchResponse searchResponse = client().prepareSearch("test") // from NY
+ .setQuery(filteredQuery(matchAllQuery(), geoPolygonFilter("location")
+ .addPoint(40.7, -74.0)
+ .addPoint(40.7, -74.1)
+ .addPoint(40.8, -74.1)
+ .addPoint(40.8, -74.0)
+ .addPoint(40.7, -74.0)))
+ .execute().actionGet();
+ assertHitCount(searchResponse, 4);
+ assertThat(searchResponse.getHits().hits().length, equalTo(4));
+ for (SearchHit hit : searchResponse.getHits()) {
+ assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5")));
+ }
+ }
+}
diff --git a/src/test/java/org/elasticsearch/search/query/SimpleQueryTests.java b/src/test/java/org/elasticsearch/search/query/SimpleQueryTests.java
index e53c36c50873f..ae6d8addb5ac2 100644
--- a/src/test/java/org/elasticsearch/search/query/SimpleQueryTests.java
+++ b/src/test/java/org/elasticsearch/search/query/SimpleQueryTests.java
@@ -2089,6 +2089,15 @@ public void testSimpleQueryString() throws ExecutionException, InterruptedExcept
assertFirstHit(searchResponse, hasId("5"));
assertSearchHits(searchResponse, "5", "6");
assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("myquery"));
+
+ searchResponse = client().prepareSearch().setQuery(simpleQueryString("spaghetti").field("*body")).get();
+ assertHitCount(searchResponse, 2l);
+ assertSearchHits(searchResponse, "5", "6");
+
+ // Have to bypass the builder here because the builder always uses "fields" instead of "field"
+ searchResponse = client().prepareSearch().setQuery("{\"simple_query_string\": {\"query\": \"spaghetti\", \"field\": \"_all\"}}").get();
+ assertHitCount(searchResponse, 2l);
+ assertSearchHits(searchResponse, "5", "6");
}
@Test
@@ -2192,6 +2201,11 @@ public void testSimpleQueryStringFlags() throws ExecutionException, InterruptedE
assertHitCount(searchResponse, 3l);
assertSearchHits(searchResponse, "1", "2", "3");
+ // Sending a negative 'flags' value is the same as SimpleQueryStringFlag.ALL
+ searchResponse = client().prepareSearch().setQuery("{\"simple_query_string\": {\"query\": \"foo bar\", \"flags\": -1}}").get();
+ assertHitCount(searchResponse, 3l);
+ assertSearchHits(searchResponse, "1", "2", "3");
+
searchResponse = client().prepareSearch().setQuery(
simpleQueryString("foo | bar")
.defaultOperator(SimpleQueryStringBuilder.Operator.AND)
diff --git a/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java b/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java
index 88108512bfb68..7d1ed84e1b3f0 100644
--- a/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java
+++ b/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java
@@ -75,12 +75,11 @@
import org.elasticsearch.discovery.zen.elect.ElectMasterService;
import org.elasticsearch.index.codec.CodecService;
import org.elasticsearch.index.fielddata.FieldDataType;
+import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMapper.Loading;
-import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper;
-import org.elasticsearch.index.mapper.internal.IdFieldMapper;
-import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
+import org.elasticsearch.index.mapper.internal.*;
import org.elasticsearch.index.merge.policy.*;
import org.elasticsearch.index.merge.scheduler.ConcurrentMergeSchedulerProvider;
import org.elasticsearch.index.merge.scheduler.MergeSchedulerModule;
@@ -95,6 +94,7 @@
import org.elasticsearch.indices.recovery.RecoverySettings;
import org.elasticsearch.indices.store.IndicesStore;
import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchService;
import org.elasticsearch.test.client.RandomizingClient;
import org.hamcrest.Matchers;
@@ -344,6 +344,29 @@ private void randomIndexTemplate() throws IOException {
.field("index", randomFrom("no", "not_analyzed"))
.endObject();
}
+ if (randomBoolean()) {
+ mappings.startObject(TimestampFieldMapper.NAME)
+ .field("enabled", randomBoolean())
+ .startObject("fielddata")
+ .field(FieldDataType.FORMAT_KEY, randomFrom("array", "doc_values"))
+ .endObject()
+ .endObject();
+ }
+ if (randomBoolean()) {
+ mappings.startObject(SizeFieldMapper.NAME)
+ .field("enabled", randomBoolean())
+ .endObject();
+ }
+ if (randomBoolean()) {
+ mappings.startObject(AllFieldMapper.NAME)
+ .field("auto_boost", true)
+ .endObject();
+ }
+ if (randomBoolean()) {
+ mappings.startObject(SourceFieldMapper.NAME)
+ .field("compress", randomBoolean())
+ .endObject();
+ }
if (compatibilityVersion().onOrAfter(Version.V_1_3_0)) {
mappings.startObject(FieldNamesFieldMapper.NAME)
.startObject("fielddata")
@@ -420,6 +443,7 @@ private static ImmutableSettings.Builder setRandomSettings(Random random, Immuta
setRandomMerge(random, builder);
setRandomTranslogSettings(random, builder);
setRandomNormsLoading(random, builder);
+ setRandomScriptingSettings(random, builder);
if (random.nextBoolean()) {
if (random.nextInt(10) == 0) { // do something crazy slow here
builder.put(IndicesStore.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC, new ByteSizeValue(RandomInts.randomIntBetween(random, 1, 10), ByteSizeUnit.MB));
@@ -453,6 +477,23 @@ private static ImmutableSettings.Builder setRandomSettings(Random random, Immuta
builder.put(IndicesQueryCache.INDEX_CACHE_QUERY_ENABLED, random.nextBoolean());
}
+ if (random.nextBoolean()) {
+ builder.put(IndexFieldDataService.FIELDDATA_CACHE_KEY, randomFrom(
+ IndexFieldDataService.FIELDDATA_CACHE_VALUE_NODE,
+ IndexFieldDataService.FIELDDATA_CACHE_VALUE_RESIDENT,
+ IndexFieldDataService.FIELDDATA_CACHE_VALUE_SOFT));
+ }
+
+ return builder;
+ }
+
+ private static ImmutableSettings.Builder setRandomScriptingSettings(Random random, ImmutableSettings.Builder builder) {
+ if (random.nextBoolean()) {
+ builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING, RandomInts.randomIntBetween(random, -100, 2000));
+ }
+ if (random.nextBoolean()) {
+ builder.put(ScriptService.SCRIPT_CACHE_EXPIRE_SETTING, TimeValue.timeValueMillis(RandomInts.randomIntBetween(random, 750, 10000000)));
+ }
return builder;
}
diff --git a/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java b/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java
index e4cb23b921fec..1c3c636beee9a 100644
--- a/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java
+++ b/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java
@@ -21,6 +21,8 @@
import com.spatial4j.core.shape.Shape;
import com.spatial4j.core.shape.ShapeCollection;
+import com.spatial4j.core.shape.impl.GeoCircle;
+import com.spatial4j.core.shape.impl.RectangleImpl;
import com.spatial4j.core.shape.jts.JtsGeometry;
import com.spatial4j.core.shape.jts.JtsPoint;
import com.vividsolutions.jts.geom.*;
@@ -197,6 +199,10 @@ public static void assertEquals(Shape s1, Shape s2) {
Assert.assertEquals(p1, p2);
} else if (s1 instanceof ShapeCollection && s2 instanceof ShapeCollection) {
assertEquals((ShapeCollection)s1, (ShapeCollection)s2);
+ } else if (s1 instanceof GeoCircle && s2 instanceof GeoCircle) {
+ Assert.assertEquals((GeoCircle)s1, (GeoCircle)s2);
+ } else if (s1 instanceof RectangleImpl && s2 instanceof RectangleImpl) {
+ Assert.assertEquals((RectangleImpl)s1, (RectangleImpl)s2);
} else {
//We want to know the type of the shape because we test shape equality in a special way...
//... in particular we test that one ring is equivalent to another ring even if the points are rotated or reversed.
diff --git a/src/test/java/org/elasticsearch/test/store/MockRamIndexStore.java b/src/test/java/org/elasticsearch/test/store/MockRamIndexStore.java
index 51aacad21804c..a8ce2ac036210 100644
--- a/src/test/java/org/elasticsearch/test/store/MockRamIndexStore.java
+++ b/src/test/java/org/elasticsearch/test/store/MockRamIndexStore.java
@@ -20,16 +20,12 @@
package org.elasticsearch.test.store;
import org.elasticsearch.common.inject.Inject;
-
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.service.IndexService;
import org.elasticsearch.index.store.DirectoryService;
import org.elasticsearch.index.store.support.AbstractIndexStore;
import org.elasticsearch.indices.store.IndicesStore;
-import org.elasticsearch.monitor.jvm.JvmInfo;
-import org.elasticsearch.monitor.jvm.JvmStats;
public class MockRamIndexStore extends AbstractIndexStore{
@@ -47,15 +43,4 @@ public boolean persistent() {
public Class<? extends DirectoryService> shardDirectory() {
return MockRamDirectoryService.class;
}
-
- @Override
- public ByteSizeValue backingStoreTotalSpace() {
- return JvmInfo.jvmInfo().getMem().heapMax();
- }
-
- @Override
- public ByteSizeValue backingStoreFreeSpace() {
- return JvmStats.jvmStats().getMem().heapUsed();
- }
-
}
|
a2444e9494e0bf9572f0c76f3307080974ce3b56
|
elasticsearch
|
Remove get index templates deprecated methods In- 0.90.4, we deprecated some code:--* `GetIndexTemplatesRequest-GetIndexTemplatesRequest(String)` moved to `GetIndexTemplatesRequest-GetIndexTemplatesRequest(String...)`-* `GetIndexTemplatesRequest-name(String)` moved to `GetIndexTemplatesRequest-names(String...)`-* `GetIndexTemplatesRequest-name()` moved to `GetIndexTemplatesRequest-names()`--* `GetIndexTemplatesRequestBuilder-GetIndexTemplatesRequestBuilder(IndicesAdminClient, String)` moved to `GetIndexTemplatesRequestBuilder-GetIndexTemplatesRequestBuilder(IndicesAdminClient, String...)`--* `IndicesAdminClient-prepareGetTemplates(String)` moved to `IndicesAdminClient-prepareGetTemplates(String...)`--* `AbstractIndicesAdminClient-prepareGetTemplates(String)` moved to `AbstractIndicesAdminClient-prepareGetTemplates(String...)`--We can now remove that old methods in 1.0.--**Note**: it breaks the Java API--Relative to -2532.-Closes -3681.-
|
p
|
https://github.com/elastic/elasticsearch
|
diff --git a/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequest.java
index 13256cb97efd1..50864ce2cc9dd 100644
--- a/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequest.java
+++ b/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequest.java
@@ -38,12 +38,6 @@ public class GetIndexTemplatesRequest extends MasterNodeOperationRequest<GetInde
public GetIndexTemplatesRequest() {}
- @Deprecated
- public GetIndexTemplatesRequest(String name) {
- this.names = new String[1];
- this.names[0] = name;
- }
-
public GetIndexTemplatesRequest(String... names) {
this.names = names;
}
@@ -63,27 +57,6 @@ public ActionRequestValidationException validate() {
return validationException;
}
- /**
- * Sets the name of the index template.
- */
- @Deprecated
- public GetIndexTemplatesRequest name(String name) {
- this.names = new String[1];
- this.names[0] = name;
- return this;
- }
-
- /**
- * The name of the index template.
- */
- @Deprecated
- public String name() {
- if (this.names != null && this.names.length > 0) {
- return this.names[0];
- }
- return null;
- }
-
/**
* Sets the names of the index templates.
*/
diff --git a/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequestBuilder.java b/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequestBuilder.java
index 53d0872d66061..254541618dd35 100644
--- a/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequestBuilder.java
+++ b/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequestBuilder.java
@@ -32,11 +32,6 @@ public GetIndexTemplatesRequestBuilder(IndicesAdminClient indicesClient) {
super((InternalIndicesAdminClient) indicesClient, new GetIndexTemplatesRequest());
}
- @Deprecated
- public GetIndexTemplatesRequestBuilder(IndicesAdminClient indicesClient, String name) {
- super((InternalIndicesAdminClient) indicesClient, new GetIndexTemplatesRequest(name));
- }
-
public GetIndexTemplatesRequestBuilder(IndicesAdminClient indicesClient, String... names) {
super((InternalIndicesAdminClient) indicesClient, new GetIndexTemplatesRequest(names));
}
diff --git a/src/main/java/org/elasticsearch/client/IndicesAdminClient.java b/src/main/java/org/elasticsearch/client/IndicesAdminClient.java
index 5bb2b0149523e..5df537e97bc73 100644
--- a/src/main/java/org/elasticsearch/client/IndicesAdminClient.java
+++ b/src/main/java/org/elasticsearch/client/IndicesAdminClient.java
@@ -649,14 +649,6 @@ public interface IndicesAdminClient {
*/
void getTemplates(GetIndexTemplatesRequest request, ActionListener<GetIndexTemplatesResponse> listener);
- /**
- * Gets an index template.
- *
- * @param name The name of the template.
- */
- @Deprecated
- GetIndexTemplatesRequestBuilder prepareGetTemplates(String name);
-
/**
* Gets an index template (optional).
*/
diff --git a/src/main/java/org/elasticsearch/client/support/AbstractIndicesAdminClient.java b/src/main/java/org/elasticsearch/client/support/AbstractIndicesAdminClient.java
index 66da282205950..04ce96c45bd76 100644
--- a/src/main/java/org/elasticsearch/client/support/AbstractIndicesAdminClient.java
+++ b/src/main/java/org/elasticsearch/client/support/AbstractIndicesAdminClient.java
@@ -509,11 +509,6 @@ public void getTemplates(final GetIndexTemplatesRequest request, final ActionLis
execute(GetIndexTemplatesAction.INSTANCE, request, listener);
}
- @Override @Deprecated
- public GetIndexTemplatesRequestBuilder prepareGetTemplates(String name) {
- return new GetIndexTemplatesRequestBuilder(this, name);
- }
-
@Override
public GetIndexTemplatesRequestBuilder prepareGetTemplates(String... names) {
return new GetIndexTemplatesRequestBuilder(this, names);
|
50ac59fa8530bbd35c21cd61cfd64d2bd7d3eb57
|
hbase
|
HBASE-11558 Caching set on Scan object gets lost- when using TableMapReduceUtil in 0.95+ (Ishan Chhabra)--
|
c
|
https://github.com/apache/hbase
|
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index f7531eec052e..eea3b72fe79e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -904,6 +904,9 @@ public static ClientProtos.Scan toScan(
if (scan.getConsistency() == Consistency.TIMELINE) {
scanBuilder.setConsistency(toConsistency(scan.getConsistency()));
}
+ if (scan.getCaching() > 0) {
+ scanBuilder.setCaching(scan.getCaching());
+ }
return scanBuilder.build();
}
@@ -986,6 +989,9 @@ public static Scan toScan(
if (proto.hasConsistency()) {
scan.setConsistency(toConsistency(proto.getConsistency()));
}
+ if (proto.hasCaching()) {
+ scan.setCaching(proto.getCaching());
+ }
return scan;
}
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
index 6956b310b154..c197eb706c95 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
@@ -13658,6 +13658,16 @@ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder ge
* <code>optional .Consistency consistency = 16 [default = STRONG];</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency();
+
+ // optional uint32 caching = 17;
+ /**
+ * <code>optional uint32 caching = 17;</code>
+ */
+ boolean hasCaching();
+ /**
+ * <code>optional uint32 caching = 17;</code>
+ */
+ int getCaching();
}
/**
* Protobuf type {@code Scan}
@@ -13829,6 +13839,11 @@ private Scan(
}
break;
}
+ case 136: {
+ bitField0_ |= 0x00004000;
+ caching_ = input.readUInt32();
+ break;
+ }
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
@@ -14191,6 +14206,22 @@ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getCo
return consistency_;
}
+ // optional uint32 caching = 17;
+ public static final int CACHING_FIELD_NUMBER = 17;
+ private int caching_;
+ /**
+ * <code>optional uint32 caching = 17;</code>
+ */
+ public boolean hasCaching() {
+ return ((bitField0_ & 0x00004000) == 0x00004000);
+ }
+ /**
+ * <code>optional uint32 caching = 17;</code>
+ */
+ public int getCaching() {
+ return caching_;
+ }
+
private void initFields() {
column_ = java.util.Collections.emptyList();
attribute_ = java.util.Collections.emptyList();
@@ -14208,6 +14239,7 @@ private void initFields() {
small_ = false;
reversed_ = false;
consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
+ caching_ = 0;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
@@ -14287,6 +14319,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
if (((bitField0_ & 0x00002000) == 0x00002000)) {
output.writeEnum(16, consistency_.getNumber());
}
+ if (((bitField0_ & 0x00004000) == 0x00004000)) {
+ output.writeUInt32(17, caching_);
+ }
getUnknownFields().writeTo(output);
}
@@ -14360,6 +14395,10 @@ public int getSerializedSize() {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(16, consistency_.getNumber());
}
+ if (((bitField0_ & 0x00004000) == 0x00004000)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt32Size(17, caching_);
+ }
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
@@ -14457,6 +14496,11 @@ public boolean equals(final java.lang.Object obj) {
result = result &&
(getConsistency() == other.getConsistency());
}
+ result = result && (hasCaching() == other.hasCaching());
+ if (hasCaching()) {
+ result = result && (getCaching()
+ == other.getCaching());
+ }
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
@@ -14534,6 +14578,10 @@ public int hashCode() {
hash = (37 * hash) + CONSISTENCY_FIELD_NUMBER;
hash = (53 * hash) + hashEnum(getConsistency());
}
+ if (hasCaching()) {
+ hash = (37 * hash) + CACHING_FIELD_NUMBER;
+ hash = (53 * hash) + getCaching();
+ }
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
@@ -14706,6 +14754,8 @@ public Builder clear() {
bitField0_ = (bitField0_ & ~0x00004000);
consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
bitField0_ = (bitField0_ & ~0x00008000);
+ caching_ = 0;
+ bitField0_ = (bitField0_ & ~0x00010000);
return this;
}
@@ -14816,6 +14866,10 @@ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan buildPartial
to_bitField0_ |= 0x00002000;
}
result.consistency_ = consistency_;
+ if (((from_bitField0_ & 0x00010000) == 0x00010000)) {
+ to_bitField0_ |= 0x00004000;
+ }
+ result.caching_ = caching_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
@@ -14926,6 +14980,9 @@ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos
if (other.hasConsistency()) {
setConsistency(other.getConsistency());
}
+ if (other.hasCaching()) {
+ setCaching(other.getCaching());
+ }
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
@@ -16106,6 +16163,39 @@ public Builder clearConsistency() {
return this;
}
+ // optional uint32 caching = 17;
+ private int caching_ ;
+ /**
+ * <code>optional uint32 caching = 17;</code>
+ */
+ public boolean hasCaching() {
+ return ((bitField0_ & 0x00010000) == 0x00010000);
+ }
+ /**
+ * <code>optional uint32 caching = 17;</code>
+ */
+ public int getCaching() {
+ return caching_;
+ }
+ /**
+ * <code>optional uint32 caching = 17;</code>
+ */
+ public Builder setCaching(int value) {
+ bitField0_ |= 0x00010000;
+ caching_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional uint32 caching = 17;</code>
+ */
+ public Builder clearCaching() {
+ bitField0_ = (bitField0_ & ~0x00010000);
+ caching_ = 0;
+ onChanged();
+ return this;
+ }
+
// @@protoc_insertion_point(builder_scope:Scan)
}
@@ -30643,7 +30733,7 @@ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse mul
"(\0132\016.MutationProto\022\035\n\tcondition\030\003 \001(\0132\n." +
"Condition\022\023\n\013nonce_group\030\004 \001(\004\"<\n\016Mutate" +
"Response\022\027\n\006result\030\001 \001(\0132\007.Result\022\021\n\tpro" +
- "cessed\030\002 \001(\010\"\250\003\n\004Scan\022\027\n\006column\030\001 \003(\0132\007." +
+ "cessed\030\002 \001(\010\"\271\003\n\004Scan\022\027\n\006column\030\001 \003(\0132\007." +
"Column\022!\n\tattribute\030\002 \003(\0132\016.NameBytesPai" +
"r\022\021\n\tstart_row\030\003 \001(\014\022\020\n\010stop_row\030\004 \001(\014\022\027",
"\n\006filter\030\005 \001(\0132\007.Filter\022\036\n\ntime_range\030\006 " +
@@ -30653,55 +30743,55 @@ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse mul
"re_limit\030\013 \001(\r\022\024\n\014store_offset\030\014 \001(\r\022&\n\036" +
"load_column_families_on_demand\030\r \001(\010\022\r\n\005" +
"small\030\016 \001(\010\022\027\n\010reversed\030\017 \001(\010:\005false\022)\n\013" +
- "consistency\030\020 \001(\0162\014.Consistency:\006STRONG\"" +
- "\236\001\n\013ScanRequest\022 \n\006region\030\001 \001(\0132\020.Region" +
- "Specifier\022\023\n\004scan\030\002 \001(\0132\005.Scan\022\022\n\nscanne",
- "r_id\030\003 \001(\004\022\026\n\016number_of_rows\030\004 \001(\r\022\025\n\rcl" +
- "ose_scanner\030\005 \001(\010\022\025\n\rnext_call_seq\030\006 \001(\004" +
- "\"\210\001\n\014ScanResponse\022\030\n\020cells_per_result\030\001 " +
- "\003(\r\022\022\n\nscanner_id\030\002 \001(\004\022\024\n\014more_results\030" +
- "\003 \001(\010\022\013\n\003ttl\030\004 \001(\r\022\030\n\007results\030\005 \003(\0132\007.Re" +
- "sult\022\r\n\005stale\030\006 \001(\010\"\263\001\n\024BulkLoadHFileReq" +
- "uest\022 \n\006region\030\001 \002(\0132\020.RegionSpecifier\0225" +
- "\n\013family_path\030\002 \003(\0132 .BulkLoadHFileReque" +
- "st.FamilyPath\022\026\n\016assign_seq_num\030\003 \001(\010\032*\n" +
- "\nFamilyPath\022\016\n\006family\030\001 \002(\014\022\014\n\004path\030\002 \002(",
- "\t\"\'\n\025BulkLoadHFileResponse\022\016\n\006loaded\030\001 \002" +
- "(\010\"a\n\026CoprocessorServiceCall\022\013\n\003row\030\001 \002(" +
- "\014\022\024\n\014service_name\030\002 \002(\t\022\023\n\013method_name\030\003" +
- " \002(\t\022\017\n\007request\030\004 \002(\014\"9\n\030CoprocessorServ" +
- "iceResult\022\035\n\005value\030\001 \001(\0132\016.NameBytesPair" +
- "\"d\n\031CoprocessorServiceRequest\022 \n\006region\030" +
- "\001 \002(\0132\020.RegionSpecifier\022%\n\004call\030\002 \002(\0132\027." +
- "CoprocessorServiceCall\"]\n\032CoprocessorSer" +
- "viceResponse\022 \n\006region\030\001 \002(\0132\020.RegionSpe" +
- "cifier\022\035\n\005value\030\002 \002(\0132\016.NameBytesPair\"{\n",
- "\006Action\022\r\n\005index\030\001 \001(\r\022 \n\010mutation\030\002 \001(\013" +
- "2\016.MutationProto\022\021\n\003get\030\003 \001(\0132\004.Get\022-\n\014s" +
- "ervice_call\030\004 \001(\0132\027.CoprocessorServiceCa" +
- "ll\"Y\n\014RegionAction\022 \n\006region\030\001 \002(\0132\020.Reg" +
- "ionSpecifier\022\016\n\006atomic\030\002 \001(\010\022\027\n\006action\030\003" +
- " \003(\0132\007.Action\"\221\001\n\021ResultOrException\022\r\n\005i" +
- "ndex\030\001 \001(\r\022\027\n\006result\030\002 \001(\0132\007.Result\022!\n\te" +
- "xception\030\003 \001(\0132\016.NameBytesPair\0221\n\016servic" +
- "e_result\030\004 \001(\0132\031.CoprocessorServiceResul" +
- "t\"f\n\022RegionActionResult\022-\n\021resultOrExcep",
- "tion\030\001 \003(\0132\022.ResultOrException\022!\n\texcept" +
- "ion\030\002 \001(\0132\016.NameBytesPair\"G\n\014MultiReques" +
- "t\022#\n\014regionAction\030\001 \003(\0132\r.RegionAction\022\022" +
- "\n\nnonceGroup\030\002 \001(\004\"@\n\rMultiResponse\022/\n\022r" +
- "egionActionResult\030\001 \003(\0132\023.RegionActionRe" +
- "sult*\'\n\013Consistency\022\n\n\006STRONG\020\000\022\014\n\010TIMEL" +
- "INE\020\0012\261\002\n\rClientService\022 \n\003Get\022\013.GetRequ" +
- "est\032\014.GetResponse\022)\n\006Mutate\022\016.MutateRequ" +
- "est\032\017.MutateResponse\022#\n\004Scan\022\014.ScanReque" +
- "st\032\r.ScanResponse\022>\n\rBulkLoadHFile\022\025.Bul",
- "kLoadHFileRequest\032\026.BulkLoadHFileRespons" +
- "e\022F\n\013ExecService\022\032.CoprocessorServiceReq" +
- "uest\032\033.CoprocessorServiceResponse\022&\n\005Mul" +
- "ti\022\r.MultiRequest\032\016.MultiResponseBB\n*org" +
- ".apache.hadoop.hbase.protobuf.generatedB" +
- "\014ClientProtosH\001\210\001\001\240\001\001"
+ "consistency\030\020 \001(\0162\014.Consistency:\006STRONG\022" +
+ "\017\n\007caching\030\021 \001(\r\"\236\001\n\013ScanRequest\022 \n\006regi" +
+ "on\030\001 \001(\0132\020.RegionSpecifier\022\023\n\004scan\030\002 \001(\013",
+ "2\005.Scan\022\022\n\nscanner_id\030\003 \001(\004\022\026\n\016number_of" +
+ "_rows\030\004 \001(\r\022\025\n\rclose_scanner\030\005 \001(\010\022\025\n\rne" +
+ "xt_call_seq\030\006 \001(\004\"\210\001\n\014ScanResponse\022\030\n\020ce" +
+ "lls_per_result\030\001 \003(\r\022\022\n\nscanner_id\030\002 \001(\004" +
+ "\022\024\n\014more_results\030\003 \001(\010\022\013\n\003ttl\030\004 \001(\r\022\030\n\007r" +
+ "esults\030\005 \003(\0132\007.Result\022\r\n\005stale\030\006 \001(\010\"\263\001\n" +
+ "\024BulkLoadHFileRequest\022 \n\006region\030\001 \002(\0132\020." +
+ "RegionSpecifier\0225\n\013family_path\030\002 \003(\0132 .B" +
+ "ulkLoadHFileRequest.FamilyPath\022\026\n\016assign" +
+ "_seq_num\030\003 \001(\010\032*\n\nFamilyPath\022\016\n\006family\030\001",
+ " \002(\014\022\014\n\004path\030\002 \002(\t\"\'\n\025BulkLoadHFileRespo" +
+ "nse\022\016\n\006loaded\030\001 \002(\010\"a\n\026CoprocessorServic" +
+ "eCall\022\013\n\003row\030\001 \002(\014\022\024\n\014service_name\030\002 \002(\t" +
+ "\022\023\n\013method_name\030\003 \002(\t\022\017\n\007request\030\004 \002(\014\"9" +
+ "\n\030CoprocessorServiceResult\022\035\n\005value\030\001 \001(" +
+ "\0132\016.NameBytesPair\"d\n\031CoprocessorServiceR" +
+ "equest\022 \n\006region\030\001 \002(\0132\020.RegionSpecifier" +
+ "\022%\n\004call\030\002 \002(\0132\027.CoprocessorServiceCall\"" +
+ "]\n\032CoprocessorServiceResponse\022 \n\006region\030" +
+ "\001 \002(\0132\020.RegionSpecifier\022\035\n\005value\030\002 \002(\0132\016",
+ ".NameBytesPair\"{\n\006Action\022\r\n\005index\030\001 \001(\r\022" +
+ " \n\010mutation\030\002 \001(\0132\016.MutationProto\022\021\n\003get" +
+ "\030\003 \001(\0132\004.Get\022-\n\014service_call\030\004 \001(\0132\027.Cop" +
+ "rocessorServiceCall\"Y\n\014RegionAction\022 \n\006r" +
+ "egion\030\001 \002(\0132\020.RegionSpecifier\022\016\n\006atomic\030" +
+ "\002 \001(\010\022\027\n\006action\030\003 \003(\0132\007.Action\"\221\001\n\021Resul" +
+ "tOrException\022\r\n\005index\030\001 \001(\r\022\027\n\006result\030\002 " +
+ "\001(\0132\007.Result\022!\n\texception\030\003 \001(\0132\016.NameBy" +
+ "tesPair\0221\n\016service_result\030\004 \001(\0132\031.Coproc" +
+ "essorServiceResult\"f\n\022RegionActionResult",
+ "\022-\n\021resultOrException\030\001 \003(\0132\022.ResultOrEx" +
+ "ception\022!\n\texception\030\002 \001(\0132\016.NameBytesPa" +
+ "ir\"G\n\014MultiRequest\022#\n\014regionAction\030\001 \003(\013" +
+ "2\r.RegionAction\022\022\n\nnonceGroup\030\002 \001(\004\"@\n\rM" +
+ "ultiResponse\022/\n\022regionActionResult\030\001 \003(\013" +
+ "2\023.RegionActionResult*\'\n\013Consistency\022\n\n\006" +
+ "STRONG\020\000\022\014\n\010TIMELINE\020\0012\261\002\n\rClientService" +
+ "\022 \n\003Get\022\013.GetRequest\032\014.GetResponse\022)\n\006Mu" +
+ "tate\022\016.MutateRequest\032\017.MutateResponse\022#\n" +
+ "\004Scan\022\014.ScanRequest\032\r.ScanResponse\022>\n\rBu",
+ "lkLoadHFile\022\025.BulkLoadHFileRequest\032\026.Bul" +
+ "kLoadHFileResponse\022F\n\013ExecService\022\032.Copr" +
+ "ocessorServiceRequest\032\033.CoprocessorServi" +
+ "ceResponse\022&\n\005Multi\022\r.MultiRequest\032\016.Mul" +
+ "tiResponseBB\n*org.apache.hadoop.hbase.pr" +
+ "otobuf.generatedB\014ClientProtosH\001\210\001\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -30791,7 +30881,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors(
internal_static_Scan_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_Scan_descriptor,
- new java.lang.String[] { "Column", "Attribute", "StartRow", "StopRow", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "BatchSize", "MaxResultSize", "StoreLimit", "StoreOffset", "LoadColumnFamiliesOnDemand", "Small", "Reversed", "Consistency", });
+ new java.lang.String[] { "Column", "Attribute", "StartRow", "StopRow", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "BatchSize", "MaxResultSize", "StoreLimit", "StoreOffset", "LoadColumnFamiliesOnDemand", "Small", "Reversed", "Consistency", "Caching", });
internal_static_ScanRequest_descriptor =
getDescriptor().getMessageTypes().get(12);
internal_static_ScanRequest_fieldAccessorTable = new
diff --git a/hbase-protocol/src/main/protobuf/Client.proto b/hbase-protocol/src/main/protobuf/Client.proto
index 8c71ef1018dc..9eedd1b0c903 100644
--- a/hbase-protocol/src/main/protobuf/Client.proto
+++ b/hbase-protocol/src/main/protobuf/Client.proto
@@ -247,6 +247,7 @@ message Scan {
optional bool small = 14;
optional bool reversed = 15 [default = false];
optional Consistency consistency = 16 [default = STRONG];
+ optional uint32 caching = 17;
}
/**
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java
index f4b71f788fd1..d51f007f8ed0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java
@@ -291,15 +291,21 @@ public void testScan() throws IOException {
scanBuilder.addColumn(columnBuilder.build());
ClientProtos.Scan proto = scanBuilder.build();
- // default fields
+
+ // Verify default values
assertEquals(1, proto.getMaxVersions());
assertEquals(true, proto.getCacheBlocks());
+ // Verify fields survive ClientProtos.Scan -> Scan -> ClientProtos.Scan
+ // conversion
scanBuilder = ClientProtos.Scan.newBuilder(proto);
- scanBuilder.setMaxVersions(1);
- scanBuilder.setCacheBlocks(true);
-
- Scan scan = ProtobufUtil.toScan(proto);
- assertEquals(scanBuilder.build(), ProtobufUtil.toScan(scan));
+ scanBuilder.setMaxVersions(2);
+ scanBuilder.setCacheBlocks(false);
+ scanBuilder.setCaching(1024);
+ ClientProtos.Scan expectedProto = scanBuilder.build();
+
+ ClientProtos.Scan actualProto = ProtobufUtil.toScan(
+ ProtobufUtil.toScan(expectedProto));
+ assertEquals(expectedProto, actualProto);
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.