diff --git "a/test.csv" "b/test.csv" --- "a/test.csv" +++ "b/test.csv" @@ -1,67642 +1,45195 @@ commit_id,project,commit_message,type,url,git_diff -4f8fa5ff72b1ae2ef4a5c2e4be4f9c8f749da9bb,Delta Spike,"DELTASPIKE-315 add more JavaDoc -",p,https://github.com/apache/deltaspike,"diff --git a/deltaspike/modules/jpa/api/src/main/java/org/apache/deltaspike/test/jpa/api/entitymanager/PersistenceUnitName.java b/deltaspike/modules/jpa/api/src/main/java/org/apache/deltaspike/test/jpa/api/entitymanager/PersistenceUnitName.java -index 45e94581f..5d16153c6 100644 ---- a/deltaspike/modules/jpa/api/src/main/java/org/apache/deltaspike/test/jpa/api/entitymanager/PersistenceUnitName.java -+++ b/deltaspike/modules/jpa/api/src/main/java/org/apache/deltaspike/test/jpa/api/entitymanager/PersistenceUnitName.java -@@ -35,7 +35,7 @@ - * EntityManagerFactoryProducer. - */ - @Target( { TYPE, METHOD, PARAMETER, FIELD }) --@Retention(value= RetentionPolicy.RUNTIME) -+@Retention(value = RetentionPolicy.RUNTIME) - @Documented - @Qualifier - public @interface PersistenceUnitName -diff --git a/deltaspike/modules/jpa/impl/src/main/java/org/apache/deltaspike/jpa/impl/entitymanager/EntityManagerFactoryProducer.java b/deltaspike/modules/jpa/impl/src/main/java/org/apache/deltaspike/jpa/impl/entitymanager/EntityManagerFactoryProducer.java -index 05129f3a5..002e28cb8 100644 ---- a/deltaspike/modules/jpa/impl/src/main/java/org/apache/deltaspike/jpa/impl/entitymanager/EntityManagerFactoryProducer.java -+++ b/deltaspike/modules/jpa/impl/src/main/java/org/apache/deltaspike/jpa/impl/entitymanager/EntityManagerFactoryProducer.java -@@ -32,7 +32,32 @@ +84ff885f72d1ec8228407b6cdc9c09d342b5a0dc,kotlin,Constructor body generation extracted as a method- (+ migrated to Printer for convenience)--,p,https://github.com/JetBrains/kotlin,"diff --git a/injector-generator/src/org/jetbrains/jet/di/DependencyInjectorGenerator.java b/injector-generator/src/org/jetbrains/jet/di/DependencyInjectorGenerator.java +index 6fc139585610e..a0af388cecfbc 100644 +--- a/injector-generator/src/org/jetbrains/jet/di/DependencyInjectorGenerator.java ++++ b/injector-generator/src/org/jetbrains/jet/di/DependencyInjectorGenerator.java +@@ -16,15 +16,13 @@ + package org.jetbrains.jet.di; - /** -- * TODO -+ *

Built in support for injecting EntityManagerFactories into own beans. -+ * The injection point must use the Qualifier {@link PersistenceUnitName} -+ * to express the desired persistence unit name.

-+ * -+ *

The EntityManagerFactory for the given persistence unit will be produced -+ * as @Dependent scoped. It can be used to easily implement own -+ * EntityManagerProviders as shown in the following example which provides -+ * a producer according to the entitymanager-per-request design pattern:

-+ *
-+ * @ApplicationScoped
-+ * public class SampleEntityManagerProducer {
-+ *   @Inject
-+ *   @PersistenceUnitName(""testPersistenceUnit"")
-+ *   private EntityManagerFactory emf;
-+ *
-+ *   @Produces
-+ *   @RequestScoped
-+ *   public EntityManager createEntityManager() {
-+ *     return emf.createEntityManager();
-+ *   }
-+ *
-+ *   public void closeEm(@Disposes EntityManager em) {
-+ *     em.close();
-+ *   }
-+ * }
-+ *  
- */ - public class EntityManagerFactoryProducer - { -@@ -44,7 +69,7 @@ public class EntityManagerFactoryProducer +-import com.google.common.collect.HashMultimap; +-import com.google.common.collect.Lists; +-import com.google.common.collect.Multimap; +-import com.google.common.collect.Sets; ++import com.google.common.collect.*; + import com.intellij.openapi.util.SystemInfo; + import com.intellij.openapi.util.io.FileUtil; + import com.intellij.openapi.util.text.StringUtil; + import org.jetbrains.annotations.NotNull; + import org.jetbrains.annotations.Nullable; ++import org.jetbrains.jet.utils.Printer; - @Produces - @Dependent -- @PersistenceUnitName(""any"") // the value is nonbinding, thus this is just a dummy parameter here -+ @PersistenceUnitName(""any"") // the value is nonbinding, thus 'any' is just a dummy parameter here - public EntityManagerFactory createEntityManagerFactoryForUnit(InjectionPoint injectionPoint) - { - PersistenceUnitName unitNameAnnotation = injectionPoint.getAnnotated().getAnnotation(PersistenceUnitName.class);" -53c2131d4411fac6fbe43fc757bc6acea780e2a1,crashub$crash,"Add flexibility for plugin a custom class manager -",p,https://github.com/crashub/crash,"diff --git a/shell/core/src/main/java/org/crsh/shell/impl/command/AbstractClassManager.java b/shell/core/src/main/java/org/crsh/shell/impl/command/AbstractClassManager.java -new file mode 100644 -index 000000000..f9a8180ba ---- /dev/null -+++ b/shell/core/src/main/java/org/crsh/shell/impl/command/AbstractClassManager.java -@@ -0,0 +1,143 @@ -+/* -+ * Copyright (C) 2012 eXo Platform SAS. -+ * -+ * This is free software; you can redistribute it and/or modify it -+ * under the terms of the GNU Lesser General Public License as -+ * published by the Free Software Foundation; either version 2.1 of -+ * the License, or (at your option) any later version. -+ * -+ * This software is distributed in the hope that it will be useful, -+ * but WITHOUT ANY WARRANTY; without even the implied warranty of -+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ * Lesser General Public License for more details. -+ * -+ * You should have received a copy of the GNU Lesser General Public -+ * License along with this software; if not, write to the Free -+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -+ * 02110-1301 USA, or see the FSF site: http://www.fsf.org. 
-+ */ + import javax.annotation.PostConstruct; + import javax.annotation.PreDestroy; +@@ -281,70 +279,80 @@ private static DiType getEffectiveFieldType(Field field) { + } + + private void generateConstructor(String injectorClassName, PrintStream out) { +- String indent = "" ""; ++ Printer p = new Printer(out); ++ p.pushIndent(); + + // Constructor parameters + if (parameters.isEmpty()) { +- out.println("" public "" + injectorClassName + ""() {""); ++ p.println(""public "", injectorClassName, ""() {""); + } + else { +- out.println("" public "" + injectorClassName + ""(""); ++ p.println(""public "", injectorClassName, ""(""); ++ p.pushIndent(); + for (Iterator iterator = parameters.iterator(); iterator.hasNext(); ) { + Parameter parameter = iterator.next(); +- out.print(indent); ++ p.print(); // indent + if (parameter.isRequired()) { +- out.print(""@NotNull ""); ++ p.printWithNoIndent(""@NotNull ""); + } +- out.print(parameter.getType().getSimpleName() + "" "" + parameter.getName()); ++ p.printWithNoIndent(parameter.getType().getSimpleName(), "" "", parameter.getName()); + if (iterator.hasNext()) { +- out.println("",""); ++ p.printlnWithNoIndent("",""); + } + } +- out.println(); +- out.println("" ) {""); ++ p.printlnWithNoIndent(); ++ p.popIndent(); ++ p.println("") {""); + } + ++ p.pushIndent(); + if (lazy) { + // Remember parameters + for (Parameter parameter : parameters) { +- out.println(indent + ""this."" + parameter.getField().getName() + "" = "" + parameter.getName() + "";""); ++ p.println(""this."", parameter.getField().getName(), "" = "", parameter.getName(), "";""); + } + } + else { +- // Initialize fields +- for (Field field : fields) { +- //if (!backsParameter.contains(field) || field.isPublic()) { +- String prefix = ""this.""; +- out.println(indent + prefix + field.getName() + "" = "" + field.getInitialization().renderAsCode() + "";""); +- //} +- } +- out.println(); ++ generateInitializingCode(p, fields); ++ } + +- // Call setters +- for (Field field : fields) { +- for (SetterDependency dependency : field.getDependencies()) { +- String prefix = field.isPublic() ? ""this."" : """"; +- out.println(indent + prefix + dependency.getDependent().getName() + ""."" + dependency.getSetterName() + ""("" + dependency.getDependency().getName() + "");""); +- } +- if (!field.getDependencies().isEmpty()) { +- out.println(); +- } +- } ++ p.popIndent(); ++ p.println(""}""); ++ } + +- // call @PostConstruct +- for (Field field : fields) { +- // TODO: type of field may be different from type of object +- List postConstructMethods = getPostConstructMethods(getEffectiveFieldType(field).getClazz()); +- for (Method postConstruct : postConstructMethods) { +- out.println(indent + field.getName() + ""."" + postConstruct.getName() + ""();""); +- } +- if (postConstructMethods.size() > 0) { +- out.println(); +- } ++ private static void generateInitializingCode(@NotNull Printer p, @NotNull Collection fields) { ++ // Initialize fields ++ for (Field field : fields) { ++ //if (!backsParameter.contains(field) || field.isPublic()) { ++ p.println(""this."", field.getName(), "" = "", field.getInitialization().renderAsCode(), "";""); ++ //} ++ } ++ p.printlnWithNoIndent(); ++ ++ // Call setters ++ for (Field field : fields) { ++ for (SetterDependency dependency : field.getDependencies()) { ++ String prefix = field.isPublic() ? 
""this."" : """"; ++ String dependencyName = dependency.getDependency().getName(); ++ String dependentName = dependency.getDependent().getName(); ++ p.println(prefix, dependentName, ""."", dependency.getSetterName(), ""("", dependencyName, "");""); ++ } ++ if (!field.getDependencies().isEmpty()) { ++ p.printlnWithNoIndent(); + } + } + +- out.println("" }""); ++ // call @PostConstruct ++ for (Field field : fields) { ++ // TODO: type of field may be different from type of object ++ List postConstructMethods = getPostConstructMethods(getEffectiveFieldType(field).getClazz()); ++ for (Method postConstruct : postConstructMethods) { ++ p.println(field.getName(), ""."", postConstruct.getName(), ""();""); ++ } ++ if (postConstructMethods.size() > 0) { ++ p.printlnWithNoIndent(); ++ } ++ } + } + + private static List getPostConstructMethods(Class clazz) {" +e0e1bdf57aa4f1dae1de5b7a51b58fad1fd4696b,brandonborkholder$glg2d,Abstracted some of the init/tear-down functionality to make it easier to customize.,p,https://github.com/brandonborkholder/glg2d,"diff --git a/src/main/java/glg2d/G2DGLCanvas.java b/src/main/java/glg2d/G2DGLCanvas.java +index 33a0d734..db9405a2 100644 +--- a/src/main/java/glg2d/G2DGLCanvas.java ++++ b/src/main/java/glg2d/G2DGLCanvas.java +@@ -107,7 +107,7 @@ public boolean isGLDrawing() { + /** + * Sets the drawing path, {@code true} for OpenGL, {@code false} for normal + * Java2D. +- * ++ * + * @see #isGLDrawing() + */ + public void setGLDrawing(boolean drawGL) { +@@ -131,12 +131,20 @@ public void setDrawableComponent(JComponent component) { + + drawableComponent = component; + if (drawableComponent != null) { +- g2dglListener = new G2DGLEventListener(drawableComponent); ++ g2dglListener = createG2DListener(drawableComponent); + canvas.addGLEventListener(g2dglListener); + add(drawableComponent); + } + } + ++ /** ++ * Creates the GLEventListener that will draw the given component to the ++ * canvas. ++ */ ++ protected GLEventListener createG2DListener(JComponent drawingComponent) { ++ return new G2DGLEventListener(drawingComponent); ++ } ++ + public JComponent getDrawableComponent() { + return drawableComponent; + } +diff --git a/src/main/java/glg2d/G2DGLEventListener.java b/src/main/java/glg2d/G2DGLEventListener.java +index bcb48139..c5e0c74f 100644 +--- a/src/main/java/glg2d/G2DGLEventListener.java ++++ b/src/main/java/glg2d/G2DGLEventListener.java +@@ -18,7 +18,9 @@ + + import java.awt.Component; + ++import javax.media.opengl.GL; + import javax.media.opengl.GLAutoDrawable; ++import javax.media.opengl.GLContext; + import javax.media.opengl.GLEventListener; + import javax.swing.RepaintManager; + +@@ -36,7 +38,7 @@ public class G2DGLEventListener implements GLEventListener { + * {@code baseComponent} is used to provide default font, backgroundColor, + * etc. to the {@code GLGraphics2D} object. It is also used for width, height + * of the viewport in OpenGL. +- * ++ * + * @param baseComponent + * The component to use for default settings. + */ +@@ -47,9 +49,73 @@ public G2DGLEventListener(Component baseComponent) { + @Override + public void display(GLAutoDrawable drawable) { + g2d.setCanvas(drawable); +- g2d.prePaint(baseComponent); ++ prePaint(drawable.getContext()); + paintGL(g2d); ++ postPaint(drawable.getContext()); ++ } + -+package org.crsh.shell.impl.command; ++ /** ++ * Called after the canvas is set on {@code g2d} but before any painting is ++ * done. This should setup the matrices and ask {@code g2d} to setup any ++ * client state. 
++ */ ++ protected void prePaint(GLContext context) { ++ setupMatrices(context); ++ g2d.prePaint(baseComponent); ++ } + -+import groovy.lang.GroovyClassLoader; -+import groovy.lang.GroovyCodeSource; -+import groovy.lang.Script; -+import org.codehaus.groovy.control.CompilationFailedException; -+import org.codehaus.groovy.control.CompilerConfiguration; -+import org.crsh.command.CommandInvoker; -+import org.crsh.command.NoSuchCommandException; -+import org.crsh.plugin.PluginContext; -+import org.crsh.shell.ErrorType; -+import org.crsh.util.TimestampedObject; -+import org.crsh.vfs.Resource; ++ /** ++ * Sets up the three matrices, including the transform from OpenGL coordinate ++ * system to Java2D coordinates. ++ */ ++ protected void setupMatrices(GLContext context) { ++ GL gl = context.getGL(); + -+import java.io.UnsupportedEncodingException; ++ // push all the matrices ++ gl.glMatrixMode(GL.GL_PROJECTION); ++ gl.glPushMatrix(); + -+public abstract class AbstractClassManager { ++ int width = baseComponent.getWidth(); ++ int height = baseComponent.getHeight(); + -+ /** . */ -+ private final PluginContext context; ++ // and setup the viewport ++ gl.glViewport(0, 0, width, height); ++ gl.glLoadIdentity(); ++ gl.glOrtho(0, width, 0, height, -1, 1); + -+ /** . */ -+ private final CompilerConfiguration config; ++ gl.glMatrixMode(GL.GL_MODELVIEW); ++ gl.glPushMatrix(); ++ gl.glLoadIdentity(); + -+ /** . */ -+ private final Class baseClass; ++ // do the transform from Graphics2D coords to openGL coords ++ gl.glTranslatef(0, height, 0); ++ gl.glScalef(1, -1, 1); + -+ protected AbstractClassManager(PluginContext context, Class baseClass, Class baseScriptClass) { -+ CompilerConfiguration config = new CompilerConfiguration(); -+ config.setRecompileGroovySource(true); -+ config.setScriptBaseClass(baseScriptClass.getName()); ++ gl.glMatrixMode(GL.GL_TEXTURE); ++ gl.glPushMatrix(); ++ gl.glLoadIdentity(); ++ } + -+ // -+ this.context = context; -+ this.config = config; -+ this.baseClass = baseClass; ++ /** ++ * Called after all Java2D painting is complete. This should restore the ++ * matrices if they were modified. ++ */ ++ protected void postPaint(GLContext context) { + g2d.postPaint(); ++ popMatrices(context); + } -+ -+ protected abstract TimestampedObject> loadClass(String name); + -+ protected abstract void saveClass(String name, TimestampedObject> clazz); ++ /** ++ * Pops all the matrices. 
++ */ ++ protected void popMatrices(GLContext context) { ++ GL gl = context.getGL(); ++ gl.glMatrixMode(GL.GL_MODELVIEW); ++ gl.glPopMatrix(); ++ gl.glMatrixMode(GL.GL_PROJECTION); ++ gl.glPopMatrix(); ++ gl.glMatrixMode(GL.GL_TEXTURE); ++ gl.glPopMatrix(); + } + + /** +diff --git a/src/main/java/glg2d/GLGraphics2D.java b/src/main/java/glg2d/GLGraphics2D.java +index 324c3ed3..f3d4e437 100644 +--- a/src/main/java/glg2d/GLGraphics2D.java ++++ b/src/main/java/glg2d/GLGraphics2D.java +@@ -131,37 +131,9 @@ protected void prePaint(Component component) { + gl.glDisable(GL.GL_CULL_FACE); + gl.glShadeModel(GL.GL_FLAT); + gl.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT); +- +- // push all the matrices +- gl.glMatrixMode(GL.GL_PROJECTION); +- gl.glPushMatrix(); +- +- // and setup the viewport +- gl.glViewport(0, 0, width, height); +- gl.glLoadIdentity(); +- gl.glOrtho(0, width, 0, height, -1, 1); +- +- gl.glMatrixMode(GL.GL_MODELVIEW); +- gl.glPushMatrix(); +- gl.glLoadIdentity(); +- +- // do the transform from Graphics2D coords to openGL coords +- gl.glTranslatef(0, height, 0); +- gl.glScalef(1, -1, 1); +- +- gl.glMatrixMode(GL.GL_TEXTURE); +- gl.glPushMatrix(); +- gl.glLoadIdentity(); + } + + protected void postPaint() { +- gl.glMatrixMode(GL.GL_MODELVIEW); +- gl.glPopMatrix(); +- gl.glMatrixMode(GL.GL_PROJECTION); +- gl.glPopMatrix(); +- gl.glMatrixMode(GL.GL_TEXTURE); +- gl.glPopMatrix(); +- + gl.glPopClientAttrib(); + gl.glPopAttrib(); + gl.glFlush();" +4cc637f7b8b94f0cd1a97a6b618e333e7ef4d525,mctcp$terraincontrol,"Refactor CustomObjects and Resources +There are now CustomObjects, and all BO2 are a CustomObject. UseWorld +and UseBiome are also CustomObjects. Even all trees are now a +CustomObject. +All resources now inherit Resource. All settings don't have to be stored +in a generic Resource object, but they are stored in the resource +itself. This allow for more flexibility. +Plugin developers can now add their own CustomObjects and Resources. 
+",p,https://github.com/mctcp/terraincontrol,"diff --git a/bukkit/src/com/khorn/terraincontrol/bukkit/TCListener.java b/bukkit/src/com/khorn/terraincontrol/bukkit/TCListener.java +index bbc001aae..9e5d29205 100644 +--- a/bukkit/src/com/khorn/terraincontrol/bukkit/TCListener.java ++++ b/bukkit/src/com/khorn/terraincontrol/bukkit/TCListener.java +@@ -12,20 +12,15 @@ + import org.bukkit.event.world.WorldInitEvent; + + import com.khorn.terraincontrol.configuration.BiomeConfig; +-import com.khorn.terraincontrol.configuration.Resource; +-import com.khorn.terraincontrol.generator.resourcegens.TreeGen; ++import com.khorn.terraincontrol.generator.resourcegens.SaplingGen; + + public class TCListener implements Listener + { + private TCPlugin tcPlugin; +- private Random random; +- +- private TreeGen treeGenerator = new TreeGen(); + + public TCListener(TCPlugin plugin) + { + this.tcPlugin = plugin; +- this.random = new Random(); + Bukkit.getServer().getPluginManager().registerEvents(this, plugin); + } + +@@ -51,7 +46,7 @@ public void onStructureGrow(StructureGrowEvent event) + return; + + BiomeConfig biomeConfig = bukkitWorld.getSettings().biomeConfigs[biomeId]; +- Resource sapling; ++ SaplingGen sapling; + + switch (event.getSpecies()) + { +@@ -76,8 +71,25 @@ public void onStructureGrow(StructureGrowEvent event) + + if (sapling != null) + { +- treeGenerator.SpawnTree(bukkitWorld, this.random, sapling, x, y, z); +- event.getBlocks().clear(); ++ boolean success = false; ++ for(int i = 0; i < bukkitWorld.getSettings().objectSpawnRatio; i++) ++ { ++ if(sapling.growSapling(bukkitWorld, new Random(), x, y, z)) ++ { ++ success = true; ++ break; ++ } ++ } ++ ++ if(success) ++ { ++ // Just spawned the tree, clear the blocks list to prevent Bukkit spawning another tree ++ event.getBlocks().clear(); ++ } else ++ { ++ // Cannot grow, so leave the sapling there ++ event.setCancelled(true); ++ } + } + } + +diff --git a/bukkit/src/com/khorn/terraincontrol/bukkit/TCPlugin.java b/bukkit/src/com/khorn/terraincontrol/bukkit/TCPlugin.java +index 030e4dfb0..e309e4e64 100644 +--- a/bukkit/src/com/khorn/terraincontrol/bukkit/TCPlugin.java ++++ b/bukkit/src/com/khorn/terraincontrol/bukkit/TCPlugin.java +@@ -6,7 +6,7 @@ + import com.khorn.terraincontrol.bukkit.commands.TCCommandExecutor; + import com.khorn.terraincontrol.configuration.TCDefaultValues; + import com.khorn.terraincontrol.configuration.WorldConfig; +-import com.khorn.terraincontrol.customobjects.ObjectsStore; ++import com.khorn.terraincontrol.customobjects.BODefaultValues; + import com.khorn.terraincontrol.util.Txt; + import net.minecraft.server.BiomeBase; + import org.bukkit.Bukkit; +@@ -50,8 +50,6 @@ public void onEnable() + + this.listener = new TCListener(this); + +- ObjectsStore.ReadObjects(this.getDataFolder()); +- + Bukkit.getMessenger().registerOutgoingPluginChannel(this, TCDefaultValues.ChannelName.stringValue()); + + TerrainControl.log(""Enabled""); +@@ -185,4 +183,10 @@ public LocalWorld getWorld(String name) + } + return this.worlds.get(world.getUID()); + } ++ ++ @Override ++ public File getGlobalObjectsDirectory() ++ { ++ return new File(this.getDataFolder(), BODefaultValues.BO_GlobalDirectoryName.stringValue()); ++ } + } +\ No newline at end of file +diff --git a/bukkit/src/com/khorn/terraincontrol/bukkit/commands/ListCommand.java b/bukkit/src/com/khorn/terraincontrol/bukkit/commands/ListCommand.java +index c8efffcff..bce1ac038 100644 +--- a/bukkit/src/com/khorn/terraincontrol/bukkit/commands/ListCommand.java ++++ 
b/bukkit/src/com/khorn/terraincontrol/bukkit/commands/ListCommand.java +@@ -1,93 +1,91 @@ +-package com.khorn.terraincontrol.bukkit.commands; +- +-import com.khorn.terraincontrol.bukkit.BukkitWorld; +-import com.khorn.terraincontrol.bukkit.TCPerm; +-import com.khorn.terraincontrol.bukkit.TCPlugin; +-import com.khorn.terraincontrol.customobjects.CustomObject; +-import com.khorn.terraincontrol.customobjects.CustomObjectCompiled; +-import com.khorn.terraincontrol.customobjects.ObjectsStore; +-import org.bukkit.command.CommandSender; +- +-import java.util.ArrayList; +-import java.util.List; +- +-public class ListCommand extends BaseCommand +-{ +- public ListCommand(TCPlugin _plugin) +- { +- super(_plugin); +- name = ""list""; +- perm = TCPerm.CMD_LIST.node; +- usage = ""list [-w World] [page]""; +- workOnConsole = false; +- } +- +- @Override +- public boolean onCommand(CommandSender sender, List args) +- { +- +- int page = 1; +- +- if (args.size() > 1 && args.get(0).equals(""-w"")) +- { +- String worldName = args.get(1); +- if (args.size() > 2) +- { +- try +- { +- page = Integer.parseInt(args.get(2)); +- } catch (Exception e) +- { +- sender.sendMessage(ErrorColor + ""Wrong page number "" + args.get(2)); +- } +- } +- BukkitWorld world = this.getWorld(sender, worldName); +- +- if (world != null) +- { +- if (world.getSettings().CustomObjectsCompiled.size() == 0) +- sender.sendMessage(MessageColor + ""This world does not have custom objects""); +- +- List pluginList = new ArrayList(); +- for (CustomObjectCompiled object : world.getSettings().CustomObjectsCompiled) +- { +- pluginList.add(ValueColor + object.Name); +- } +- +- this.ListMessage(sender, pluginList, page, ""World bo2 objects""); +- +- } else +- sender.sendMessage(ErrorColor + ""World not found "" + worldName); +- return true; +- +- +- } +- if (args.size() > 0) +- { +- try +- { +- page = Integer.parseInt(args.get(0)); +- } catch (Exception e) +- { +- sender.sendMessage(ErrorColor + ""Wrong page number "" + args.get(0)); +- } +- } +- +- ArrayList globalObjects = ObjectsStore.LoadObjectsFromDirectory(ObjectsStore.GlobalDirectory); +- +- if (globalObjects.size() == 0) +- sender.sendMessage(MessageColor + ""This global directory does not have custom objects""); +- +- List pluginList = new ArrayList(); +- for (CustomObject object : globalObjects) +- { +- pluginList.add(ValueColor + object.Name); +- } +- +- this.ListMessage(sender, pluginList, page, ""Global bo2 objects""); +- +- +- return true; +- +- } ++package com.khorn.terraincontrol.bukkit.commands; ++ ++import com.khorn.terraincontrol.TerrainControl; ++import com.khorn.terraincontrol.bukkit.BukkitWorld; ++import com.khorn.terraincontrol.bukkit.TCPerm; ++import com.khorn.terraincontrol.bukkit.TCPlugin; ++import com.khorn.terraincontrol.customobjects.CustomObject; ++import org.bukkit.command.CommandSender; + -+ protected abstract Resource getResource(String name); ++import java.util.ArrayList; ++import java.util.Collection; ++import java.util.List; + -+ Class getClass(String name) throws NoSuchCommandException, NullPointerException { -+ if (name == null) { -+ throw new NullPointerException(""No null argument allowed""); ++public class ListCommand extends BaseCommand ++{ ++ public ListCommand(TCPlugin _plugin) ++ { ++ super(_plugin); ++ name = ""list""; ++ perm = TCPerm.CMD_LIST.node; ++ usage = ""list [-w World] [page]""; ++ workOnConsole = false; + } + -+ TimestampedObject> providerRef = loadClass(name); ++ @Override ++ public boolean onCommand(CommandSender sender, List args) ++ { ++ ++ 
int page = 1; + -+ // -+ Resource script = getResource(name); ++ if (args.size() > 1 && args.get(0).equals(""-w"")) ++ { ++ String worldName = args.get(1); ++ if (args.size() > 2) ++ { ++ try ++ { ++ page = Integer.parseInt(args.get(2)); ++ } catch (Exception e) ++ { ++ sender.sendMessage(ErrorColor + ""Wrong page number "" + args.get(2)); ++ } ++ } ++ BukkitWorld world = this.getWorld(sender, worldName); ++ ++ if (world != null) ++ { ++ if (world.getSettings().customObjects.size() == 0) ++ sender.sendMessage(MessageColor + ""This world does not have custom objects""); ++ ++ List pluginList = new ArrayList(); ++ for (CustomObject object : world.getSettings().customObjects.values()) ++ { ++ pluginList.add(ValueColor + object.getName()); ++ } ++ ++ this.ListMessage(sender, pluginList, page, ""World bo2 objects""); ++ ++ } else ++ sender.sendMessage(ErrorColor + ""World not found "" + worldName); ++ return true; + -+ // -+ if (script != null) { -+ if (providerRef != null) { -+ if (script.getTimestamp() != providerRef.getTimestamp()) { -+ providerRef = null; + } -+ } ++ if (args.size() > 0) ++ { ++ try ++ { ++ page = Integer.parseInt(args.get(0)); ++ } catch (Exception e) ++ { ++ sender.sendMessage(ErrorColor + ""Wrong page number "" + args.get(0)); ++ } ++ } + -+ // -+ if (providerRef == null) { ++ Collection globalObjects = TerrainControl.getCustomObjectManager().globalObjects.values(); + -+ // -+ String source; -+ try { -+ source = new String(script.getContent(), ""UTF-8""); ++ if (globalObjects.size() == 0) ++ sender.sendMessage(MessageColor + ""This global directory does not have custom objects""); ++ ++ List pluginList = new ArrayList(); ++ for (CustomObject object : globalObjects) ++ { ++ pluginList.add(ValueColor + object.getName()); + } -+ catch (UnsupportedEncodingException e) { -+ throw new NoSuchCommandException(name, ErrorType.INTERNAL, ""Could not compile command script "" + name, e); ++ ++ this.ListMessage(sender, pluginList, page, ""Global bo2 objects""); ++ ++ return true; ++ ++ } + } +\ No newline at end of file +diff --git a/bukkit/src/com/khorn/terraincontrol/bukkit/commands/SpawnCommand.java b/bukkit/src/com/khorn/terraincontrol/bukkit/commands/SpawnCommand.java +index c706d9558..6c3252e26 100644 +--- a/bukkit/src/com/khorn/terraincontrol/bukkit/commands/SpawnCommand.java ++++ b/bukkit/src/com/khorn/terraincontrol/bukkit/commands/SpawnCommand.java +@@ -1,97 +1,90 @@ +-package com.khorn.terraincontrol.bukkit.commands; +- +-import com.khorn.terraincontrol.bukkit.BukkitWorld; +-import com.khorn.terraincontrol.bukkit.TCPerm; +-import com.khorn.terraincontrol.bukkit.TCPlugin; +-import com.khorn.terraincontrol.customobjects.CustomObjectCompiled; +-import com.khorn.terraincontrol.customobjects.CustomObjectGen; +-import com.khorn.terraincontrol.customobjects.ObjectsStore; +-import org.bukkit.block.Block; +-import org.bukkit.command.CommandSender; +-import org.bukkit.entity.Player; +-import org.bukkit.util.BlockIterator; +- +-import java.util.Iterator; +-import java.util.List; +-import java.util.Random; +- +-public class SpawnCommand extends BaseCommand +-{ +- public SpawnCommand(TCPlugin _plugin) +- { +- super(_plugin); +- name = ""spawn""; +- perm = TCPerm.CMD_SPAWN.node; +- usage = ""spawn Name [World]""; +- workOnConsole = false; +- } +- +- @Override +- public boolean onCommand(CommandSender sender, List args) +- { +- Player me = (Player) sender; +- +- BukkitWorld bukkitWorld = this.getWorld(me, args.size() > 1 ? 
args.get(1) : """"); +- +- if (args.size() == 0) +- { +- me.sendMessage(ErrorColor + ""You must enter the name of the BO2.""); +- return true; +- } +- CustomObjectCompiled spawnObject = null; +- +- if (bukkitWorld != null) +- spawnObject = ObjectsStore.CompileString(args.get(0), bukkitWorld.getSettings().CustomObjectsDirectory); +- +- if (spawnObject == null) +- { +- me.sendMessage(BaseCommand.MessageColor + ""BO2 not found in world directory. Searching in global directory.""); +- spawnObject = ObjectsStore.CompileString(args.get(0), ObjectsStore.GlobalDirectory); +- } +- +- if (spawnObject == null) +- { +- sender.sendMessage(ErrorColor + ""BO2 not found, use '/tc list' to list the available ones.""); +- return true; +- } +- +- Block block = this.getWatchedBlock(me, true); +- if (block == null) +- return true; +- +- if (CustomObjectGen.GenerateCustomObject(bukkitWorld, new Random(), block.getX(), block.getY(), block.getZ(), spawnObject)) +- { +- me.sendMessage(BaseCommand.MessageColor + spawnObject.Name + "" was spawned.""); +- } else +- { +- me.sendMessage(BaseCommand.ErrorColor + ""BO2 cant be spawned over there.""); +- } +- +- return true; +- } +- +- public Block getWatchedBlock(Player me, boolean verbose) +- { +- if (me == null) +- return null; +- +- Block block; +- +- Iterator itr = new BlockIterator(me, 200); +- while (itr.hasNext()) +- { +- block = itr.next(); +- if (block.getTypeId() != 0) +- { +- return block; +- } +- } +- +- if (verbose) +- { +- me.sendMessage(ErrorColor + ""No block in sight.""); +- } +- +- return null; +- } ++package com.khorn.terraincontrol.bukkit.commands; ++ ++import com.khorn.terraincontrol.TerrainControl; ++import com.khorn.terraincontrol.bukkit.BukkitWorld; ++import com.khorn.terraincontrol.bukkit.TCPerm; ++import com.khorn.terraincontrol.bukkit.TCPlugin; ++import com.khorn.terraincontrol.customobjects.CustomObject; ++import org.bukkit.block.Block; ++import org.bukkit.command.CommandSender; ++import org.bukkit.entity.Player; ++import org.bukkit.util.BlockIterator; ++ ++import java.util.Iterator; ++import java.util.List; ++import java.util.Random; ++ ++public class SpawnCommand extends BaseCommand ++{ ++ public SpawnCommand(TCPlugin _plugin) ++ { ++ super(_plugin); ++ name = ""spawn""; ++ perm = TCPerm.CMD_SPAWN.node; ++ usage = ""spawn Name [World]""; ++ workOnConsole = false; ++ } ++ ++ @Override ++ public boolean onCommand(CommandSender sender, List args) ++ { ++ Player me = (Player) sender; ++ ++ BukkitWorld bukkitWorld = this.getWorld(me, args.size() > 1 ? 
args.get(1) : """"); ++ ++ if (args.size() == 0) ++ { ++ me.sendMessage(ErrorColor + ""You must enter the name of the BO2.""); ++ return true; + } ++ CustomObject spawnObject = null; + -+ // -+ Class clazz; -+ try { -+ GroovyCodeSource gcs = new GroovyCodeSource(source, name, ""/groovy/shell""); -+ GroovyClassLoader gcl = new GroovyClassLoader(context.getLoader(), config); -+ clazz = gcl.parseClass(gcs, false); ++ if (bukkitWorld != null) ++ spawnObject = TerrainControl.getCustomObjectManager().getObjectFromString(args.get(0), bukkitWorld); ++ ++ if (spawnObject == null) ++ { ++ sender.sendMessage(ErrorColor + ""Object not found, use '/tc list' to list the available ones.""); ++ return true; + } -+ catch (NoClassDefFoundError e) { -+ throw new NoSuchCommandException(name, ErrorType.INTERNAL, ""Could not compile command script "" + name, e); ++ ++ Block block = this.getWatchedBlock(me, true); ++ if (block == null) ++ return true; ++ ++ if (spawnObject.spawn(bukkitWorld, new Random(), block.getX(), block.getY(), block.getZ())) ++ { ++ me.sendMessage(BaseCommand.MessageColor + spawnObject.getName() + "" was spawned.""); ++ } else ++ { ++ me.sendMessage(BaseCommand.ErrorColor + ""BO2 cant be spawned over there.""); + } -+ catch (CompilationFailedException e) { -+ throw new NoSuchCommandException(name, ErrorType.INTERNAL, ""Could not compile command script "" + name, e); ++ ++ return true; ++ } ++ ++ public Block getWatchedBlock(Player me, boolean verbose) ++ { ++ if (me == null) ++ return null; ++ ++ Block block; ++ ++ Iterator itr = new BlockIterator(me, 200); ++ while (itr.hasNext()) ++ { ++ block = itr.next(); ++ if (block.getTypeId() != 0) ++ { ++ return block; ++ } + } + -+ // -+ if (baseClass.isAssignableFrom(clazz)) { -+ Class providerClass = clazz.asSubclass(baseClass); -+ providerRef = new TimestampedObject>(script.getTimestamp(), providerClass); -+ saveClass(name, providerRef); -+ } else { -+ throw new NoSuchCommandException(name, ErrorType.INTERNAL, ""Parsed script "" + clazz.getName() + -+ "" does not implements "" + CommandInvoker.class.getName()); ++ if (verbose) ++ { ++ me.sendMessage(ErrorColor + ""No block in sight.""); + } -+ } ++ ++ return null; + } + } +\ No newline at end of file +diff --git a/common/src/com/khorn/terraincontrol/TerrainControl.java b/common/src/com/khorn/terraincontrol/TerrainControl.java +index 0384414d4..5fa04436f 100644 +--- a/common/src/com/khorn/terraincontrol/TerrainControl.java ++++ b/common/src/com/khorn/terraincontrol/TerrainControl.java +@@ -1,10 +1,37 @@ + package com.khorn.terraincontrol; + ++import java.util.HashMap; ++import java.util.Map; + import java.util.logging.Level; + ++import com.khorn.terraincontrol.configuration.Resource; ++import com.khorn.terraincontrol.customobjects.CustomObject; ++import com.khorn.terraincontrol.customobjects.CustomObjectLoader; ++import com.khorn.terraincontrol.customobjects.CustomObjectManager; ++import com.khorn.terraincontrol.generator.resourcegens.ResourcesManager; + -+ // -+ if (providerRef == null) { -+ return null; + public class TerrainControl + { ++ /** ++ * The world height that the engine supports. Not the actual height the ++ * world is capped at. 256 in Minecraft. ++ */ ++ public static int worldHeight = 256; ++ ++ /** ++ * The world depth that the engine supports. Not the actual depth the world ++ * is capped at. 0 in Minecraft. 
++ */ ++ public static int worldDepth = 0; ++ + private static TerrainControlEngine engine; ++ private static ResourcesManager resourcesManager; ++ private static CustomObjectManager customObjectManager; ++ ++ // Used before TerrainControl is initialized ++ private static Map customObjectLoaders = new HashMap(); ++ private static Map specialCustomObjects; ++ private static Map> resourceTypes; + + private TerrainControl() + { +@@ -12,7 +39,7 @@ private TerrainControl() + } + + /** +- * Starts the engine, making all API methods availible. ++ * Starts the engine, making all API methods available. + * + * @param engine + * The implementation of the engine. +@@ -24,6 +51,24 @@ public static void startEngine(TerrainControlEngine engine) + throw new UnsupportedOperationException(""Engine is already set!""); + } + TerrainControl.engine = engine; ++ ++ if (customObjectLoaders == null) ++ { ++ customObjectLoaders = new HashMap(); ++ } ++ if (specialCustomObjects == null) ++ { ++ specialCustomObjects = new HashMap(); ++ } ++ customObjectManager = new CustomObjectManager(customObjectLoaders, specialCustomObjects); ++ ++ if (resourceTypes == null) ++ { ++ resourceTypes = new HashMap>(); ++ } ++ ++ resourcesManager = new ResourcesManager(resourceTypes); ++ //resourcesManager.start(resourceTypes); + } + + /** +@@ -33,6 +78,12 @@ public static void startEngine(TerrainControlEngine engine) + public static void stopEngine() + { + engine = null; ++ customObjectManager = null; ++ resourcesManager = null; ++ ++ customObjectLoaders.clear(); ++ specialCustomObjects.clear(); ++ resourceTypes.clear(); + } + + /** +@@ -105,4 +156,75 @@ public static void log(Level level, String... messages) + { + engine.log(level, messages); + } ++ ++ /** ++ * Returns the CustomObject manager, with hooks to spawn CustomObjects. ++ * ++ * @return The CustomObject manager. ++ */ ++ public static CustomObjectManager getCustomObjectManager() ++ { ++ return customObjectManager; + } + -+ // -+ return providerRef.getObject(); -+ } ++ /** ++ * Returns the Resource manager. ++ * ++ * @return The Resource manager. ++ */ ++ public static ResourcesManager getResourcesManager() ++ { ++ return resourcesManager; ++ } + -+ T getInstance(String name) throws NoSuchCommandException, NullPointerException { -+ Class clazz = getClass(name); -+ if (clazz == null) { -+ return null; ++ /** ++ * Registers a CustomObject loader. Can be called before Terrain Control is ++ * fully loaded. ++ * ++ * @param extension ++ * The file extension, without a dot. ++ * @param loader ++ * The loader. ++ */ ++ public static void registerCustomObjectLoader(String extension, CustomObjectLoader loader) ++ { ++ if (customObjectLoaders == null) ++ { ++ customObjectLoaders = new HashMap(); ++ } ++ customObjectLoaders.put(extension.toLowerCase(), loader); + } + -+ // -+ try { -+ return clazz.newInstance(); ++ /** ++ * Registers a special CustomObject, like UseWorld or UseBiome or a tree. ++ * Can be called before Terrain Control is fully loaded. ++ * ++ * @param extension ++ * @param loader ++ */ ++ public static void registerSpecialCustomObject(String name, CustomObject object) ++ { ++ if (specialCustomObjects == null) ++ { ++ specialCustomObjects = new HashMap(); ++ } ++ specialCustomObjects.put(name.toLowerCase(), object); + } -+ catch (Exception e) { -+ throw new NoSuchCommandException(name, ErrorType.INTERNAL, ""Could not create command "" + name + "" instance"", e); ++ ++ /** ++ * Register a new Resource type. Can be called before Terrain Control is ++ * fully loaded. 
++ * ++ * @param name ++ * @param resourceType ++ */ ++ public static void registerResourceType(String name, Class resourceType) ++ { ++ if (resourceTypes == null) ++ { ++ resourceTypes = new HashMap>(); ++ } ++ resourceTypes.put(name.toLowerCase(), resourceType); + } -+ } -+} -diff --git a/shell/core/src/main/java/org/crsh/shell/impl/command/CRaSH.java b/shell/core/src/main/java/org/crsh/shell/impl/command/CRaSH.java -index ce0d38949..35c96e519 100644 ---- a/shell/core/src/main/java/org/crsh/shell/impl/command/CRaSH.java -+++ b/shell/core/src/main/java/org/crsh/shell/impl/command/CRaSH.java -@@ -19,7 +19,6 @@ ++ + } +diff --git a/common/src/com/khorn/terraincontrol/TerrainControlEngine.java b/common/src/com/khorn/terraincontrol/TerrainControlEngine.java +index 118bda6a0..c3f5c54ed 100644 +--- a/common/src/com/khorn/terraincontrol/TerrainControlEngine.java ++++ b/common/src/com/khorn/terraincontrol/TerrainControlEngine.java +@@ -1,5 +1,6 @@ + package com.khorn.terraincontrol; - package org.crsh.shell.impl.command; ++import java.io.File; + import java.util.logging.Level; --import groovy.lang.Script; - import org.crsh.command.GroovyScript; - import org.crsh.command.GroovyScriptCommand; - import org.crsh.command.NoSuchCommandException; -@@ -33,10 +32,10 @@ public class CRaSH { + public interface TerrainControlEngine +@@ -9,11 +10,17 @@ public interface TerrainControlEngine + * @param name The name of the world. + * @return The world object. + */ +- public abstract LocalWorld getWorld(String name); ++ public LocalWorld getWorld(String name); + + /** + * Logs the messages. + * @param message The messages to log. + */ +- public abstract void log(Level level, String... message); ++ public void log(Level level, String... message); ++ ++ /** ++ * Returns the folder where the global objects are stored in. 
++ * @return ++ */ ++ public File getGlobalObjectsDirectory(); + } +diff --git a/common/src/com/khorn/terraincontrol/configuration/BiomeConfig.java b/common/src/com/khorn/terraincontrol/configuration/BiomeConfig.java +index b72c0087e..0d4d81fb3 100644 +--- a/common/src/com/khorn/terraincontrol/configuration/BiomeConfig.java ++++ b/common/src/com/khorn/terraincontrol/configuration/BiomeConfig.java +@@ -4,11 +4,26 @@ + import com.khorn.terraincontrol.DefaultMaterial; + import com.khorn.terraincontrol.DefaultMobType; + import com.khorn.terraincontrol.LocalBiome; +-import com.khorn.terraincontrol.customobjects.BODefaultValues; +-import com.khorn.terraincontrol.customobjects.CustomObjectCompiled; +-import com.khorn.terraincontrol.customobjects.ObjectsStore; ++import com.khorn.terraincontrol.LocalWorld; ++import com.khorn.terraincontrol.TerrainControl; ++import com.khorn.terraincontrol.customobjects.CustomObject; ++import com.khorn.terraincontrol.generator.resourcegens.AboveWaterGen; ++import com.khorn.terraincontrol.generator.resourcegens.CactusGen; ++import com.khorn.terraincontrol.generator.resourcegens.CustomObjectGen; ++import com.khorn.terraincontrol.generator.resourcegens.DungeonGen; ++import com.khorn.terraincontrol.generator.resourcegens.GrassGen; ++import com.khorn.terraincontrol.generator.resourcegens.LiquidGen; ++import com.khorn.terraincontrol.generator.resourcegens.OreGen; ++import com.khorn.terraincontrol.generator.resourcegens.PlantGen; ++import com.khorn.terraincontrol.generator.resourcegens.ReedGen; + import com.khorn.terraincontrol.generator.resourcegens.ResourceType; ++import com.khorn.terraincontrol.generator.resourcegens.SaplingGen; ++import com.khorn.terraincontrol.generator.resourcegens.SmallLakeGen; ++import com.khorn.terraincontrol.generator.resourcegens.TreeGen; + import com.khorn.terraincontrol.generator.resourcegens.TreeType; ++import com.khorn.terraincontrol.generator.resourcegens.UnderWaterOreGen; ++import com.khorn.terraincontrol.generator.resourcegens.UndergroundLakeGen; ++import com.khorn.terraincontrol.generator.resourcegens.VinesGen; + import com.khorn.terraincontrol.util.Txt; + import java.io.DataInputStream; +@@ -16,6 +31,7 @@ + import java.io.File; + import java.io.IOException; + import java.util.ArrayList; ++import java.util.Arrays; + import java.util.List; + import java.util.Map; - /** . */ -- final ClassManager commandManager; -+ final AbstractClassManager commandManager; +@@ -33,7 +49,7 @@ public class BiomeConfig extends ConfigFile + public ArrayList IsleInBiome; + public ArrayList NotBorderNear; - /** . */ -- final ClassManager scriptManager; -+ final AbstractClassManager scriptManager; +- //Surface config ++ // Surface config + public float BiomeHeight; + public float BiomeVolatility; - /** . 
*/ - final PluginContext context; -@@ -55,7 +54,7 @@ public CRaSH(PluginContext context) throws NullPointerException { - ); - } +@@ -47,7 +63,6 @@ public class BiomeConfig extends ConfigFile -- public CRaSH(PluginContext context, ClassManager commandManager, ClassManager scriptManager) { -+ public CRaSH(PluginContext context, AbstractClassManager commandManager, AbstractClassManager scriptManager) { - this.context = context; - this.commandManager = commandManager; - this.scriptManager = scriptManager; -diff --git a/shell/core/src/main/java/org/crsh/shell/impl/command/ClassManager.java b/shell/core/src/main/java/org/crsh/shell/impl/command/ClassManager.java -index 1369cee1d..ddf3cfaea 100644 ---- a/shell/core/src/main/java/org/crsh/shell/impl/command/ClassManager.java -+++ b/shell/core/src/main/java/org/crsh/shell/impl/command/ClassManager.java -@@ -19,25 +19,16 @@ - - package org.crsh.shell.impl.command; - --import groovy.lang.GroovyClassLoader; --import groovy.lang.GroovyCodeSource; - import groovy.lang.Script; --import org.codehaus.groovy.control.CompilationFailedException; --import org.codehaus.groovy.control.CompilerConfiguration; --import org.crsh.command.CommandInvoker; --import org.crsh.command.GroovyScriptCommand; --import org.crsh.command.NoSuchCommandException; - import org.crsh.plugin.PluginContext; - import org.crsh.plugin.ResourceKind; --import org.crsh.shell.ErrorType; - import org.crsh.util.TimestampedObject; - import org.crsh.vfs.Resource; - --import java.io.UnsupportedEncodingException; - import java.util.Map; - import java.util.concurrent.ConcurrentHashMap; + public String ReplaceBiomeName; --class ClassManager { -+public class ClassManager extends AbstractClassManager { +- + public boolean UseWorldWaterLevel; + public int waterLevelMax; + public int waterLevelMin; +@@ -63,10 +78,11 @@ public class BiomeConfig extends ConfigFile + public boolean FoliageColorIsMultiplier; - /** . */ - private final Map>> classes = new ConcurrentHashMap>>(); -@@ -45,104 +36,29 @@ class ClassManager { - /** . */ - private final PluginContext context; + public Resource[] ResourceSequence = new Resource[256]; +- public Resource[] SaplingTypes = new Resource[4]; +- public Resource SaplingResource = null; ++ public SaplingGen[] SaplingTypes = new SaplingGen[4]; ++ public SaplingGen SaplingResource = null; -- /** . */ -- private final CompilerConfiguration config; -- -- /** . */ -- private final Class baseClass; -- - /** . 
*/ - private final ResourceKind kind; +- public ArrayList CustomObjectsCompiled; ++ public ArrayList biomeObjects; ++ public ArrayList biomeObjectStrings; -- ClassManager(PluginContext context, ResourceKind kind, Class baseClass, Class baseScriptClass) { -- CompilerConfiguration config = new CompilerConfiguration(); -- config.setRecompileGroovySource(true); -- config.setScriptBaseClass(baseScriptClass.getName()); -+ public ClassManager(PluginContext context, ResourceKind kind, Class baseClass, Class baseScriptClass) { -+ super(context, baseClass, baseScriptClass); + public double maxAverageHeight; + public double maxAverageDepth; +@@ -83,13 +99,12 @@ public class BiomeConfig extends ConfigFile - // - this.context = context; -- this.config = config; -- this.baseClass = baseClass; - this.kind = kind; - } + public int ResourceCount = 0; -- Class getClass(String name) throws NoSuchCommandException, NullPointerException { -- if (name == null) { -- throw new NullPointerException(""No null argument allowed""); -- } - -- TimestampedObject> providerRef = classes.get(name); -- -- // -- Resource script = context.loadResource(name, kind); -- -- // -- if (script != null) { -- if (providerRef != null) { -- if (script.getTimestamp() != providerRef.getTimestamp()) { -- providerRef = null; -- } -- } -- -- // -- if (providerRef == null) { -- -- // -- String source; -- try { -- source = new String(script.getContent(), ""UTF-8""); -- } -- catch (UnsupportedEncodingException e) { -- throw new NoSuchCommandException(name, ErrorType.INTERNAL, ""Could not compile command script "" + name, e); -- } + public LocalBiome Biome; + + public WorldConfig worldConfig; + public String Name; + +- //Spawn Config ++ // Spawn Config + public boolean spawnMonstersAddDefaults = true; + public List spawnMonsters = new ArrayList(); + public boolean spawnCreaturesAddDefaults = true; +@@ -124,12 +139,10 @@ public BiomeConfig(File settingsDir, LocalBiome biome, WorldConfig config) + this.iceBlock = worldConfig.iceBlock; + } + - -- // -- Class clazz; -- try { -- GroovyCodeSource gcs = new GroovyCodeSource(source, name, ""/groovy/shell""); -- GroovyClassLoader gcl = new GroovyClassLoader(context.getLoader(), config); -- clazz = gcl.parseClass(gcs, false); -- } -- catch (NoClassDefFoundError e) { -- throw new NoSuchCommandException(name, ErrorType.INTERNAL, ""Could not compile command script "" + name, e); -- } -- catch (CompilationFailedException e) { -- throw new NoSuchCommandException(name, ErrorType.INTERNAL, ""Could not compile command script "" + name, e); -- } + if (biome.isCustom()) + biome.setVisuals(this); + } + - -- // -- if (baseClass.isAssignableFrom(clazz)) { -- Class providerClass = clazz.asSubclass(baseClass); -- providerRef = new TimestampedObject>(script.getTimestamp(), providerClass); -- classes.put(name, providerRef); -- } else { -- throw new NoSuchCommandException(name, ErrorType.INTERNAL, ""Parsed script "" + clazz.getName() + -- "" does not implements "" + CommandInvoker.class.getName()); -- } -- } -- } + public int getTemperature() + { + return (int) (this.BiomeTemperature * 65536.0F); +@@ -144,199 +157,193 @@ private void CreateDefaultResources() + { + Resource resource; + +- //Small lakes +- resource = new Resource(ResourceType.SmallLake, DefaultMaterial.WATER.id, TCDefaultValues.SmallLakeWaterFrequency.intValue(), TCDefaultValues.SmallLakeWaterRarity.intValue(), TCDefaultValues.SmallLakeMinAltitude.intValue(), TCDefaultValues.SmallLakeMaxAltitude.intValue()); ++ // Small lakes ++ resource = 
Resource.create(worldConfig, SmallLakeGen.class, DefaultMaterial.WATER.id, TCDefaultValues.SmallLakeWaterFrequency.intValue(), TCDefaultValues.SmallLakeWaterRarity.intValue(), TCDefaultValues.SmallLakeMinAltitude.intValue(), TCDefaultValues.SmallLakeMaxAltitude.intValue()); + this.ResourceSequence[this.ResourceCount++] = resource; + +- //Small lakes +- resource = new Resource(ResourceType.SmallLake, DefaultMaterial.LAVA.id, TCDefaultValues.SmallLakeLavaFrequency.intValue(), TCDefaultValues.SmallLakeLavaRarity.intValue(), TCDefaultValues.SmallLakeMinAltitude.intValue(), TCDefaultValues.SmallLakeMaxAltitude.intValue()); ++ // Small lakes ++ resource = Resource.create(worldConfig, SmallLakeGen.class, DefaultMaterial.LAVA.id, TCDefaultValues.SmallLakeLavaFrequency.intValue(), TCDefaultValues.SmallLakeLavaRarity.intValue(), TCDefaultValues.SmallLakeMinAltitude.intValue(), TCDefaultValues.SmallLakeMaxAltitude.intValue()); + this.ResourceSequence[this.ResourceCount++] = resource; + +- //Underground lakes +- resource = new Resource(ResourceType.UnderGroundLake, TCDefaultValues.undergroundLakeMinSize.intValue(), TCDefaultValues.undergroundLakeMaxSize.intValue(), TCDefaultValues.undergroundLakeFrequency.intValue(), TCDefaultValues.undergroundLakeRarity.intValue(), TCDefaultValues.undergroundLakeMinAltitude.intValue(), TCDefaultValues.undergroundLakeMaxAltitude.intValue()); ++ // Underground lakes ++ resource = Resource.create(worldConfig, UndergroundLakeGen.class, TCDefaultValues.undergroundLakeMinSize.intValue(), TCDefaultValues.undergroundLakeMaxSize.intValue(), TCDefaultValues.undergroundLakeFrequency.intValue(), TCDefaultValues.undergroundLakeRarity.intValue(), TCDefaultValues.undergroundLakeMinAltitude.intValue(), TCDefaultValues.undergroundLakeMaxAltitude.intValue()); + this.ResourceSequence[this.ResourceCount++] = resource; + +- //Dungeon +- resource = new Resource(ResourceType.Dungeon, 0, 0, 0, TCDefaultValues.dungeonFrequency.intValue(), TCDefaultValues.dungeonRarity.intValue(), TCDefaultValues.dungeonMinAltitude.intValue(), this.worldConfig.WorldHeight, new int[]{DefaultMaterial.STONE.id}); ++ // Dungeon ++ resource = Resource.create(worldConfig, DungeonGen.class, TCDefaultValues.dungeonFrequency.intValue(), TCDefaultValues.dungeonRarity.intValue(), TCDefaultValues.dungeonMinAltitude.intValue(), this.worldConfig.WorldHeight); + this.ResourceSequence[this.ResourceCount++] = resource; + +- //Resource(ResourceType type,int blockId, int blockData, int size,int frequency, int rarity, int minAltitude,int maxAltitude,int[] sourceBlockIds) +- //Dirt +- resource = new Resource(ResourceType.Ore, DefaultMaterial.DIRT.id, 0, TCDefaultValues.dirtDepositSize.intValue(), TCDefaultValues.dirtDepositFrequency.intValue(), TCDefaultValues.dirtDepositRarity.intValue(), TCDefaultValues.dirtDepositMinAltitude.intValue(), this.worldConfig.WorldHeight, new int[]{DefaultMaterial.STONE.id}); ++ // Dirt ++ resource = Resource.create(worldConfig, OreGen.class, DefaultMaterial.DIRT.id, TCDefaultValues.dirtDepositSize.intValue(), TCDefaultValues.dirtDepositFrequency.intValue(), TCDefaultValues.dirtDepositRarity.intValue(), TCDefaultValues.dirtDepositMinAltitude.intValue(), this.worldConfig.WorldHeight, DefaultMaterial.STONE.id); + this.ResourceSequence[this.ResourceCount++] = resource; + +- //Gravel +- resource = new Resource(ResourceType.Ore, DefaultMaterial.GRAVEL.id, 0, TCDefaultValues.gravelDepositSize.intValue(), TCDefaultValues.gravelDepositFrequency.intValue(), TCDefaultValues.gravelDepositRarity.intValue(), 
TCDefaultValues.gravelDepositMinAltitude.intValue(), this.worldConfig.WorldHeight, new int[]{DefaultMaterial.STONE.id}); ++ // Gravel ++ resource = Resource.create(worldConfig, OreGen.class, DefaultMaterial.GRAVEL.id, TCDefaultValues.gravelDepositSize.intValue(), TCDefaultValues.gravelDepositFrequency.intValue(), TCDefaultValues.gravelDepositRarity.intValue(), TCDefaultValues.gravelDepositMinAltitude.intValue(), this.worldConfig.WorldHeight, DefaultMaterial.STONE.id); + this.ResourceSequence[this.ResourceCount++] = resource; + +- //Clay +- resource = new Resource(ResourceType.Ore, DefaultMaterial.CLAY.id, 0, TCDefaultValues.clayDepositSize.intValue(), TCDefaultValues.clayDepositFrequency.intValue(), TCDefaultValues.clayDepositRarity.intValue(), TCDefaultValues.clayDepositMinAltitude.intValue(), this.worldConfig.WorldHeight, new int[]{DefaultMaterial.STONE.id}); ++ // Clay ++ resource = Resource.create(worldConfig, OreGen.class, DefaultMaterial.CLAY.id, TCDefaultValues.clayDepositSize.intValue(), TCDefaultValues.clayDepositFrequency.intValue(), TCDefaultValues.clayDepositRarity.intValue(), TCDefaultValues.clayDepositMinAltitude.intValue(), this.worldConfig.WorldHeight, DefaultMaterial.STONE.id); + this.ResourceSequence[this.ResourceCount++] = resource; + +- //Coal +- resource = new Resource(ResourceType.Ore, DefaultMaterial.COAL_ORE.id, 0, TCDefaultValues.coalDepositSize.intValue(), TCDefaultValues.coalDepositFrequency.intValue(), TCDefaultValues.coalDepositRarity.intValue(), TCDefaultValues.coalDepositMinAltitude.intValue(), this.worldConfig.WorldHeight, new int[]{DefaultMaterial.STONE.id}); ++ // Coal ++ resource = Resource.create(worldConfig, OreGen.class, DefaultMaterial.COAL_ORE.id, TCDefaultValues.coalDepositSize.intValue(), TCDefaultValues.coalDepositFrequency.intValue(), TCDefaultValues.coalDepositRarity.intValue(), TCDefaultValues.coalDepositMinAltitude.intValue(), this.worldConfig.WorldHeight, DefaultMaterial.STONE.id); + this.ResourceSequence[this.ResourceCount++] = resource; + +- //Iron +- resource = new Resource(ResourceType.Ore, DefaultMaterial.IRON_ORE.id, 0, TCDefaultValues.ironDepositSize.intValue(), TCDefaultValues.ironDepositFrequency.intValue(), TCDefaultValues.ironDepositRarity.intValue(), TCDefaultValues.ironDepositMinAltitude.intValue(), this.worldConfig.WorldHeight / 2, new int[]{DefaultMaterial.STONE.id}); ++ // Iron ++ resource = Resource.create(worldConfig, OreGen.class, DefaultMaterial.IRON_ORE.id, TCDefaultValues.ironDepositSize.intValue(), TCDefaultValues.ironDepositFrequency.intValue(), TCDefaultValues.ironDepositRarity.intValue(), TCDefaultValues.ironDepositMinAltitude.intValue(), this.worldConfig.WorldHeight / 2, DefaultMaterial.STONE.id); + this.ResourceSequence[this.ResourceCount++] = resource; + +- //Gold +- resource = new Resource(ResourceType.Ore, DefaultMaterial.GOLD_ORE.id, 0, TCDefaultValues.goldDepositSize.intValue(), TCDefaultValues.goldDepositFrequency.intValue(), TCDefaultValues.goldDepositRarity.intValue(), TCDefaultValues.goldDepositMinAltitude.intValue(), this.worldConfig.WorldHeight / 4, new int[]{DefaultMaterial.STONE.id}); ++ // Gold ++ resource = Resource.create(worldConfig, OreGen.class, DefaultMaterial.GOLD_ORE.id, TCDefaultValues.goldDepositSize.intValue(), TCDefaultValues.goldDepositFrequency.intValue(), TCDefaultValues.goldDepositRarity.intValue(), TCDefaultValues.goldDepositMinAltitude.intValue(), this.worldConfig.WorldHeight / 4, DefaultMaterial.STONE.id); + this.ResourceSequence[this.ResourceCount++] = resource; + +- //Redstone +- 
resource = new Resource(ResourceType.Ore, DefaultMaterial.REDSTONE_ORE.id, 0, TCDefaultValues.redstoneDepositSize.intValue(), TCDefaultValues.redstoneDepositFrequency.intValue(), TCDefaultValues.redstoneDepositRarity.intValue(), TCDefaultValues.redstoneDepositMinAltitude.intValue(), this.worldConfig.WorldHeight / 8, new int[]{DefaultMaterial.STONE.id}); ++ // Redstone ++ resource = Resource.create(worldConfig, OreGen.class, DefaultMaterial.REDSTONE_ORE.id, TCDefaultValues.redstoneDepositSize.intValue(), TCDefaultValues.redstoneDepositFrequency.intValue(), TCDefaultValues.redstoneDepositRarity.intValue(), TCDefaultValues.redstoneDepositMinAltitude.intValue(), this.worldConfig.WorldHeight / 8, DefaultMaterial.STONE.id); + this.ResourceSequence[this.ResourceCount++] = resource; + +- //Diamond +- resource = new Resource(ResourceType.Ore, DefaultMaterial.DIAMOND_ORE.id, 0, TCDefaultValues.diamondDepositSize.intValue(), TCDefaultValues.diamondDepositFrequency.intValue(), TCDefaultValues.diamondDepositRarity.intValue(), TCDefaultValues.diamondDepositMinAltitude.intValue(), this.worldConfig.WorldHeight / 8, new int[]{DefaultMaterial.STONE.id}); ++ // Diamond ++ resource = Resource.create(worldConfig, OreGen.class, DefaultMaterial.DIAMOND_ORE.id, TCDefaultValues.diamondDepositSize.intValue(), TCDefaultValues.diamondDepositFrequency.intValue(), TCDefaultValues.diamondDepositRarity.intValue(), TCDefaultValues.diamondDepositMinAltitude.intValue(), this.worldConfig.WorldHeight / 8, DefaultMaterial.STONE.id); + this.ResourceSequence[this.ResourceCount++] = resource; + +- //Lapislazuli +- resource = new Resource(ResourceType.Ore, DefaultMaterial.LAPIS_ORE.id, 0, TCDefaultValues.lapislazuliDepositSize.intValue(), TCDefaultValues.lapislazuliDepositFrequency.intValue(), TCDefaultValues.lapislazuliDepositRarity.intValue(), TCDefaultValues.lapislazuliDepositMinAltitude.intValue(), this.worldConfig.WorldHeight / 8, new int[]{DefaultMaterial.STONE.id}); ++ // Lapislazuli ++ resource = Resource.create(worldConfig, OreGen.class, DefaultMaterial.LAPIS_ORE.id, TCDefaultValues.lapislazuliDepositSize.intValue(), TCDefaultValues.lapislazuliDepositFrequency.intValue(), TCDefaultValues.lapislazuliDepositRarity.intValue(), TCDefaultValues.lapislazuliDepositMinAltitude.intValue(), this.worldConfig.WorldHeight / 8, DefaultMaterial.STONE.id); + this.ResourceSequence[this.ResourceCount++] = resource; + + DefaultBiome biome = DefaultBiome.getBiome(this.Biome.getId()); + + if (biome != null && (biome == DefaultBiome.EXTREME_HILLS || biome == DefaultBiome.SMALL_MOUNTAINS)) + { +- resource = new Resource(ResourceType.Ore, DefaultMaterial.EMERALD_ORE.id, 0, TCDefaultValues.emeraldDepositSize.intValue(), TCDefaultValues.emeraldDepositFrequency.intValue(), TCDefaultValues.emeraldDepositRarity.intValue(), TCDefaultValues.emeraldDepositMinAltitude.intValue(), this.worldConfig.WorldHeight / 4, new int[]{DefaultMaterial.STONE.id}); ++ resource = Resource.create(worldConfig, OreGen.class, DefaultMaterial.EMERALD_ORE.id, TCDefaultValues.emeraldDepositSize.intValue(), TCDefaultValues.emeraldDepositFrequency.intValue(), TCDefaultValues.emeraldDepositRarity.intValue(), TCDefaultValues.emeraldDepositMinAltitude.intValue(), this.worldConfig.WorldHeight / 4, DefaultMaterial.STONE.id); + this.ResourceSequence[this.ResourceCount++] = resource; + } + +- //Under water sand +- resource = new Resource(ResourceType.UnderWaterOre, DefaultMaterial.SAND.id, 0, TCDefaultValues.waterSandDepositSize.intValue(), 
TCDefaultValues.waterSandDepositFrequency.intValue(), TCDefaultValues.waterSandDepositRarity.intValue(), 0, 0, new int[]{DefaultMaterial.DIRT.id, DefaultMaterial.GRASS.id}); ++ // Under water sand ++ resource = Resource.create(worldConfig, UnderWaterOreGen.class, DefaultMaterial.SAND.id, TCDefaultValues.waterSandDepositSize.intValue(), TCDefaultValues.waterSandDepositFrequency.intValue(), TCDefaultValues.waterSandDepositRarity.intValue(), DefaultMaterial.DIRT.id, DefaultMaterial.GRASS.id); + this.ResourceSequence[this.ResourceCount++] = resource; + +- //Under water clay ++ // Under water clay + if (this.DefaultClay > 0) + { +- resource = new Resource(ResourceType.UnderWaterOre, DefaultMaterial.CLAY.id, 0, TCDefaultValues.waterClayDepositSize.intValue(), this.DefaultClay, TCDefaultValues.waterClayDepositRarity.intValue(), 0, 0, new int[]{DefaultMaterial.DIRT.id, DefaultMaterial.CLAY.id}); ++ resource = Resource.create(worldConfig, UnderWaterOreGen.class, DefaultMaterial.CLAY.id, TCDefaultValues.waterClayDepositSize.intValue(), this.DefaultClay, TCDefaultValues.waterClayDepositRarity.intValue(), DefaultMaterial.DIRT.id, DefaultMaterial.CLAY.id); + this.ResourceSequence[this.ResourceCount++] = resource; + } +- //Custom objects +- resource = new Resource(ResourceType.CustomObject); +- ResourceType.CustomObject.Generator.ReadFromString(resource, new String[]{BODefaultValues.BO_Use_World.stringValue()}, this); ++ // Custom objects ++ resource = Resource.create(worldConfig, CustomObjectGen.class, ""UseWorld""); + this.ResourceSequence[this.ResourceCount++] = resource; + - -- // -- if (providerRef == null) { -- return null; -- } ++ // Trees + if (biome != null) + switch (biome) + { +- case OCEAN: // Ocean - default +- case EXTREME_HILLS: // BigHills - default +- case RIVER: // River - default +- case SMALL_MOUNTAINS: // SmallHills +- resource = new Resource(ResourceType.Tree, this.DefaultTrees, new TreeType[]{TreeType.BigTree, TreeType.Tree}, new int[]{1, 9}); +- this.ResourceSequence[this.ResourceCount++] = resource; +- break; +- case PLAINS: // Plains - no tree +- case DESERT: // Desert - no tree +- case DESERT_HILLS: //HillsDesert +- break; +- case FOREST_HILLS: // HillsForest +- case FOREST: // Forest - forest +- resource = new Resource(ResourceType.Tree, this.DefaultTrees, new TreeType[]{TreeType.Forest, TreeType.BigTree, TreeType.Tree}, new int[]{20, 10, 100}); +- this.ResourceSequence[this.ResourceCount++] = resource; +- break; +- case TAIGA_HILLS: //HillsTaiga +- case TAIGA: // Taiga - taiga +- resource = new Resource(ResourceType.Tree, this.DefaultTrees, new TreeType[]{TreeType.Taiga1, TreeType.Taiga2}, new int[]{35, 100}); +- this.ResourceSequence[this.ResourceCount++] = resource; +- break; +- case SWAMPLAND: // Swamp - swamp +- resource = new Resource(ResourceType.Tree, this.DefaultTrees, new TreeType[]{TreeType.SwampTree}, new int[]{100}); +- this.ResourceSequence[this.ResourceCount++] = resource; +- break; +- case MUSHROOM_ISLAND: // Mushroom island +- resource = new Resource(ResourceType.Tree, this.DefaultTrees, new TreeType[]{TreeType.HugeMushroom}, new int[]{100}); +- this.ResourceSequence[this.ResourceCount++] = resource; +- break; +- case JUNGLE:// Jungle +- case JUNGLE_HILLS: +- resource = new Resource(ResourceType.Tree, this.DefaultTrees, new TreeType[]{TreeType.BigTree, TreeType.GroundBush, TreeType.JungleTree, TreeType.CocoaTree}, new int[]{10, 50, 35, 100}); +- this.ResourceSequence[this.ResourceCount++] = resource; +- break; - -- // -- return providerRef.getObject(); -+ 
@Override -+ protected TimestampedObject> loadClass(String name) { -+ return classes.get(name); - } ++ case OCEAN: // Ocean - default ++ case EXTREME_HILLS: // BigHills - default ++ case RIVER: // River - default ++ case SMALL_MOUNTAINS: // SmallHills ++ resource = Resource.create(worldConfig, TreeGen.class, this.DefaultTrees, TreeType.BigTree, 1, TreeType.Tree, 9); ++ this.ResourceSequence[this.ResourceCount++] = resource; ++ break; ++ case PLAINS: // Plains - no tree ++ case DESERT: // Desert - no tree ++ case DESERT_HILLS: // HillsDesert ++ break; ++ case FOREST_HILLS: // HillsForest ++ case FOREST: // Forest - forest ++ resource = Resource.create(worldConfig, TreeGen.class, this.DefaultTrees, TreeType.Forest, 20, TreeType.BigTree, 10, TreeType.Tree, 100); ++ this.ResourceSequence[this.ResourceCount++] = resource; ++ break; ++ case TAIGA_HILLS: // HillsTaiga ++ case TAIGA: // Taiga - taiga ++ resource = Resource.create(worldConfig, TreeGen.class, this.DefaultTrees, TreeType.Taiga1, 35, TreeType.Taiga2, 100); ++ this.ResourceSequence[this.ResourceCount++] = resource; ++ break; ++ case SWAMPLAND: // Swamp - swamp ++ resource = Resource.create(worldConfig, TreeGen.class, this.DefaultTrees, TreeType.SwampTree, 100); ++ this.ResourceSequence[this.ResourceCount++] = resource; ++ break; ++ case MUSHROOM_ISLAND: // Mushroom island ++ resource = Resource.create(worldConfig, TreeGen.class, this.DefaultTrees, TreeType.HugeMushroom, 100); ++ this.ResourceSequence[this.ResourceCount++] = resource; ++ break; ++ case JUNGLE:// Jungle ++ case JUNGLE_HILLS: ++ resource = Resource.create(worldConfig, TreeGen.class, this.DefaultTrees, TreeType.BigTree, 10, TreeType.GroundBush, 50, TreeType.JungleTree, 35, TreeType.CocoaTree, 100); ++ this.ResourceSequence[this.ResourceCount++] = resource; ++ break; -- T getInstance(String name) throws NoSuchCommandException, NullPointerException { -- Class clazz = getClass(name); -- if (clazz == null) { -- return null; -- } -+ @Override -+ protected void saveClass(String name, TimestampedObject> clazz) { -+ classes.put(name, clazz); -+ } + } + if (this.DefaultWaterLily > 0) + { +- resource = new Resource(ResourceType.AboveWaterRes, DefaultMaterial.WATER_LILY.id, 0, 0, this.DefaultWaterLily, 100, 0, 0, new int[0]); ++ resource = Resource.create(worldConfig, AboveWaterGen.class, DefaultMaterial.WATER_LILY.id, this.DefaultWaterLily, 100); + this.ResourceSequence[this.ResourceCount++] = resource; + } -- // -- try { -- return clazz.newInstance(); -- } -- catch (Exception e) { -- throw new NoSuchCommandException(name, ErrorType.INTERNAL, ""Could not create command "" + name + "" instance"", e); -- } -+ @Override -+ protected Resource getResource(String name) { -+ return context.loadResource(name, kind); - } - }" -5f232224c4de92467c4de6385590ec19e0568ba5,Mylyn Reviews,"322734: Display just the last review result for a task -",p,https://github.com/eclipse-mylyn/org.eclipse.mylyn.reviews,"diff --git a/org.eclipse.mylyn.reviews.core/src/org/eclipse/mylyn/reviews/core/ReviewsUtil.java b/org.eclipse.mylyn.reviews.core/src/org/eclipse/mylyn/reviews/core/ReviewsUtil.java -index d1a7e91b..9cd94ab5 100644 ---- a/org.eclipse.mylyn.reviews.core/src/org/eclipse/mylyn/reviews/core/ReviewsUtil.java -+++ b/org.eclipse.mylyn.reviews.core/src/org/eclipse/mylyn/reviews/core/ReviewsUtil.java -@@ -148,12 +148,32 @@ public static List getReviewAttachmentFromTask( - List reviews = new ArrayList(); - TaskData taskData = taskDataManager.getTaskData(task); - if (taskData != null) { -- for (TaskAttribute 
attribute : getReviewAttachments( -- repositoryModel, taskData)) { -- reviews.addAll(parseAttachments(attribute, -- new NullProgressMonitor())); -+ List attributesByType = taskData -+ .getAttributeMapper().getAttributesByType(taskData, -+ TaskAttribute.TYPE_ATTACHMENT); -+ ITaskAttachment lastReview = null; -+ -+ for (TaskAttribute attribute : attributesByType) { -+ // TODO move RepositoryModel.createTaskAttachment to interface? -+ ITaskAttachment taskAttachment = ((RepositoryModel) repositoryModel) -+ .createTaskAttachment(attribute); -+ if (taskAttachment != null -+ && taskAttachment.getFileName().equals( -+ ReviewConstants.REVIEW_DATA_CONTAINER)) { -+ -+ if (lastReview == null -+ || lastReview.getCreationDate().before( -+ taskAttachment.getCreationDate())) { -+ lastReview = taskAttachment; -+ } -+ } - - } -+ -+ if (lastReview != null) { -+ reviews.addAll(parseAttachments(lastReview.getTaskAttribute(), -+ new NullProgressMonitor())); -+ } - - } - return reviews; -@@ -203,4 +223,5 @@ public static void markAsReview(ITask task) { - public static boolean hasReviewMarker(ITask task) { - return task.getAttribute(ReviewConstants.ATTR_REVIEW_FLAG) != null; - } -+ - }" -01e706466e561557d8591b3031cd85ae39b0559a,intellij-community,gradle: correctly set TestModuleProperties for- modules containing '-' in names (IDEA-151590)--,c,https://github.com/JetBrains/intellij-community,"diff --git a/platform/external-system-api/src/com/intellij/openapi/externalSystem/model/project/ModuleData.java b/platform/external-system-api/src/com/intellij/openapi/externalSystem/model/project/ModuleData.java -index 4179681ab59ba..c2c06d2180435 100644 ---- a/platform/external-system-api/src/com/intellij/openapi/externalSystem/model/project/ModuleData.java -+++ b/platform/external-system-api/src/com/intellij/openapi/externalSystem/model/project/ModuleData.java -@@ -3,7 +3,6 @@ - import com.intellij.ide.highlighter.ModuleFileType; - import com.intellij.openapi.externalSystem.model.ProjectSystemId; - import com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil; --import com.intellij.openapi.util.io.FileUtil; - import com.intellij.util.containers.ContainerUtil; - import org.jetbrains.annotations.NotNull; - import org.jetbrains.annotations.Nullable; -@@ -58,7 +57,7 @@ protected ModuleData(@NotNull String id, - @NotNull String internalName, - @NotNull String moduleFileDirectoryPath, - @NotNull String externalConfigPath) { -- super(owner, externalName, FileUtil.sanitizeFileName(internalName)); -+ super(owner, externalName, internalName); - myId = id; - myModuleTypeId = typeId; - myExternalConfigPath = externalConfigPath; -diff --git a/plugins/gradle/src/org/jetbrains/plugins/gradle/service/project/BaseGradleProjectResolverExtension.java b/plugins/gradle/src/org/jetbrains/plugins/gradle/service/project/BaseGradleProjectResolverExtension.java -index 2a7f6b2eb6c25..9f67df07d6893 100644 ---- a/plugins/gradle/src/org/jetbrains/plugins/gradle/service/project/BaseGradleProjectResolverExtension.java -+++ b/plugins/gradle/src/org/jetbrains/plugins/gradle/service/project/BaseGradleProjectResolverExtension.java -@@ -234,8 +234,8 @@ public DataNode createModule(@NotNull IdeaModule gradleModule, @NotN - } - - @NotNull -- public String getInternalModuleName(@NotNull IdeaModule gradleModule, @NotNull String sourceSetName) { -- return gradleModule.getName() + ""_"" + sourceSetName; -+ private static String getInternalModuleName(@NotNull IdeaModule gradleModule, @NotNull String sourceSetName) { -+ return 
FileUtil.sanitizeFileName(gradleModule.getName() + ""_"" + sourceSetName); - } - - @Override -diff --git a/plugins/gradle/testSources/org/jetbrains/plugins/gradle/importing/GradleMiscImportingTest.java b/plugins/gradle/testSources/org/jetbrains/plugins/gradle/importing/GradleMiscImportingTest.java -index c28a1745846c8..88c0cb575d438 100644 ---- a/plugins/gradle/testSources/org/jetbrains/plugins/gradle/importing/GradleMiscImportingTest.java -+++ b/plugins/gradle/testSources/org/jetbrains/plugins/gradle/importing/GradleMiscImportingTest.java -@@ -59,6 +59,20 @@ public void testTestModuleProperties() throws Exception { - assertSame(productionModule, testModuleProperties.getProductionModule()); - } - -+ @Test -+ public void testTestModulePropertiesForModuleWithHyphenInName() throws Exception { -+ createSettingsFile(""rootProject.name='my-project'""); -+ importProject( -+ ""apply plugin: 'java'"" -+ ); -+ -+ assertModules(""my-project"", ""my_project_main"", ""my_project_test""); -+ -+ final Module testModule = getModule(""my_project_test""); -+ TestModuleProperties testModuleProperties = TestModuleProperties.getInstance(testModule); -+ assertEquals(""my_project_main"", testModuleProperties.getProductionModuleName()); -+ } -+ - @Test - public void testInheritProjectJdkForModules() throws Exception { - importProject(" -19152416a44473325a6c3605f9accc4fee379b63,elasticsearch,add an index level setting to disable/enable- purging of expired docs --,a,https://github.com/elastic/elasticsearch,"diff --git a/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java b/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java -index ae4a010e95c40..f000ba6987225 100644 ---- a/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java -+++ b/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java -@@ -31,6 +31,8 @@ - import org.elasticsearch.action.bulk.BulkResponse; - import org.elasticsearch.action.delete.DeleteRequest; - import org.elasticsearch.client.Client; -+import org.elasticsearch.cluster.ClusterService; -+import org.elasticsearch.cluster.metadata.IndexMetaData; - import org.elasticsearch.cluster.metadata.MetaData; - import org.elasticsearch.common.component.AbstractLifecycleComponent; - import org.elasticsearch.common.inject.Inject; -@@ -65,8 +67,13 @@ public class IndicesTTLService extends AbstractLifecycleComponent 0) + { +- //Red flower +- resource = new Resource(ResourceType.Plant, DefaultMaterial.RED_ROSE.id, 0, 0, this.DefaultFlowers, TCDefaultValues.roseDepositRarity.intValue(), TCDefaultValues.roseDepositMinAltitude.intValue(), this.worldConfig.WorldHeight, new int[]{DefaultMaterial.GRASS.id, DefaultMaterial.DIRT.id, DefaultMaterial.SOIL.id}); ++ // Red flower ++ resource = Resource.create(worldConfig, PlantGen.class, DefaultMaterial.RED_ROSE.id, this.DefaultFlowers, TCDefaultValues.roseDepositRarity.intValue(), TCDefaultValues.roseDepositMinAltitude.intValue(), this.worldConfig.WorldHeight, DefaultMaterial.GRASS.id, DefaultMaterial.DIRT.id, DefaultMaterial.SOIL.id); + this.ResourceSequence[this.ResourceCount++] = resource; + +- //Yellow flower +- resource = new Resource(ResourceType.Plant, DefaultMaterial.YELLOW_FLOWER.id, 0, 0, this.DefaultFlowers, TCDefaultValues.flowerDepositRarity.intValue(), TCDefaultValues.flowerDepositMinAltitude.intValue(), this.worldConfig.WorldHeight, new int[]{DefaultMaterial.GRASS.id, DefaultMaterial.DIRT.id, DefaultMaterial.SOIL.id}); ++ // Yellow flower ++ resource = Resource.create(worldConfig, PlantGen.class, 
DefaultMaterial.YELLOW_FLOWER.id, this.DefaultFlowers, TCDefaultValues.flowerDepositRarity.intValue(), TCDefaultValues.flowerDepositMinAltitude.intValue(), this.worldConfig.WorldHeight, DefaultMaterial.GRASS.id, DefaultMaterial.DIRT.id, DefaultMaterial.SOIL.id); + this.ResourceSequence[this.ResourceCount++] = resource; } - /** -- * Returns the shards to purge, i.e. the local started primary shards that have ttl enabled -+ * Returns the shards to purge, i.e. the local started primary shards that have ttl enabled and disable_purge to false - */ - private List getShardsToPurge() { - List shardsToPurge = new ArrayList(); - for (IndexService indexService : indicesService) { -+ // check the value of disable_purge for this index -+ IndexMetaData indexMetaData = clusterService.state().metaData().index(indexService.index().name()); -+ boolean disablePurge = indexMetaData.settings().getAsBoolean(""index.ttl.disable_purge"", false); -+ if (disablePurge) { -+ continue; -+ } -+ - // should be optimized with the hasTTL flag - FieldMappers ttlFieldMappers = indexService.mapperService().name(TTLFieldMapper.NAME); - if (ttlFieldMappers == null) {" -c6f22949dc72af5edfd18bf4d350ae925fcc2d2c,Valadoc,"valadoc: LinkHelper: turn get_package_link into a virtual method -",a,https://github.com/GNOME/vala/,⚠️ Could not parse repo info -00db7d150b22031a0c030d55f1395e0ba41c1c76,kotlin,Fix KT-10472: compare all overloads including- varargs in a single pass.--,c,https://github.com/JetBrains/intellij-community,⚠️ HTTP 404: Not Found -c1af71dbf3e85cba514b7cda53ffa20618f7cda3,hidendra$lwc,"FULL removal of Memory Database. Every single usage of it has been removed - gone. Still may be UNSTABLE and volatile; it has not be tested extensively just yet !! [#53] The current implementation will be smoothed out later on. 
-",p,https://github.com/hidendra/lwc,"diff --git a/modules/core/src/main/java/com/griefcraft/modules/admin/AdminForceOwner.java b/modules/core/src/main/java/com/griefcraft/modules/admin/AdminForceOwner.java -index 0ac0df12b..cf9c04c67 100644 ---- a/modules/core/src/main/java/com/griefcraft/modules/admin/AdminForceOwner.java -+++ b/modules/core/src/main/java/com/griefcraft/modules/admin/AdminForceOwner.java -@@ -19,6 +19,7 @@ - - import com.griefcraft.lwc.LWC; - import com.griefcraft.model.Action; -+import com.griefcraft.model.LWCPlayer; - import com.griefcraft.model.Protection; - import com.griefcraft.scripting.JavaModule; - import com.griefcraft.scripting.event.LWCBlockInteractEvent; -@@ -41,9 +42,9 @@ public void onProtectionInteract(LWCProtectionInteractEvent event) { - - LWC lwc = event.getLWC(); - Protection protection = event.getProtection(); -- Player player = event.getPlayer(); -+ LWCPlayer player = lwc.wrapPlayer(event.getPlayer()); - -- Action action = lwc.getMemoryDatabase().getAction(""forceowner"", player.getName()); -+ Action action = player.getAction(""forceowner""); - String newOwner = action.getData(); - - protection.setOwner(newOwner); -@@ -106,10 +107,15 @@ public void onCommand(LWCCommandEvent event) { - return; + if (this.DefaultMushroom > 0) + { +- //Red mushroom +- resource = new Resource(ResourceType.Plant, DefaultMaterial.RED_MUSHROOM.id, 0, 0, this.DefaultMushroom, TCDefaultValues.redMushroomDepositRarity.intValue(), TCDefaultValues.redMushroomDepositMinAltitude.intValue(), this.worldConfig.WorldHeight, new int[]{DefaultMaterial.GRASS.id, DefaultMaterial.DIRT.id}); ++ // Red mushroom ++ resource = Resource.create(worldConfig, PlantGen.class, DefaultMaterial.RED_MUSHROOM.id, this.DefaultMushroom, TCDefaultValues.redMushroomDepositRarity.intValue(), TCDefaultValues.redMushroomDepositMinAltitude.intValue(), this.worldConfig.WorldHeight, DefaultMaterial.GRASS.id, DefaultMaterial.DIRT.id); + this.ResourceSequence[this.ResourceCount++] = resource; + +- //Brown mushroom +- resource = new Resource(ResourceType.Plant, DefaultMaterial.BROWN_MUSHROOM.id, 0, 0, this.DefaultMushroom, TCDefaultValues.brownMushroomDepositRarity.intValue(), TCDefaultValues.brownMushroomDepositMinAltitude.intValue(), this.worldConfig.WorldHeight, new int[]{DefaultMaterial.GRASS.id, DefaultMaterial.DIRT.id}); ++ // Brown mushroom ++ resource = Resource.create(worldConfig, PlantGen.class, DefaultMaterial.BROWN_MUSHROOM.id, this.DefaultMushroom, TCDefaultValues.brownMushroomDepositRarity.intValue(), TCDefaultValues.brownMushroomDepositMinAltitude.intValue(), this.worldConfig.WorldHeight, DefaultMaterial.GRASS.id, DefaultMaterial.DIRT.id); + this.ResourceSequence[this.ResourceCount++] = resource; } -- Player player = (Player) sender; -+ LWCPlayer player = lwc.wrapPlayer(sender); - String newOwner = args[1]; - -- lwc.getMemoryDatabase().registerAction(""forceowner"", player.getName(), newOwner); -+ Action action = new Action(); -+ action.setName(""forceowner""); -+ action.setPlayer(player); -+ action.setData(newOwner); -+ player.addAction(action); -+ - lwc.sendLocale(sender, ""protection.admin.forceowner.finalize"", ""player"", newOwner); - - return; -diff --git a/modules/core/src/main/java/com/griefcraft/modules/create/CreateModule.java b/modules/core/src/main/java/com/griefcraft/modules/create/CreateModule.java -index 408b80120..32f56cd3d 100644 ---- a/modules/core/src/main/java/com/griefcraft/modules/create/CreateModule.java -+++ 
b/modules/core/src/main/java/com/griefcraft/modules/create/CreateModule.java -@@ -20,6 +20,7 @@ - import com.griefcraft.lwc.LWC; - import com.griefcraft.model.AccessRight; - import com.griefcraft.model.Action; -+import com.griefcraft.model.LWCPlayer; - import com.griefcraft.model.Protection; - import com.griefcraft.model.ProtectionTypes; - import com.griefcraft.scripting.JavaModule; -@@ -29,7 +30,6 @@ - import com.griefcraft.scripting.event.LWCProtectionInteractEvent; - import com.griefcraft.scripting.event.LWCProtectionRegisterEvent; - import com.griefcraft.scripting.event.LWCProtectionRegistrationPostEvent; --import com.griefcraft.sql.MemDB; - import com.griefcraft.sql.PhysDB; - import com.griefcraft.util.Colors; - import com.griefcraft.util.StringUtils; -@@ -76,16 +76,15 @@ public void onBlockInteract(LWCBlockInteractEvent event) { - - LWC lwc = event.getLWC(); - Block block = event.getBlock(); -- Player player = event.getPlayer(); -+ LWCPlayer player = lwc.wrapPlayer(event.getPlayer()); - - if (!lwc.isProtectable(block)) { - return; + if (this.DefaultGrass > 0) + { +- //Grass +- resource = new Resource(ResourceType.Grass, DefaultMaterial.LONG_GRASS.id, 1, 0, this.DefaultGrass, TCDefaultValues.longGrassDepositRarity.intValue(), 0, 0, new int[]{DefaultMaterial.GRASS.id, DefaultMaterial.DIRT.id}); ++ // Grass ++ resource = Resource.create(worldConfig, GrassGen.class, DefaultMaterial.LONG_GRASS.id, 1, this.DefaultGrass, TCDefaultValues.longGrassDepositRarity.intValue(), DefaultMaterial.GRASS.id, DefaultMaterial.DIRT.id); + this.ResourceSequence[this.ResourceCount++] = resource; } - PhysDB physDb = lwc.getPhysicalDatabase(); -- MemDB memDb = lwc.getMemoryDatabase(); - -- Action action = memDb.getAction(""create"", player.getName()); -+ Action action = player.getAction(""create""); - String actionData = action.getData(); - String[] split = actionData.split("" ""); - String protectionType = split[0].toLowerCase(); -@@ -107,8 +106,8 @@ public void onBlockInteract(LWCBlockInteractEvent event) { - int blockZ = block.getZ(); - - lwc.removeModes(player); -- Result registerProtection = lwc.getModuleLoader().dispatchEvent(Event.REGISTER_PROTECTION, player, block); -- LWCProtectionRegisterEvent evt = new LWCProtectionRegisterEvent(player, block); -+ Result registerProtection = lwc.getModuleLoader().dispatchEvent(Event.REGISTER_PROTECTION, player.getBukkitPlayer(), block); -+ LWCProtectionRegisterEvent evt = new LWCProtectionRegisterEvent(player.getBukkitPlayer(), block); - lwc.getModuleLoader().dispatchEvent(evt); - - // another plugin cancelled the registration -@@ -126,7 +125,7 @@ public void onBlockInteract(LWCBlockInteractEvent event) { - String password = lwc.encrypt(protectionData); - - protection = physDb.registerProtection(block.getTypeId(), ProtectionTypes.PASSWORD, worldName, playerName, password, blockX, blockY, blockZ); -- memDb.registerPlayer(playerName, protection.getId()); -+ player.addAccessibleProtection(protection); - - lwc.sendLocale(player, ""protection.interact.create.finalize""); - lwc.sendLocale(player, ""protection.interact.create.password""); -@@ -234,7 +233,7 @@ public void onCommand(LWCCommandEvent event) { - return; + if (this.DefaultDeadBrush > 0) + { +- //Dead Bush +- resource = new Resource(ResourceType.Grass, DefaultMaterial.DEAD_BUSH.id, 0, 0, this.DefaultDeadBrush, TCDefaultValues.deadBushDepositRarity.intValue(), 0, 0, new int[]{DefaultMaterial.SAND.id}); ++ // Dead Bush ++ resource = Resource.create(worldConfig, GrassGen.class, DefaultMaterial.DEAD_BUSH.id, 0, 
this.DefaultDeadBrush, TCDefaultValues.deadBushDepositRarity.intValue(), DefaultMaterial.SAND.id); + this.ResourceSequence[this.ResourceCount++] = resource; } -- Player player = (Player) sender; -+ LWCPlayer player = lwc.wrapPlayer(sender); +- //Pumpkin +- resource = new Resource(ResourceType.Plant, DefaultMaterial.PUMPKIN.id, 0, 0, TCDefaultValues.pumpkinDepositFrequency.intValue(), TCDefaultValues.pumpkinDepositRarity.intValue(), TCDefaultValues.pumpkinDepositMinAltitude.intValue(), this.worldConfig.WorldHeight, new int[]{DefaultMaterial.GRASS.id}); ++ // Pumpkin ++ resource = Resource.create(worldConfig, PlantGen.class, DefaultMaterial.PUMPKIN.id, TCDefaultValues.pumpkinDepositFrequency.intValue(), TCDefaultValues.pumpkinDepositRarity.intValue(), TCDefaultValues.pumpkinDepositMinAltitude.intValue(), this.worldConfig.WorldHeight, DefaultMaterial.GRASS.id); + this.ResourceSequence[this.ResourceCount++] = resource; - String full = StringUtils.join(args, 0); - String type = args[0].toLowerCase(); -@@ -272,12 +271,15 @@ public void onCommand(LWCCommandEvent event) { - return; +- + if (this.DefaultReed > 0) + { +- //Reed +- resource = new Resource(ResourceType.Reed, DefaultMaterial.SUGAR_CANE_BLOCK.id, 0, 0, this.DefaultReed, TCDefaultValues.reedDepositRarity.intValue(), TCDefaultValues.reedDepositMinAltitude.intValue(), this.worldConfig.WorldHeight, new int[]{DefaultMaterial.GRASS.id, DefaultMaterial.DIRT.id, DefaultMaterial.SAND.id}); ++ // Reed ++ resource = Resource.create(worldConfig, ReedGen.class, DefaultMaterial.SUGAR_CANE_BLOCK.id, this.DefaultReed, TCDefaultValues.reedDepositRarity.intValue(), TCDefaultValues.reedDepositMinAltitude.intValue(), this.worldConfig.WorldHeight, DefaultMaterial.GRASS.id, DefaultMaterial.DIRT.id, DefaultMaterial.SAND.id); + this.ResourceSequence[this.ResourceCount++] = resource; } -- MemDB db = lwc.getMemoryDatabase(); -- db.unregisterAllActions(player.getName()); -- db.registerAction(""create"", player.getName(), full); -+ Action action = new Action(); -+ action.setName(""create""); -+ action.setPlayer(player); -+ action.setData(full); -+ -+ player.removeAllActions(); -+ player.addAction(action); +- + if (this.DefaultCactus > 0) + { +- //Cactus +- resource = new Resource(ResourceType.Cactus, DefaultMaterial.CACTUS.id, 0, 0, this.DefaultCactus, TCDefaultValues.cactusDepositRarity.intValue(), TCDefaultValues.cactusDepositMinAltitude.intValue(), this.worldConfig.WorldHeight, new int[]{DefaultMaterial.SAND.id}); ++ // Cactus ++ resource = Resource.create(worldConfig, CactusGen.class, DefaultMaterial.CACTUS.id, this.DefaultCactus, TCDefaultValues.cactusDepositRarity.intValue(), TCDefaultValues.cactusDepositMinAltitude.intValue(), this.worldConfig.WorldHeight, DefaultMaterial.SAND.id); + this.ResourceSequence[this.ResourceCount++] = resource; + } +- if (biome == DefaultBiome.JUNGLE || biome == DefaultBiome.JUNGLE_HILLS) // Jungle and Jungle Hills ++ if (biome == DefaultBiome.JUNGLE || biome == DefaultBiome.JUNGLE_HILLS) + { +- resource = new Resource(ResourceType.Vines, 0, 0, 0, TCDefaultValues.vinesFrequency.intValue(), TCDefaultValues.vinesRarity.intValue(), TCDefaultValues.vinesMinAltitude.intValue(), this.worldConfig.WorldHeight, new int[]{DefaultMaterial.VINE.id}); ++ resource = Resource.create(worldConfig, VinesGen.class, TCDefaultValues.vinesFrequency.intValue(), TCDefaultValues.vinesRarity.intValue(), TCDefaultValues.vinesMinAltitude.intValue(), this.worldConfig.WorldHeight, DefaultMaterial.VINE.id); + this.ResourceSequence[this.ResourceCount++] = 
resource; + } - lwc.sendLocale(player, ""protection.create.finalize"", ""type"", lwc.getLocale(type)); -- return; - } +- //Water source +- resource = new Resource(ResourceType.Liquid, DefaultMaterial.WATER.id, 0, 0, TCDefaultValues.waterSourceDepositFrequency.intValue(), TCDefaultValues.waterSourceDepositRarity.intValue(), TCDefaultValues.waterSourceDepositMinAltitude.intValue(), this.worldConfig.WorldHeight, new int[]{DefaultMaterial.STONE.id}); ++ // Water source ++ resource = Resource.create(worldConfig, LiquidGen.class, DefaultMaterial.WATER.id, TCDefaultValues.waterSourceDepositFrequency.intValue(), TCDefaultValues.waterSourceDepositRarity.intValue(), TCDefaultValues.waterSourceDepositMinAltitude.intValue(), this.worldConfig.WorldHeight, DefaultMaterial.STONE.id); + this.ResourceSequence[this.ResourceCount++] = resource; - } -diff --git a/modules/core/src/main/java/com/griefcraft/modules/flag/BaseFlagModule.java b/modules/core/src/main/java/com/griefcraft/modules/flag/BaseFlagModule.java -index 8d000e82f..92576dcde 100644 ---- a/modules/core/src/main/java/com/griefcraft/modules/flag/BaseFlagModule.java -+++ b/modules/core/src/main/java/com/griefcraft/modules/flag/BaseFlagModule.java -@@ -19,13 +19,13 @@ - - import com.griefcraft.lwc.LWC; - import com.griefcraft.model.Action; -+import com.griefcraft.model.LWCPlayer; - import com.griefcraft.model.Protection; - import com.griefcraft.scripting.JavaModule; - import com.griefcraft.scripting.event.LWCCommandEvent; - import com.griefcraft.scripting.event.LWCProtectionInteractEvent; - import com.griefcraft.util.StringUtils; - import org.bukkit.command.CommandSender; --import org.bukkit.entity.Player; +- //Lava source +- resource = new Resource(ResourceType.Liquid, DefaultMaterial.LAVA.id, 0, 0, TCDefaultValues.lavaSourceDepositFrequency.intValue(), TCDefaultValues.lavaSourceDepositRarity.intValue(), TCDefaultValues.lavaSourceDepositMinAltitude.intValue(), this.worldConfig.WorldHeight, new int[]{DefaultMaterial.STONE.id}); ++ // Lava source ++ resource = Resource.create(worldConfig, LiquidGen.class, DefaultMaterial.LAVA.id, TCDefaultValues.lavaSourceDepositFrequency.intValue(), TCDefaultValues.lavaSourceDepositRarity.intValue(), TCDefaultValues.lavaSourceDepositMinAltitude.intValue(), this.worldConfig.WorldHeight, DefaultMaterial.STONE.id); + this.ResourceSequence[this.ResourceCount++] = resource; - public class BaseFlagModule extends JavaModule { +- + } -@@ -37,9 +37,9 @@ public void onProtectionInteract(LWCProtectionInteractEvent event) { + protected void ReadConfigSettings() +@@ -348,7 +355,6 @@ protected void ReadConfigSettings() - LWC lwc = event.getLWC(); - Protection protection = event.getProtection(); -- Player player = event.getPlayer(); -+ LWCPlayer player = lwc.wrapPlayer(event.getPlayer()); + this.BiomeRivers = ReadModSettings(TCDefaultValues.BiomeRivers.name(), this.DefaultRiver); -- Action action = lwc.getMemoryDatabase().getAction(""flag"", player.getName()); -+ Action action = player.getAction(""flag""); - String data = action.getData(); - event.setResult(Result.CANCEL); +- + this.IsleInBiome = ReadModSettings(TCDefaultValues.IsleInBiome.name(), this.DefaultIsle); + this.BiomeIsBorder = ReadModSettings(TCDefaultValues.BiomeIsBorder.name(), this.DefaultBorder); + this.NotBorderNear = ReadModSettings(TCDefaultValues.NotBorderNear.name(), this.DefaultNotBorderNear); +@@ -364,7 +370,6 @@ protected void ReadConfigSettings() + this.SurfaceBlock = ReadModSettings(TCDefaultValues.SurfaceBlock.name(), this.DefaultSurfaceBlock); + 
this.GroundBlock = ReadModSettings(TCDefaultValues.GroundBlock.name(), this.DefaultGroundBlock); -@@ -99,7 +99,7 @@ public void onCommand(LWCCommandEvent event) { - return; - } +- + this.UseWorldWaterLevel = ReadSettings(TCDefaultValues.UseWorldWaterLevel); + this.waterLevelMax = ReadSettings(TCDefaultValues.WaterLevelMax); + this.waterLevelMin = ReadSettings(TCDefaultValues.WaterLevelMin); +@@ -389,7 +394,6 @@ protected void ReadConfigSettings() + if (DefaultBiome.getBiome(this.Biome.getId()) == null) + { + // Only for custom biomes +- // System.out.println(""Reading mobs for ""+this.Name); // debug + this.spawnMonstersAddDefaults = ReadModSettings(""spawnMonstersAddDefaults"", true); + this.spawnMonsters = ReadModSettings(""spawnMonsters"", new ArrayList()); + this.spawnCreaturesAddDefaults = ReadModSettings(""spawnCreaturesAddDefaults"", true); +@@ -487,33 +491,30 @@ private void ReadResourceSettings() + for (Map.Entry entry : this.SettingsCache.entrySet()) + { + String key = entry.getKey(); +- for (ResourceType type : ResourceType.values()) ++ int start = key.indexOf(""(""); ++ int end = key.lastIndexOf("")""); ++ if (start != -1 && end != -1) + { +- if (key.startsWith(type.name())) ++ String name = key.substring(0, start); ++ String[] props = ReadComplexString(key.substring(start + 1, end)); ++ ++ Resource res = TerrainControl.getResourcesManager().getResource(name, this, Arrays.asList(props)); ++ ++ if (res != null) + { +- int start = key.indexOf(""(""); +- int end = key.lastIndexOf("")""); +- if (start != -1 && end != -1) ++ ++ if (res.getType() == ResourceType.saplingResource) + { +- Resource res = new Resource(type); +- String[] props = ReadComplexString(key.substring(start + 1, end)); +- +- if (type.Generator.ReadFromString(res, props, this)) +- { +- if (res.Type == ResourceType.Sapling) +- { +- if (res.BlockData == -1) +- this.SaplingResource = res; +- else +- this.SaplingTypes[res.BlockData] = res; +- +- } else +- { +- LineNumbers.add(Integer.valueOf(entry.getValue())); +- this.ResourceSequence[this.ResourceCount++] = res; +- } +- } else +- System.out.println(""TerrainControl: wrong resource "" + type.name() + key); ++ SaplingGen sapling = (SaplingGen) res; ++ if (sapling.saplingType == -1) ++ this.SaplingResource = sapling; ++ else ++ this.SaplingTypes[sapling.saplingType] = sapling; ++ ++ } else if (res.getType() == ResourceType.biomeConfigResource) ++ { ++ LineNumbers.add(Integer.valueOf(entry.getValue())); ++ this.ResourceSequence[this.ResourceCount++] = res; + } + } + } +@@ -541,43 +542,30 @@ private void ReadResourceSettings() -- Player player = (Player) sender; -+ LWCPlayer player = lwc.wrapPlayer(sender); - String flagName = args[0]; - String type = args[1].toLowerCase(); - String internalType; // + or - -@@ -142,8 +142,14 @@ public void onCommand(LWCCommandEvent event) { - return; + private void ReadCustomObjectSettings() + { +- CustomObjectsCompiled = new ArrayList(); ++ biomeObjects = new ArrayList(); ++ biomeObjectStrings = new ArrayList(); + + // Read from BiomeObjects setting +- String customObjectsString = ReadModSettings(""biomeobjects"",""""); +- if(customObjectsString.length() > 0) ++ String biomeObjectsValue = ReadModSettings(""biomeobjects"", """"); ++ if (biomeObjectsValue.length() > 0) + { +- String[] customObjects = customObjectsString.split("",""); +- for (String customObject : customObjects) ++ String[] customObjectStrings = biomeObjectsValue.split("",""); ++ for (String customObjectString : customObjectStrings) + { +- CustomObjectCompiled object = 
ObjectsStore.CompileString(customObject, worldConfig.CustomObjectsDirectory); +- if (object == null) +- object = ObjectsStore.CompileString(customObject, ObjectsStore.GlobalDirectory); ++ CustomObject object = TerrainControl.getCustomObjectManager().getObjectFromString(customObjectString, worldConfig); + if (object != null) +- CustomObjectsCompiled.add(object); ++ { ++ biomeObjects.add(object); ++ biomeObjectStrings.add(customObjectString); ++ } + } } +- +- // Read from random places in BiomeConfig +- // TODO: Remove this in 2.4, as it's quite resource intensive +- for (Map.Entry entry : this.SettingsCache.entrySet()) +- { +- CustomObjectCompiled object = ObjectsStore.CompileString(entry.getKey(), worldConfig.CustomObjectsDirectory); +- if (object == null) +- object = ObjectsStore.CompileString(entry.getKey(), ObjectsStore.GlobalDirectory); +- if (object != null) +- CustomObjectsCompiled.add(object); +- +- } +- + } -- lwc.getMemoryDatabase().unregisterAllActions(player.getName()); -- lwc.getMemoryDatabase().registerAction(""flag"", player.getName(), internalType + flagName); -+ Action action = new Action(); -+ action.setName(""flag""); -+ action.setPlayer(player); -+ action.setData(internalType + flagName); -+ -+ player.removeAllActions(); -+ player.addAction(action); -+ - lwc.sendLocale(sender, ""protection.flag.finalize""); - - return; -diff --git a/modules/core/src/main/java/com/griefcraft/modules/free/FreeModule.java b/modules/core/src/main/java/com/griefcraft/modules/free/FreeModule.java -index d880ebc4f..86dca114e 100644 ---- a/modules/core/src/main/java/com/griefcraft/modules/free/FreeModule.java -+++ b/modules/core/src/main/java/com/griefcraft/modules/free/FreeModule.java -@@ -18,6 +18,8 @@ - package com.griefcraft.modules.free; - - import com.griefcraft.lwc.LWC; -+import com.griefcraft.model.Action; -+import com.griefcraft.model.LWCPlayer; - import com.griefcraft.model.Protection; - import com.griefcraft.scripting.JavaModule; - import com.griefcraft.scripting.event.LWCBlockInteractEvent; -@@ -110,19 +112,19 @@ public void onCommand(LWCCommandEvent event) { - } +- + protected void WriteConfigSettings() throws IOException + { + WriteTitle(this.Name + "" biome config""); - String type = args[0].toLowerCase(); -- Player player = (Player) sender; -+ LWCPlayer player = lwc.wrapPlayer(sender); +- + WriteComment(""Biome size from 0 to GenerationDepth. 
Show in what zoom level biome will be generated (see GenerationDepth)""); + WriteComment(""Higher numbers=Smaller% of world / Lower numbers=Bigger % of world""); + WriteComment(""Don`t work on Ocean and River (frozen versions too) biomes until not added as normal biome.""); +@@ -601,7 +589,6 @@ protected void WriteConfigSettings() throws IOException + WriteValue(TCDefaultValues.BiomeRivers.name(), this.BiomeRivers); + this.WriteNewLine(); - if (type.equals(""protection"") || type.equals(""chest"") || type.equals(""furnace"") || type.equals(""dispenser"")) { -- if (lwc.getMemoryDatabase().hasPendingChest(player.getName())) { -- lwc.sendLocale(sender, ""protection.general.pending""); -- return; -- } -+ Action action = new Action(); -+ action.setName(""free""); -+ action.setPlayer(player); -+ -+ player.removeAllActions(); -+ player.addAction(action); - -- lwc.getMemoryDatabase().unregisterAllActions(player.getName()); -- lwc.getMemoryDatabase().registerAction(""free"", player.getName()); - lwc.sendLocale(sender, ""protection.remove.protection.finalize""); - } else if (type.equals(""modes"")) { -- lwc.getMemoryDatabase().unregisterAllModes(player.getName()); -+ player.disableAllModes(); - lwc.sendLocale(sender, ""protection.remove.modes.finalize""); - } else { - lwc.sendSimpleUsage(sender, ""/lwc -r ""); -diff --git a/modules/core/src/main/java/com/griefcraft/modules/info/InfoModule.java b/modules/core/src/main/java/com/griefcraft/modules/info/InfoModule.java -index 840d8e71d..24a8f6e21 100644 ---- a/modules/core/src/main/java/com/griefcraft/modules/info/InfoModule.java -+++ b/modules/core/src/main/java/com/griefcraft/modules/info/InfoModule.java -@@ -18,6 +18,8 @@ - package com.griefcraft.modules.info; - - import com.griefcraft.lwc.LWC; -+import com.griefcraft.model.Action; -+import com.griefcraft.model.LWCPlayer; - import com.griefcraft.model.Protection; - import com.griefcraft.scripting.JavaModule; - import com.griefcraft.scripting.event.LWCBlockInteractEvent; -@@ -99,7 +101,7 @@ public void onCommand(LWCCommandEvent event) { - return; - } +- + WriteComment(""Biome name list where this biome will be spawned as isle. Like Mushroom isle in Ocean. This work only if this biome is in IsleBiomes in world config""); + WriteValue(TCDefaultValues.IsleInBiome.name(), this.IsleInBiome); + this.WriteNewLine(); +@@ -625,7 +612,6 @@ protected void WriteConfigSettings() throws IOException + WriteValue(TCDefaultValues.ReplaceToBiomeName.name(), this.ReplaceBiomeName); + this.WriteNewLine(); -- Player player = (Player) sender; -+ LWCPlayer player = lwc.wrapPlayer(sender); - String type = ""info""; +- + WriteTitle(""Terrain Generator Variables""); + WriteComment(""BiomeHeight mean how much height will be added in terrain generation""); + WriteComment(""It is double value from -10.0 to 10.0""); +@@ -668,7 +654,6 @@ protected void WriteConfigSettings() throws IOException + WriteComment(""Make empty layer above bedrock layer. 
""); + WriteHeightSettings(); - if (args.length > 0) { -@@ -107,8 +109,13 @@ public void onCommand(LWCCommandEvent event) { - } +- + this.WriteNewLine(); + WriteComment(""Surface block id""); + WriteValue(TCDefaultValues.SurfaceBlock.name(), this.SurfaceBlock); +@@ -691,7 +676,6 @@ protected void WriteConfigSettings() throws IOException + WriteComment(""BlockId used as ice""); + WriteValue(TCDefaultValues.IceBlock.name(), this.iceBlock); - if (type.equals(""info"")) { -- lwc.getMemoryDatabase().unregisterAllActions(player.getName()); -- lwc.getMemoryDatabase().registerAction(""info"", player.getName()); -+ Action action = new Action(); -+ action.setName(""info""); -+ action.setPlayer(player); +- + this.WriteNewLine(); + WriteComment(""Replace Variable: (BlockIdFrom,BlockIdTo[,BlockDataTo,minHeight,maxHeight])""); + WriteComment(""Example :""); +@@ -713,7 +697,7 @@ protected void WriteConfigSettings() throws IOException + this.WriteNewLine(); + this.WriteComment(""Biome grass color""); + this.WriteColorValue(TCDefaultValues.GrassColor.name(), this.GrassColor); +- + -+ player.removeAllActions(); -+ player.addAction(action); -+ - lwc.sendLocale(player, ""protection.info.finalize""); - } + this.WriteNewLine(); + this.WriteComment(""Whether the grass color is a multiplier""); + this.WriteComment(""If you set it to true, the color will be based on this value, the BiomeTemperature and the BiomeWetness""); +@@ -786,7 +770,6 @@ protected void WriteConfigSettings() throws IOException + this.WriteComment(""These objects will spawn when using the UseBiome keyword.""); + this.WriteCustomObjects(); -diff --git a/modules/core/src/main/java/com/griefcraft/modules/modes/DropTransferModule.java b/modules/core/src/main/java/com/griefcraft/modules/modes/DropTransferModule.java -index 7e1e90db0..cbd0eb625 100644 ---- a/modules/core/src/main/java/com/griefcraft/modules/modes/DropTransferModule.java -+++ b/modules/core/src/main/java/com/griefcraft/modules/modes/DropTransferModule.java -@@ -18,6 +18,9 @@ - package com.griefcraft.modules.modes; - - import com.griefcraft.lwc.LWC; -+import com.griefcraft.model.Action; -+import com.griefcraft.model.LWCPlayer; -+import com.griefcraft.model.Mode; - import com.griefcraft.model.Protection; - import com.griefcraft.scripting.JavaModule; - import com.griefcraft.scripting.event.LWCCommandEvent; -@@ -48,8 +51,8 @@ public void load(LWC lwc) { - * @param player - * @return - */ -- private boolean isPlayerDropTransferring(String player) { -- return lwc.getMemoryDatabase().hasMode(player, ""+dropTransfer""); -+ private boolean isPlayerDropTransferring(LWCPlayer player) { -+ return player.hasMode(""+dropTransfer""); +- + if (DefaultBiome.getBiome(this.Biome.getId()) != null) + { + this.WriteTitle(""MOB SPAWNING""); +@@ -862,7 +845,6 @@ private void WriteHeightSettings() throws IOException + this.WriteValue(TCDefaultValues.CustomHeightControl.name(), output); } - /** -@@ -58,8 +61,9 @@ private boolean isPlayerDropTransferring(String player) { - * @param player - * @return - */ -- private int getPlayerDropTransferTarget(String player) { -- String target = lwc.getMemoryDatabase().getModeData(player, ""dropTransfer""); -+ private int getPlayerDropTransferTarget(LWCPlayer player) { -+ Mode mode = player.getMode(""dropTransfer""); -+ String target = mode.getData(); - - try { - return Integer.parseInt(target); -@@ -70,14 +74,15 @@ private int getPlayerDropTransferTarget(String player) { +- + private void WriteModReplaceSettings() throws IOException + { + if (this.ReplaceCount == 0) +@@ 
-914,22 +896,20 @@ private void WriteModReplaceSettings() throws IOException + private void WriteResources() throws IOException + { + for (int i = 0; i < this.ResourceCount; i++) +- this.WriteValue(this.ResourceSequence[i].Type.Generator.WriteToString(this.ResourceSequence[i])); ++ { ++ this.WriteValue(this.ResourceSequence[i].makeString()); ++ } } - @Override -- public Result onDropItem(LWC lwc, Player player, Item item, ItemStack itemStack) { -- int protectionId = getPlayerDropTransferTarget(player.getName()); -+ public Result onDropItem(LWC lwc, Player bPlayer, Item item, ItemStack itemStack) { -+ LWCPlayer player = lwc.wrapPlayer(bPlayer); -+ int protectionId = getPlayerDropTransferTarget(player); - - if (protectionId == -1) { - return DEFAULT; - } - -- if (!isPlayerDropTransferring(player.getName())) { -+ if (!isPlayerDropTransferring(player)) { - return DEFAULT; - } - -@@ -85,7 +90,7 @@ public Result onDropItem(LWC lwc, Player player, Item item, ItemStack itemStack) - - if (protection == null) { - player.sendMessage(Colors.Red + ""Protection no longer exists""); -- lwc.getMemoryDatabase().unregisterMode(player.getName(), ""dropTransfer""); -+ player.disableMode(player.getMode(""dropTransfer"")); - return DEFAULT; + private void WriteCustomObjects() throws IOException + { + StringBuilder builder = new StringBuilder(); +- for (CustomObjectCompiled objectCompiled : CustomObjectsCompiled) ++ for (String objectString : biomeObjectStrings) + { +- builder.append(objectCompiled.Name); +- if(!objectCompiled.ChangedSettings.equals("""")) +- { +- builder.append(""("" + objectCompiled.ChangedSettings + "")""); +- } ++ builder.append(objectString); + builder.append(','); } +- if(builder.length() > 0) ++ if (builder.length() > 0) + { + // Delete last char + builder.deleteCharAt(builder.length() - 1); +@@ -940,11 +920,11 @@ private void WriteCustomObjects() throws IOException + private void WriteSaplingSettings() throws IOException + { + if (this.SaplingResource != null) +- this.WriteValue(ResourceType.Sapling.Generator.WriteToString(this.SaplingResource)); ++ this.WriteValue(SaplingResource.makeString()); -@@ -94,7 +99,7 @@ public Result onDropItem(LWC lwc, Player player, Item item, ItemStack itemStack) - - if (world == null) { - player.sendMessage(Colors.Red + ""Invalid world!""); -- lwc.getMemoryDatabase().unregisterMode(player.getName(), ""dropTransfer""); -+ player.disableMode(player.getMode(""dropTransfer"")); - return DEFAULT; - } + for (Resource res : this.SaplingTypes) + if (res != null) +- this.WriteValue(ResourceType.Sapling.Generator.WriteToString(res)); ++ this.WriteValue(res.makeString()); -@@ -105,36 +110,46 @@ public Result onDropItem(LWC lwc, Player player, Item item, ItemStack itemStack) - player.sendMessage(""Chest could not hold all the items! 
Have the remaining items back.""); + } - for (ItemStack temp : remaining.values()) { -- player.getInventory().addItem(temp); -+ bPlayer.getInventory().addItem(temp); +@@ -976,11 +956,12 @@ protected void CorrectSettings() + protected void RenameOldSettings() + { + // Old values from WorldConfig +- TCDefaultValues[] copyFromWorld = {TCDefaultValues.MaxAverageHeight, TCDefaultValues.MaxAverageDepth, TCDefaultValues.Volatility1, TCDefaultValues.Volatility2, TCDefaultValues.VolatilityWeight1, TCDefaultValues.VolatilityWeight2, TCDefaultValues.DisableBiomeHeight, TCDefaultValues.CustomHeightControl}; ++ TCDefaultValues[] copyFromWorld = { TCDefaultValues.MaxAverageHeight, TCDefaultValues.MaxAverageDepth, TCDefaultValues.Volatility1, TCDefaultValues.Volatility2, TCDefaultValues.VolatilityWeight1, TCDefaultValues.VolatilityWeight2, TCDefaultValues.DisableBiomeHeight, TCDefaultValues.CustomHeightControl }; + for (TCDefaultValues value : copyFromWorld) + if (this.worldConfig.SettingsCache.containsKey(value.name().toLowerCase())) + { +- //this.SettingsCache.put(value.name(), this.worldConfig.SettingsCache.get(value.name().toLowerCase())); ++ // this.SettingsCache.put(value.name(), ++ // this.worldConfig.SettingsCache.get(value.name().toLowerCase())); + this.SettingsCache.put(value.name().toLowerCase(), this.worldConfig.SettingsCache.get(value.name().toLowerCase())); } - } -- player.updateInventory(); // if they're in the chest and dropping items, this is required -+ bPlayer.updateInventory(); // if they're in the chest and dropping items, this is required - item.remove(); - return DEFAULT; - } +@@ -994,27 +975,29 @@ protected void RenameOldSettings() + } - @Override -- public Result onProtectionInteract(LWC lwc, Player player, Protection protection, List actions, boolean canAccess, boolean canAdmin) { -+ public Result onProtectionInteract(LWC lwc, Player bPlayer, Protection protection, List actions, boolean canAccess, boolean canAdmin) { - if (!actions.contains(""dropTransferSelect"")) { - return DEFAULT; } - -+ LWCPlayer player = lwc.wrapPlayer(bPlayer); +- + - if (!canAccess) { - lwc.sendLocale(player, ""protection.interact.dropxfer.noaccess""); - } else { - if (protection.getBlockId() != Material.CHEST.getId()) { - lwc.sendLocale(player, ""protection.interact.dropxfer.notchest""); -- lwc.getMemoryDatabase().unregisterAllActions(player.getName()); -+ player.removeAllActions(); - return CANCEL; + // CustomTreeChance + int customTreeChance = 0; // Default value +- if(worldConfig.SettingsCache.containsKey(""customtreechance"")) ++ if (worldConfig.SettingsCache.containsKey(""customtreechance"")) + { +- try { ++ try ++ { + customTreeChance = Integer.parseInt(worldConfig.SettingsCache.get(""customtreechance"")); +- } catch(NumberFormatException e) { ++ } catch (NumberFormatException e) ++ { + // Ignore, so leave customTreeChance at 0 } - -- lwc.getMemoryDatabase().registerMode(player.getName(), ""dropTransfer"", protection.getId() + """"); -- lwc.getMemoryDatabase().registerMode(player.getName(), ""+dropTransfer""); -+ Mode mode = new Mode(); -+ mode.setName(""dropTransfer""); -+ mode.setData(protection.getId() + """"); -+ mode.setPlayer(bPlayer); -+ player.enableMode(mode); -+ mode = new Mode(); -+ mode.setName(""+dropTransfer""); -+ mode.setPlayer(bPlayer); -+ player.enableMode(mode); -+ - lwc.sendLocale(player, ""protection.interact.dropxfer.finalize""); } - -- lwc.getMemoryDatabase().unregisterAllActions(player.getName()); // ignore the persist mode -+ player.removeAllActions(); // ignore the 
persist mode - return DEFAULT; - } - -@@ -145,7 +160,7 @@ public Result onBlockInteract(LWC lwc, Player player, Block block, List +- if(customTreeChance == 100) ++ if (customTreeChance == 100) + { + this.SettingsCache.put(""Sapling(All,UseWorld,100)"", ""-""); + } +- if(customTreeChance > 0 && customTreeChance < 100) ++ if (customTreeChance > 0 && customTreeChance < 100) + { +- this.SettingsCache.put(""Sapling(0,UseWorld,""+customTreeChance+"",BigTree,10,Tree,100)"", ""-""); // Oak +- this.SettingsCache.put(""Sapling(1,UseWorld,""+customTreeChance+"",Taiga2,100)"", ""-""); // Redwood +- this.SettingsCache.put(""Sapling(2,UseWorld,""+customTreeChance+"",Forest,100)"", ""-""); // Birch +- this.SettingsCache.put(""Sapling(3,UseWorld,""+customTreeChance+"",CocoaTree,100)"", ""-""); // Jungle ++ this.SettingsCache.put(""Sapling(0,UseWorld,"" + customTreeChance + "",BigTree,10,Tree,100)"", ""-""); // Oak ++ this.SettingsCache.put(""Sapling(1,UseWorld,"" + customTreeChance + "",Taiga2,100)"", ""-""); // Redwood ++ this.SettingsCache.put(""Sapling(2,UseWorld,"" + customTreeChance + "",Forest,100)"", ""-""); // Birch ++ this.SettingsCache.put(""Sapling(3,UseWorld,"" + customTreeChance + "",CocoaTree,100)"", ""-""); // Jungle } - lwc.sendLocale(player, ""protection.interact.dropxfer.notprotected""); -- lwc.getMemoryDatabase().unregisterAllActions(player.getName()); -+ lwc.removeModes(player); + // ReplacedBlocks +@@ -1088,11 +1071,9 @@ protected void RenameOldSettings() - return DEFAULT; + this.SettingsCache.put(""ReplacedBlocks"" + "":"" + output.substring(0, output.length() - 1), """"); + +- + } } -@@ -164,7 +179,7 @@ public void onCommand(LWCCommandEvent event) { - CommandSender sender = event.getSender(); - String[] args = event.getArgs(); -- Player player = (Player) sender; -+ LWCPlayer player = lwc.wrapPlayer(sender); - String mode = args[0].toLowerCase(); +- + private int DefaultTrees = 1; + private int DefaultFlowers = 2; + private int DefaultGrass = 10; +@@ -1128,161 +1109,160 @@ private void InitDefaults() + this.DefaultBiomeTemperature = this.Biome.getTemperature(); + this.DefaultBiomeWetness = this.Biome.getWetness(); - if (!mode.equals(""droptransfer"")) { -@@ -185,40 +200,48 @@ public void onCommand(LWCCommandEvent event) { - String playerName = player.getName(); +- + switch (this.Biome.getId()) + { +- case 0: +- this.DefaultColor = ""0x3333FF""; +- break; +- case 1: +- { +- this.DefaultTrees = 0; +- this.DefaultFlowers = 4; +- this.DefaultGrass = 20; +- this.DefaultColor = ""0x999900""; +- break; +- } +- case 2: +- { +- this.DefaultTrees = 0; +- this.DefaultDeadBrush = 4; +- this.DefaultGrass = 0; +- this.DefaultReed = 10; +- this.DefaultCactus = 10; +- this.DefaultColor = ""0xFFCC33""; +- break; +- } +- case 3: +- this.DefaultColor = ""0x333300""; +- break; +- case 4: +- { +- this.DefaultTrees = 10; +- this.DefaultGrass = 15; +- this.DefaultColor = ""0x00FF00""; +- break; +- } +- case 5: +- { +- this.DefaultTrees = 10; +- this.DefaultGrass = 10; +- this.DefaultColor = ""0x007700""; +- break; +- } +- case 6: +- { +- this.DefaultTrees = 2; +- this.DefaultFlowers = -999; +- this.DefaultDeadBrush = 1; +- this.DefaultMushroom = 8; +- this.DefaultReed = 10; +- this.DefaultClay = 1; +- this.DefaultWaterLily = 1; +- this.DefaultColor = ""0x99CC66""; +- this.DefaultWaterColorMultiplier = ""0xe0ffae""; +- this.DefaultGrassColor = ""0x7E6E7E""; +- this.DefaultFoliageColor = ""0x7E6E7E""; +- break; +- } +- case 7: +- this.DefaultSize = 8; +- this.DefaultRarity = 95; +- 
this.DefaultIsle.add(DefaultBiome.SWAMPLAND.Name); +- this.DefaultColor = ""0x00CCCC""; +- case 8: +- case 9: +- +- break; +- case 10: +- this.DefaultColor = ""0xFFFFFF""; +- break; +- case 11: +- this.DefaultColor = ""0x66FFFF""; +- break; +- case 12: +- this.DefaultColor = ""0xCCCCCC""; +- break; +- case 13: +- this.DefaultColor = ""0xCC9966""; +- break; +- case 14: +- { +- this.DefaultSurfaceBlock = (byte) DefaultMaterial.MYCEL.id; +- this.DefaultMushroom = 1; +- this.DefaultGrass = 0; +- this.DefaultFlowers = 0; +- this.DefaultTrees = 0; +- this.DefaultRarity = 1; +- this.DefaultRiver = false; +- this.DefaultSize = 6; +- this.DefaultIsle.add(DefaultBiome.OCEAN.Name); +- this.DefaultColor = ""0xFF33CC""; +- this.DefaultWaterLily = 1; +- break; +- } +- case 15: +- { +- this.DefaultRiver = false; +- this.DefaultSize = 9; +- this.DefaultBorder.add(DefaultBiome.MUSHROOM_ISLAND.Name); +- this.DefaultColor = ""0xFF9999""; +- break; +- } +- case 16: +- this.DefaultTrees = 0; +- this.DefaultSize = 8; +- this.DefaultBorder.add(DefaultBiome.OCEAN.Name); +- this.DefaultNotBorderNear.add(DefaultBiome.RIVER.Name); +- this.DefaultNotBorderNear.add(DefaultBiome.SWAMPLAND.Name); +- this.DefaultNotBorderNear.add(DefaultBiome.EXTREME_HILLS.Name); +- this.DefaultNotBorderNear.add(DefaultBiome.MUSHROOM_ISLAND.Name); +- this.DefaultColor = ""0xFFFF00""; +- break; +- case 17: +- this.DefaultSize = 6; +- this.DefaultRarity = 97; +- this.DefaultIsle.add(DefaultBiome.DESERT.Name); +- this.DefaultTrees = 0; +- this.DefaultDeadBrush = 4; +- this.DefaultGrass = 0; +- this.DefaultReed = 50; +- this.DefaultCactus = 10; +- this.DefaultColor = ""0x996600""; +- break; +- case 18: +- this.DefaultSize = 6; +- this.DefaultRarity = 97; +- this.DefaultIsle.add(DefaultBiome.FOREST.Name); +- this.DefaultTrees = 10; +- this.DefaultGrass = 15; +- this.DefaultColor = ""0x009900""; +- break; +- case 19: +- this.DefaultSize = 6; +- this.DefaultRarity = 97; +- this.DefaultIsle.add(DefaultBiome.TAIGA.Name); +- this.DefaultTrees = 10; +- this.DefaultGrass = 10; +- this.DefaultColor = ""0x003300""; +- break; +- case 20: +- this.DefaultSize = 8; +- this.DefaultBorder.add(DefaultBiome.EXTREME_HILLS.Name); +- this.DefaultColor = ""0x666600""; +- break; +- case 21: +- this.DefaultTrees = 50; +- this.DefaultGrass = 25; +- this.DefaultFlowers = 4; +- this.DefaultColor = ""0xCC6600""; +- break; +- case 22: +- this.DefaultTrees = 50; +- this.DefaultGrass = 25; +- this.DefaultFlowers = 4; +- this.DefaultColor = ""0x663300""; +- this.DefaultIsle.add(DefaultBiome.JUNGLE.Name); +- break; ++ case 0: ++ this.DefaultColor = ""0x3333FF""; ++ break; ++ case 1: ++ { ++ this.DefaultTrees = 0; ++ this.DefaultFlowers = 4; ++ this.DefaultGrass = 20; ++ this.DefaultColor = ""0x999900""; ++ break; ++ } ++ case 2: ++ { ++ this.DefaultTrees = 0; ++ this.DefaultDeadBrush = 4; ++ this.DefaultGrass = 0; ++ this.DefaultReed = 10; ++ this.DefaultCactus = 10; ++ this.DefaultColor = ""0xFFCC33""; ++ break; ++ } ++ case 3: ++ this.DefaultColor = ""0x333300""; ++ break; ++ case 4: ++ { ++ this.DefaultTrees = 10; ++ this.DefaultGrass = 15; ++ this.DefaultColor = ""0x00FF00""; ++ break; ++ } ++ case 5: ++ { ++ this.DefaultTrees = 10; ++ this.DefaultGrass = 10; ++ this.DefaultColor = ""0x007700""; ++ break; ++ } ++ case 6: ++ { ++ this.DefaultTrees = 2; ++ this.DefaultFlowers = -999; ++ this.DefaultDeadBrush = 1; ++ this.DefaultMushroom = 8; ++ this.DefaultReed = 10; ++ this.DefaultClay = 1; ++ this.DefaultWaterLily = 1; ++ this.DefaultColor = ""0x99CC66""; ++ 
this.DefaultWaterColorMultiplier = ""0xe0ffae""; ++ this.DefaultGrassColor = ""0x7E6E7E""; ++ this.DefaultFoliageColor = ""0x7E6E7E""; ++ break; ++ } ++ case 7: ++ this.DefaultSize = 8; ++ this.DefaultRarity = 95; ++ this.DefaultIsle.add(DefaultBiome.SWAMPLAND.Name); ++ this.DefaultColor = ""0x00CCCC""; ++ case 8: ++ case 9: ++ ++ break; ++ case 10: ++ this.DefaultColor = ""0xFFFFFF""; ++ break; ++ case 11: ++ this.DefaultColor = ""0x66FFFF""; ++ break; ++ case 12: ++ this.DefaultColor = ""0xCCCCCC""; ++ break; ++ case 13: ++ this.DefaultColor = ""0xCC9966""; ++ break; ++ case 14: ++ { ++ this.DefaultSurfaceBlock = (byte) DefaultMaterial.MYCEL.id; ++ this.DefaultMushroom = 1; ++ this.DefaultGrass = 0; ++ this.DefaultFlowers = 0; ++ this.DefaultTrees = 0; ++ this.DefaultRarity = 1; ++ this.DefaultRiver = false; ++ this.DefaultSize = 6; ++ this.DefaultIsle.add(DefaultBiome.OCEAN.Name); ++ this.DefaultColor = ""0xFF33CC""; ++ this.DefaultWaterLily = 1; ++ break; ++ } ++ case 15: ++ { ++ this.DefaultRiver = false; ++ this.DefaultSize = 9; ++ this.DefaultBorder.add(DefaultBiome.MUSHROOM_ISLAND.Name); ++ this.DefaultColor = ""0xFF9999""; ++ break; ++ } ++ case 16: ++ this.DefaultTrees = 0; ++ this.DefaultSize = 8; ++ this.DefaultBorder.add(DefaultBiome.OCEAN.Name); ++ this.DefaultNotBorderNear.add(DefaultBiome.RIVER.Name); ++ this.DefaultNotBorderNear.add(DefaultBiome.SWAMPLAND.Name); ++ this.DefaultNotBorderNear.add(DefaultBiome.EXTREME_HILLS.Name); ++ this.DefaultNotBorderNear.add(DefaultBiome.MUSHROOM_ISLAND.Name); ++ this.DefaultColor = ""0xFFFF00""; ++ break; ++ case 17: ++ this.DefaultSize = 6; ++ this.DefaultRarity = 97; ++ this.DefaultIsle.add(DefaultBiome.DESERT.Name); ++ this.DefaultTrees = 0; ++ this.DefaultDeadBrush = 4; ++ this.DefaultGrass = 0; ++ this.DefaultReed = 50; ++ this.DefaultCactus = 10; ++ this.DefaultColor = ""0x996600""; ++ break; ++ case 18: ++ this.DefaultSize = 6; ++ this.DefaultRarity = 97; ++ this.DefaultIsle.add(DefaultBiome.FOREST.Name); ++ this.DefaultTrees = 10; ++ this.DefaultGrass = 15; ++ this.DefaultColor = ""0x009900""; ++ break; ++ case 19: ++ this.DefaultSize = 6; ++ this.DefaultRarity = 97; ++ this.DefaultIsle.add(DefaultBiome.TAIGA.Name); ++ this.DefaultTrees = 10; ++ this.DefaultGrass = 10; ++ this.DefaultColor = ""0x003300""; ++ break; ++ case 20: ++ this.DefaultSize = 8; ++ this.DefaultBorder.add(DefaultBiome.EXTREME_HILLS.Name); ++ this.DefaultColor = ""0x666600""; ++ break; ++ case 21: ++ this.DefaultTrees = 50; ++ this.DefaultGrass = 25; ++ this.DefaultFlowers = 4; ++ this.DefaultColor = ""0xCC6600""; ++ break; ++ case 22: ++ this.DefaultTrees = 50; ++ this.DefaultGrass = 25; ++ this.DefaultFlowers = 4; ++ this.DefaultColor = ""0x663300""; ++ this.DefaultIsle.add(DefaultBiome.JUNGLE.Name); ++ break; + } + } - if (action.equals(""select"")) { -- if (isPlayerDropTransferring(playerName)) { -+ if (isPlayerDropTransferring(player)) { - lwc.sendLocale(player, ""protection.modes.dropxfer.select.error""); - return; - } +diff --git a/common/src/com/khorn/terraincontrol/configuration/ConfigFile.java b/common/src/com/khorn/terraincontrol/configuration/ConfigFile.java +index 93e688b2a..7ef4da5a4 100644 +--- a/common/src/com/khorn/terraincontrol/configuration/ConfigFile.java ++++ b/common/src/com/khorn/terraincontrol/configuration/ConfigFile.java +@@ -8,6 +8,7 @@ + import java.util.Collections; + import java.util.HashMap; + import java.util.List; ++import java.util.Map; -- lwc.getMemoryDatabase().unregisterMode(playerName, mode); -- 
lwc.getMemoryDatabase().registerAction(""dropTransferSelect"", playerName, """"); -+ player.disableMode(player.getMode(mode)); + public abstract class ConfigFile + { +@@ -15,7 +16,7 @@ public abstract class ConfigFile + + // TODO: This map is populated with lowercase versions as well. + // TODO: That is a derped approach. Use TreeSet with CASE_INSENSITIVE_ORDER instead. +- protected HashMap SettingsCache = new HashMap(); ++ protected Map SettingsCache = new HashMap(); + + // TODO: We should use GSON only instead of just for a few fields. + // TODO: Hah. We should remove that buggy GSON. +diff --git a/common/src/com/khorn/terraincontrol/configuration/Resource.java b/common/src/com/khorn/terraincontrol/configuration/Resource.java +index 5169b3f42..569fe97e3 100644 +--- a/common/src/com/khorn/terraincontrol/configuration/Resource.java ++++ b/common/src/com/khorn/terraincontrol/configuration/Resource.java +@@ -1,99 +1,258 @@ + package com.khorn.terraincontrol.configuration; -+ Action temp = new Action(); -+ temp.setName(""dropTransferSelect""); -+ temp.setPlayer(player); ++import java.util.ArrayList; ++import java.util.List; ++import java.util.Random; + -+ player.addAction(temp); - lwc.sendLocale(player, ""protection.modes.dropxfer.select.finalize""); - } else if (action.equals(""on"")) { -- int target = getPlayerDropTransferTarget(playerName); -+ int target = getPlayerDropTransferTarget(player); + import com.khorn.terraincontrol.DefaultMaterial; +-import com.khorn.terraincontrol.customobjects.CustomObjectCompiled; ++import com.khorn.terraincontrol.LocalWorld; ++import com.khorn.terraincontrol.TerrainControl; ++import com.khorn.terraincontrol.exception.InvalidResourceException; + import com.khorn.terraincontrol.generator.resourcegens.ResourceType; +-import com.khorn.terraincontrol.generator.resourcegens.TreeType; - if (target == -1) { - lwc.sendLocale(player, ""protection.modes.dropxfer.selectchest""); - return; - } +-public class Resource ++public abstract class Resource + { +- public ResourceType Type; +- public int MinAltitude; +- public int MaxAltitude; +- public int MinSize; +- public int MaxSize; +- public int BlockId; +- public int BlockData; +- public int[] SourceBlockId = new int[0]; +- public int Frequency; +- public int Rarity; +- public TreeType[] TreeTypes = new TreeType[0]; +- public int[] TreeChances = new int[0]; +- +- // For custom trees +- public CustomObjectCompiled[] CUObjectsWorld = new CustomObjectCompiled[0]; +- public CustomObjectCompiled[] CUObjectsBiome = new CustomObjectCompiled[0]; +- +- public CustomObjectCompiled[] CUObjects = new CustomObjectCompiled[0]; +- public String[] CUObjectsNames = new String[0]; +- +- +- public Resource(ResourceType type) ++ protected int frequency; ++ protected int rarity; ++ protected WorldConfig worldConfig; ++ ++ /** ++ * Sets the world. Needed for some resources, like CustomObject and Tree. ++ * @param world ++ */ ++ public void setWorldConfig(WorldConfig worldConfig) + { +- Type = type; ++ this.worldConfig = worldConfig; + } +- +- public Resource(ResourceType type, int blockId, int blockData, int size, int frequency, int rarity, int minAltitude, int maxAltitude, int[] sourceBlockIds) ++ ++ /** ++ * Convenience method for creating a resource. Used to create the default resources. ++ * @param world ++ * @param clazz ++ * @param args ++ * @return ++ */ ++ public static Resource create(WorldConfig config, Class clazz, Object... 
args) + { +- this.Type = type; +- this.BlockId = blockId; +- this.BlockData = blockData; +- this.MaxSize = size; +- this.Frequency = frequency; +- this.Rarity = rarity; +- this.MinAltitude = minAltitude; +- this.MaxAltitude = maxAltitude; +- this.SourceBlockId = sourceBlockIds; ++ List stringArgs = new ArrayList(args.length); ++ for(Object arg: args) ++ { ++ stringArgs.add("""" + arg); ++ } ++ ++ Resource resource; ++ try ++ { ++ resource = clazz.newInstance(); ++ } catch (InstantiationException e) ++ { ++ return null; ++ } catch (IllegalAccessException e) ++ { ++ return null; ++ } ++ resource.setWorldConfig(config); ++ try { ++ resource.load(stringArgs); ++ } catch(InvalidResourceException e) ++ { ++ TerrainControl.log(""Invalid default resource! Please report! "" + clazz.getName() + "": ""+e.getMessage()); ++ e.printStackTrace(); ++ } ++ ++ return resource; + } -- lwc.getMemoryDatabase().registerMode(playerName, ""+dropTransfer""); -+ Mode temp = new Mode(); -+ temp.setName(""+dropTransfer""); -+ temp.setPlayer(player.getBukkitPlayer()); +- public Resource(ResourceType type, int minSize, int maxSize, int frequency, int rarity, int minAltitude, int maxAltitude) ++ /** ++ * Loads the settings. Returns false if one of the arguments contains an ++ * error. ++ * ++ * @param args ++ * List of args. ++ * @return Returns false if one of the arguments contains an error, ++ * otherwise true. ++ * @throws InvalidResourceException ++ * If the resoure is invalid. ++ */ ++ public abstract void load(List args) throws InvalidResourceException; + -+ player.enableMode(temp); - lwc.sendLocale(player, ""protection.modes.dropxfer.on.finalize""); - } else if (action.equals(""off"")) { -- int target = getPlayerDropTransferTarget(playerName); -+ int target = getPlayerDropTransferTarget(player); ++ /** ++ * Spawns the resource at this position, ignoring rarity and frequency. ++ * ++ * @param world ++ * @param chunkX ++ * @param chunkZ ++ */ ++ public abstract void spawn(LocalWorld world, Random random, int x, int z); ++ ++ /** ++ * Spawns the resource normally. ++ * ++ * @param world ++ * @param chunkX ++ * @param chunkZ ++ */ ++ public void process(LocalWorld world, Random random, int chunkX, int chunkZ) + { +- this.Type = type; +- this.MaxSize = maxSize; +- this.MinSize = minSize; +- this.Frequency = frequency; +- this.Rarity = rarity; +- this.MinAltitude = minAltitude; +- this.MaxAltitude = maxAltitude; ++ for (int t = 0; t < frequency; t++) ++ { ++ if (random.nextInt(100) > rarity) ++ continue; ++ int x = chunkX * 16 + random.nextInt(16) + 8; ++ int z = chunkZ * 16 + random.nextInt(16) + 8; ++ spawn(world, random, x, z); ++ } + } - if (target == -1) { - lwc.sendLocale(player, ""protection.modes.dropxfer.selectchest""); - return; - } +- public Resource(ResourceType type, int blockId, int frequency, int rarity, int minAltitude, int maxAltitude) ++ /** ++ * Gets the type of this resource. ++ * ++ * @return The type of this resource. ++ */ ++ public abstract ResourceType getType(); ++ ++ /** ++ * Gets a String representation, like Tree(10,BigTree,50,Tree,100) ++ * ++ * @return A String representation, like Tree(10,BigTree,50,Tree,100) ++ */ ++ public abstract String makeString(); ++ ++ /** ++ * Parses the string and returns a number between minValue and maxValue. ++ * Returns Resource.INCORRECT_NUMBER if the string is not a number. ++ * ++ * @param string ++ * @param minValue ++ * @param maxValue ++ * @return ++ * @throws InvalidResourceException ++ * If the number is invalid. 
++ */ ++ public int getInt(String string, int minValue, int maxValue) throws InvalidResourceException + { +- this.Type = type; +- this.BlockId = blockId; +- this.Frequency = frequency; +- this.Rarity = rarity; +- this.MinAltitude = minAltitude; +- this.MaxAltitude = maxAltitude; ++ try ++ { ++ int number = Integer.parseInt(string); ++ if (number < minValue) ++ { ++ return minValue; ++ } ++ if (number > maxValue) ++ { ++ return maxValue; ++ } ++ return number; ++ } catch (NumberFormatException e) ++ { ++ throw new InvalidResourceException(""Incorrect number: "" + string); ++ } + } -- lwc.getMemoryDatabase().unregisterMode(playerName, ""+dropTransfer""); -+ player.disableMode(player.getMode(""+dropTransfer"")); - lwc.sendLocale(player, ""protection.modes.dropxfer.off.finalize""); - } else if (action.equals(""status"")) { -- if (getPlayerDropTransferTarget(playerName) == -1) { -+ if (getPlayerDropTransferTarget(player) == -1) { - lwc.sendLocale(player, ""protection.modes.dropxfer.status.off""); - } else { -- if (isPlayerDropTransferring(playerName)) { -+ if (isPlayerDropTransferring(player)) { - lwc.sendLocale(player, ""protection.modes.dropxfer.status.active""); - } else { - lwc.sendLocale(player, ""protection.modes.dropxfer.status.inactive""); -diff --git a/modules/core/src/main/java/com/griefcraft/modules/modes/NoSpamModule.java b/modules/core/src/main/java/com/griefcraft/modules/modes/NoSpamModule.java -index ef14c3da7..abf81768a 100644 ---- a/modules/core/src/main/java/com/griefcraft/modules/modes/NoSpamModule.java -+++ b/modules/core/src/main/java/com/griefcraft/modules/modes/NoSpamModule.java -@@ -18,13 +18,12 @@ - package com.griefcraft.modules.modes; - - import com.griefcraft.lwc.LWC; -+import com.griefcraft.model.LWCPlayer; -+import com.griefcraft.model.Mode; - import com.griefcraft.scripting.JavaModule; - import com.griefcraft.scripting.event.LWCCommandEvent; - import com.griefcraft.scripting.event.LWCSendLocaleEvent; - import org.bukkit.command.CommandSender; --import org.bukkit.entity.Player; -- --import java.util.List; +- public Resource(ResourceType type, int frequency, TreeType[] types, int[] treeChances) ++ /** ++ * Returns the block id with the given name. ++ * ++ * @param string ++ * @return ++ */ ++ public int getBlockId(String string) throws InvalidResourceException + { +- this.Type = type; +- this.Frequency = frequency; +- if (types != null) ++ if (string.indexOf('.') != -1) ++ { ++ // Ignore block data ++ string = string.split(""\\."")[0]; ++ } ++ ++ DefaultMaterial material = DefaultMaterial.getMaterial(string); ++ if (material != null) + { +- this.TreeTypes = types; +- this.TreeChances = treeChances; ++ return material.id; + } ++ ++ return getInt(string, 0, 256); + } - public class NoSpamModule extends JavaModule { +- public boolean CheckSourceId(int blockId) ++ /** ++ * Gets the block data from a material string. 
++ * ++ * @param string ++ * @return ++ * @throws InvalidResourceException ++ */ ++ public int getBlockData(String string) throws InvalidResourceException + { +- for (int id : this.SourceBlockId) +- if (blockId == id) +- return true; +- return false; ++ if (string.indexOf('.') == -1) ++ { ++ // No block data ++ return 0; ++ } ++ ++ // Get block data ++ string = string.split(""\\."")[1]; ++ return getInt(string, 0, 16); + } -@@ -42,37 +41,36 @@ public void onCommand(LWCCommandEvent event) { - CommandSender sender = event.getSender(); - String[] args = event.getArgs(); ++ public void assureSize(int size, List args) throws InvalidResourceException ++ { ++ if (args.size() < size) ++ { ++ throw new InvalidResourceException(""Too few arguments supplied""); ++ } ++ } -- Player player = (Player) sender; -+ LWCPlayer player = lwc.wrapPlayer(sender); - String mode = args[0].toLowerCase(); +- public String BlockIdToName(int id) ++ /** ++ * Gets the material name back from the id and data. ++ * ++ * @param id ++ * The block id ++ * @param data ++ * The block data ++ * @return String in the format blockname[.blockdata] ++ */ ++ public String makeMaterial(int id, int data) + { ++ String materialString = """" + id; + DefaultMaterial material = DefaultMaterial.getMaterial(id); + if (material != DefaultMaterial.UNKNOWN_BLOCK) +- return material.name(); +- else +- return Integer.toString(id); ++ { ++ // No name, return number as String ++ materialString = material.toString(); ++ } ++ ++ if (data > 0) ++ { ++ materialString = materialString + ""."" + data; ++ } ++ ++ return materialString; + } - if (!mode.equals(""nospam"")) { - return; - } ++ /** ++ * Gets the material name back from the id. ++ * ++ * @param id ++ * The block id ++ * @return String in the format blockname ++ */ ++ public String makeMaterial(int id) ++ { ++ return makeMaterial(id, 0); ++ } ++ ++ /** ++ * Returns a String in the format "",materialName,materialName,etc"" ++ * ++ * @param ids ++ * @return ++ */ ++ public String makeMaterial(List ids) ++ { ++ String string = """"; ++ for (int blockId : ids) ++ { ++ string += "",""; ++ string += makeMaterial(blockId); ++ } ++ return string; ++ } + } +diff --git a/common/src/com/khorn/terraincontrol/configuration/WorldConfig.java b/common/src/com/khorn/terraincontrol/configuration/WorldConfig.java +index a25695090..b7fbf0fac 100644 +--- a/common/src/com/khorn/terraincontrol/configuration/WorldConfig.java ++++ b/common/src/com/khorn/terraincontrol/configuration/WorldConfig.java +@@ -3,9 +3,8 @@ + import com.khorn.terraincontrol.DefaultBiome; + import com.khorn.terraincontrol.LocalBiome; + import com.khorn.terraincontrol.LocalWorld; ++import com.khorn.terraincontrol.TerrainControl; + import com.khorn.terraincontrol.customobjects.CustomObject; +-import com.khorn.terraincontrol.customobjects.CustomObjectCompiled; +-import com.khorn.terraincontrol.customobjects.ObjectsStore; -- List modes = lwc.getMemoryDatabase().getModes(player.getName()); -+ if (!player.hasMode(mode)) { -+ Mode temp = new Mode(); -+ temp.setName(mode); -+ temp.setPlayer(player.getBukkitPlayer()); + import java.io.DataInputStream; + import java.io.DataOutputStream; +@@ -13,13 +12,19 @@ + import java.io.IOException; + import java.util.ArrayList; + import java.util.HashMap; ++import java.util.Map; -- if (!modes.contains(mode)) { -- lwc.getMemoryDatabase().registerMode(player.getName(), mode); -+ player.enableMode(temp); - lwc.sendLocale(player, ""protection.modes.nospam.finalize""); - } else { -- 
lwc.getMemoryDatabase().unregisterMode(player.getName(), mode); -+ player.disableMode(player.getMode(mode)); - lwc.sendLocale(player, ""protection.modes.nospam.off""); - } + public class WorldConfig extends ConfigFile + { ++ public LocalWorld world; ++ + public ArrayList CustomBiomes = new ArrayList(); + public HashMap CustomBiomeIds = new HashMap(); - event.setCancelled(true); -- return; - } +- public ArrayList CustomObjectsCompiled; ++ /** ++ * Holds all world CustomObjects. All keys should be lowercase. ++ */ ++ public Map customObjects = new HashMap(); - @Override - public void onSendLocale(LWCSendLocaleEvent event) { - LWC lwc = event.getLWC(); -- Player player = event.getPlayer(); -+ LWCPlayer player = lwc.wrapPlayer(event.getPlayer()); - String locale = event.getLocale(); + public ArrayList NormalBiomes = new ArrayList(); + public ArrayList IceBiomes = new ArrayList(); +@@ -32,10 +37,6 @@ public class WorldConfig extends ConfigFile + public byte[] ReplaceMatrixBiomes = new byte[256]; + public boolean HaveBiomeReplace = false; -- List modes = lwc.getMemoryDatabase().getModes(player.getName()); +- // public BiomeBase currentBiome; +- // --Commented out by Inspection (17.07.11 1:49):String seedValue; - - // they don't intrigue us -- if (!modes.contains(""nospam"")) { -+ if (!player.hasMode(""nospam"")) { - return; - } - -diff --git a/modules/core/src/main/java/com/griefcraft/modules/modes/PersistModule.java b/modules/core/src/main/java/com/griefcraft/modules/modes/PersistModule.java -index 6d082be9a..cd91987ad 100644 ---- a/modules/core/src/main/java/com/griefcraft/modules/modes/PersistModule.java -+++ b/modules/core/src/main/java/com/griefcraft/modules/modes/PersistModule.java -@@ -18,12 +18,11 @@ - package com.griefcraft.modules.modes; - - import com.griefcraft.lwc.LWC; -+import com.griefcraft.model.LWCPlayer; -+import com.griefcraft.model.Mode; - import com.griefcraft.scripting.JavaModule; - import com.griefcraft.scripting.event.LWCCommandEvent; - import org.bukkit.command.CommandSender; --import org.bukkit.entity.Player; - --import java.util.List; - - public class PersistModule extends JavaModule { + // For old biome generator + public double oldBiomeSize; -@@ -41,25 +40,26 @@ public void onCommand(LWCCommandEvent event) { - CommandSender sender = event.getSender(); - String[] args = event.getArgs(); +@@ -161,6 +162,7 @@ public WorldConfig(File settingsDir, LocalWorld world, boolean checkOnly) + { + this.SettingsDir = settingsDir; + this.WorldName = world.getName(); ++ this.world = world; -- Player player = (Player) sender; -+ LWCPlayer player = lwc.wrapPlayer(sender); - String mode = args[0].toLowerCase(); + File settingsFile = new File(this.SettingsDir, TCDefaultValues.WorldSettingsName.stringValue()); - if (!mode.equals(""persist"")) { - return; +@@ -285,15 +287,9 @@ private void ReadWorldCustomObjects() + } } -- List modes = lwc.getMemoryDatabase().getModes(player.getName()); -+ if (!player.hasMode(mode)) { -+ Mode temp = new Mode(); -+ temp.setName(mode); -+ temp.setPlayer(player.getBukkitPlayer()); +- ArrayList rawObjects = ObjectsStore.LoadObjectsFromDirectory(CustomObjectsDirectory); +- +- +- CustomObjectsCompiled = new ArrayList(); +- +- for (CustomObject object : rawObjects) +- CustomObjectsCompiled.add(object.Compile("""")); +- System.out.println(""TerrainControl: "" + CustomObjectsCompiled.size() + "" world custom objects loaded""); ++ customObjects = TerrainControl.getCustomObjectManager().loadObjects(CustomObjectsDirectory); -- if (!modes.contains(mode)) { -- 
lwc.getMemoryDatabase().registerMode(player.getName(), mode); -+ player.enableMode(temp); - lwc.sendLocale(player, ""protection.modes.persist.finalize""); - } else { -- lwc.getMemoryDatabase().unregisterMode(player.getName(), mode); -+ player.disableMode(player.getMode(mode)); - lwc.sendLocale(player, ""protection.modes.persist.off""); - } ++ TerrainControl.log(customObjects.size() + "" world custom objects loaded""); - event.setCancelled(true); -- return; } - } -diff --git a/modules/core/src/main/java/com/griefcraft/modules/modify/ModifyModule.java b/modules/core/src/main/java/com/griefcraft/modules/modify/ModifyModule.java -index deed3ec5e..3a5e6e5fe 100644 ---- a/modules/core/src/main/java/com/griefcraft/modules/modify/ModifyModule.java -+++ b/modules/core/src/main/java/com/griefcraft/modules/modify/ModifyModule.java -@@ -20,6 +20,7 @@ - import com.griefcraft.lwc.LWC; - import com.griefcraft.model.AccessRight; - import com.griefcraft.model.Action; -+import com.griefcraft.model.LWCPlayer; - import com.griefcraft.model.Protection; - import com.griefcraft.scripting.JavaModule; - import com.griefcraft.scripting.event.LWCBlockInteractEvent; -@@ -46,11 +47,11 @@ public void onProtectionInteract(LWCProtectionInteractEvent event) { - - LWC lwc = event.getLWC(); - Protection protection = event.getProtection(); -- Player player = event.getPlayer(); -+ LWCPlayer player = lwc.wrapPlayer(event.getPlayer()); - event.setResult(Result.CANCEL); - -- if (lwc.canAdminProtection(player, protection)) { -- Action action = lwc.getMemoryDatabase().getAction(""modify"", player.getName()); -+ if (lwc.canAdminProtection(player.getBukkitPlayer(), protection)) { -+ Action action = player.getAction(""modify""); - - final String defaultEntities = action.getData(); - String[] entities = new String[0]; -@@ -172,10 +173,16 @@ public void onCommand(LWCCommandEvent event) { - } - - String full = join(args, 0); -- Player player = (Player) sender; -+ LWCPlayer player = lwc.wrapPlayer(sender); -+ -+ Action action = new Action(); -+ action.setName(""modify""); -+ action.setPlayer(player); -+ action.setData(full); -+ -+ player.removeAllActions(); -+ player.addAction(action); - -- lwc.getMemoryDatabase().unregisterAllActions(player.getName()); -- lwc.getMemoryDatabase().registerAction(""modify"", player.getName(), full); - lwc.sendLocale(sender, ""protection.modify.finalize""); - return; - } -diff --git a/modules/core/src/main/java/com/griefcraft/modules/owners/OwnersModule.java b/modules/core/src/main/java/com/griefcraft/modules/owners/OwnersModule.java -index 7657f6176..f280e7379 100644 ---- a/modules/core/src/main/java/com/griefcraft/modules/owners/OwnersModule.java -+++ b/modules/core/src/main/java/com/griefcraft/modules/owners/OwnersModule.java -@@ -20,6 +20,7 @@ - import com.griefcraft.lwc.LWC; - import com.griefcraft.model.AccessRight; - import com.griefcraft.model.Action; -+import com.griefcraft.model.LWCPlayer; - import com.griefcraft.model.Protection; - import com.griefcraft.scripting.JavaModule; - import com.griefcraft.scripting.event.LWCBlockInteractEvent; -@@ -46,10 +47,10 @@ public void onProtectionInteract(LWCProtectionInteractEvent event) { - - LWC lwc = event.getLWC(); - Protection protection = event.getProtection(); -- Player player = event.getPlayer(); -+ LWCPlayer player = lwc.wrapPlayer(event.getPlayer()); - event.setResult(Result.CANCEL); - -- Action action = lwc.getMemoryDatabase().getAction(""owners"", player.getName()); -+ Action action = player.getAction(""owners""); - int accessPage = 
Integer.parseInt(action.getData()); - - /* -@@ -142,7 +143,7 @@ public void onCommand(LWCCommandEvent event) { - return; - } - -- Player player = (Player) sender; -+ LWCPlayer player = lwc.wrapPlayer(sender); - int page = 1; - - if (args.length > 0) { -@@ -154,8 +155,14 @@ public void onCommand(LWCCommandEvent event) { - } - } - -- lwc.getMemoryDatabase().unregisterAllActions(player.getName()); -- lwc.getMemoryDatabase().registerAction(""owners"", player.getName(), page + """"); -+ Action action = new Action(); -+ action.setName(""owners""); -+ action.setPlayer(player); -+ action.setData(page + """"); -+ -+ player.removeAllActions(); -+ player.addAction(action); -+ - lwc.sendLocale(sender, ""protection.owners.finalize""); - return; - } -diff --git a/modules/core/src/main/java/com/griefcraft/modules/unlock/UnlockModule.java b/modules/core/src/main/java/com/griefcraft/modules/unlock/UnlockModule.java -index 5475424e5..139deca8f 100644 ---- a/modules/core/src/main/java/com/griefcraft/modules/unlock/UnlockModule.java -+++ b/modules/core/src/main/java/com/griefcraft/modules/unlock/UnlockModule.java -@@ -18,6 +18,8 @@ - package com.griefcraft.modules.unlock; - - import com.griefcraft.lwc.LWC; -+import com.griefcraft.model.Action; -+import com.griefcraft.model.LWCPlayer; - import com.griefcraft.model.Protection; - import com.griefcraft.model.ProtectionTypes; - import com.griefcraft.scripting.JavaModule; -@@ -61,38 +63,35 @@ public void onCommand(LWCCommandEvent event) { - return; - } - -- Player player = (Player) sender; -+ LWCPlayer player = lwc.wrapPlayer(sender); - String password = join(args, 0); - password = encrypt(password); - -- if (!lwc.getMemoryDatabase().hasPendingUnlock(player.getName())) { -+ // see if they have the protection interaction action -+ Action action = player.getAction(""interacted""); -+ -+ if (action == null) { - player.sendMessage(Colors.Red + ""Nothing selected. 
Open a locked protection first.""); +diff --git a/common/src/com/khorn/terraincontrol/customobjects/CustomObject.java b/common/src/com/khorn/terraincontrol/customobjects/CustomObject.java +index bdb8e3347..a7394627a 100644 +--- a/common/src/com/khorn/terraincontrol/customobjects/CustomObject.java ++++ b/common/src/com/khorn/terraincontrol/customobjects/CustomObject.java +@@ -1,101 +1,123 @@ +-package com.khorn.terraincontrol.customobjects; +- +- +-import com.khorn.terraincontrol.configuration.ConfigFile; +- +-import java.io.File; +-import java.io.IOException; +-import java.util.HashMap; +-import java.util.Map; +- +-public class CustomObject extends ConfigFile +-{ +- public boolean IsValid = false; +- public File FilePath; +- public String Name = """"; +- +- public CustomObject(File objectFile) +- { +- FilePath = objectFile; +- Name = objectFile.getName(); +- +- if(!Name.toLowerCase().endsWith(BODefaultValues.BO_Extension.stringValue().toLowerCase())) - return; - } else { -- int protectionId = lwc.getMemoryDatabase().getUnlockID(player.getName()); -+ Protection protection = action.getProtection(); - -- if (protectionId == -1) { -+ if (protection == null) { - lwc.sendLocale(player, ""protection.internalerror"", ""id"", ""unlock""); - return; - } - -- Protection entity = lwc.getPhysicalDatabase().loadProtection(protectionId); - -- if (entity.getType() != ProtectionTypes.PASSWORD) { -+ if (protection.getType() != ProtectionTypes.PASSWORD) { - lwc.sendLocale(player, ""protection.unlock.notpassword""); - return; - } - -- if (entity.getData().equals(password)) { -- lwc.getMemoryDatabase().unregisterUnlock(player.getName()); -- lwc.getMemoryDatabase().registerPlayer(player.getName(), protectionId); -+ if (protection.getData().equals(password)) { -+ player.addAccessibleProtection(protection); - lwc.sendLocale(player, ""protection.unlock.password.valid""); - } else { - lwc.sendLocale(player, ""protection.unlock.password.invalid""); - } - } +- //Remove extension. +- Name = Name.substring(0, Name.length() - 4); - -- return; - } - - } -diff --git a/modules/spout/src/main/java/com/griefcraft/lwc/PasswordRequestModule.java b/modules/spout/src/main/java/com/griefcraft/lwc/PasswordRequestModule.java -index 475d235f5..e6251cc9a 100644 ---- a/modules/spout/src/main/java/com/griefcraft/lwc/PasswordRequestModule.java -+++ b/modules/spout/src/main/java/com/griefcraft/lwc/PasswordRequestModule.java -@@ -18,6 +18,8 @@ - package com.griefcraft.lwc; - - import com.griefcraft.bukkit.LWCSpoutPlugin; -+import com.griefcraft.model.Action; -+import com.griefcraft.model.LWCPlayer; - import com.griefcraft.model.Protection; - import com.griefcraft.model.ProtectionTypes; - import com.griefcraft.scripting.JavaModule; -@@ -104,9 +106,12 @@ public void onButtonClicked(ButtonClickEvent event) { - Button button = event.getButton(); - SpoutPlayer player = event.getPlayer(); - LWC lwc = LWC.getInstance(); -+ LWCPlayer lwcPlayer = lwc.wrapPlayer(player); -+ -+ Action action = lwcPlayer.getAction(""interacted""); - - // if they don't have an unlock req, why is the screen open? 
-- if (!lwc.getMemoryDatabase().hasPendingUnlock(player.getName())) { -+ if (action == null) { - player.getMainScreen().closePopup(); - return; - } -@@ -117,24 +122,21 @@ public void onButtonClicked(ButtonClickEvent event) { - // check their password - String password = lwc.encrypt(textField.getText().trim()); - -- int protectionId = lwc.getMemoryDatabase().getUnlockID(player.getName()); -+ // load the protection they had clicked -+ Protection protection = action.getProtection(); - -- if (protectionId == -1) { -+ if (protection == null) { - lwc.sendLocale(player, ""protection.internalerror"", ""id"", ""unlock""); - return; - } - -- // load the protection they had clicked -- Protection protection = lwc.getPhysicalDatabase().loadProtection(protectionId); - - if (protection.getType() != ProtectionTypes.PASSWORD) { - lwc.sendLocale(player, ""protection.unlock.notpassword""); - return; - } - - if (protection.getData().equals(password)) { -- lwc.getMemoryDatabase().unregisterUnlock(player.getName()); -- lwc.getMemoryDatabase().registerPlayer(player.getName(), protectionId); -+ lwcPlayer.addAccessibleProtection(protection); - player.getMainScreen().closePopup(); - - // open the chest that they clicked :P -diff --git a/skel/doors.yml b/skel/doors.yml -index c602913bc..4ce685a32 100644 ---- a/skel/doors.yml -+++ b/skel/doors.yml -@@ -10,7 +10,7 @@ doors: - # toggle: the door will just open if it's closed, or close if it's opened. Will not auto close. - # openAndClose: the door will automatically close after . If it was already opened, it will NOT re-open. - # -- action: toggle -+ name: toggle - -- # The amount of seconds after opening a door for it to close. No effect if openAndClose action is not being used. -+ # The amount of seconds after opening a door for it to close. No effect if openAndClose name is not being used. - interval: 3 -\ No newline at end of file -diff --git a/src/lang/lwc_cz.properties b/src/lang/lwc_cz.properties -index b52246589..737f1fae4 100644 ---- a/src/lang/lwc_cz.properties -+++ b/src/lang/lwc_cz.properties -@@ -45,7 +45,7 @@ protection.general.locked.password=\ - %red%Musis napsat %gold%%cunlock% %red% k odemknuti ! - protection.general.locked.private=%green%%block% %white%->%red% objekt je uzamknut magickym klicem ! %white%:-) - --# Pending action -+# Pending name - protection.general.pending=%red%Jiz mas nevyrizeny prikaz s LWC! - - ################## -diff --git a/src/lang/lwc_da.properties b/src/lang/lwc_da.properties -index 8bee5fd15..6e43e768c 100644 ---- a/src/lang/lwc_da.properties -+++ b/src/lang/lwc_da.properties -@@ -44,7 +44,7 @@ protection.general.locked.password=\ - %red%Skriv %gold%%cunlock% %red% for at låse op. - protection.general.locked.private=%red%Denne %block% er låst med en trylleformular - --# Pending action -+# Pending name - protection.general.pending=%red%Du har allerede en ventende handling. - - ################## -diff --git a/src/lang/lwc_de.properties b/src/lang/lwc_de.properties -index 0faeb46e9..6eee83e52 100644 ---- a/src/lang/lwc_de.properties -+++ b/src/lang/lwc_de.properties -@@ -45,7 +45,7 @@ protection.general.locked.password=\ - %red%Schreibe %gold%%cunlock% %red%zum entriegeln. - protection.general.locked.private=%red%Diese %block% ist abgeschlossen. - --# Pending action -+# Pending name - protection.general.pending=%red%Du hast derzeit noch eine offene Anfrage. 
- - ################## -diff --git a/src/lang/lwc_en.properties b/src/lang/lwc_en.properties -index a9e491641..e641a3ba2 100644 ---- a/src/lang/lwc_en.properties -+++ b/src/lang/lwc_en.properties -@@ -44,8 +44,8 @@ protection.general.locked.password=\ - %red%Type %gold%%cunlock% %red% to unlock it. - protection.general.locked.private=%red%This %block% is locked with a magical spell - --# Pending action --protection.general.pending=%red%You already have a pending action. -+# Pending name -+protection.general.pending=%red%You already have a pending name. - - ################## - ## Commands ## -diff --git a/src/lang/lwc_es.properties b/src/lang/lwc_es.properties -index 00bd24fbc..f37e00bf5 100644 ---- a/src/lang/lwc_es.properties -+++ b/src/lang/lwc_es.properties -@@ -35,7 +35,7 @@ protection.general.locked.password=\ - %red%Escribe %gold%%cunlock% %red% para desbloquearlo. - protection.general.locked.private=%red%Este %block% esta protegido con un hechizo magico. - --# Pending action -+# Pending name - protection.general.pending=%red%Ya tienes una accion pendiente. - - ################## -diff --git a/src/lang/lwc_fr.properties b/src/lang/lwc_fr.properties -index be4addd2b..3a78963f8 100644 ---- a/src/lang/lwc_fr.properties -+++ b/src/lang/lwc_fr.properties -@@ -31,8 +31,8 @@ protection.general.locked.password=\ - %red%Veuillez taper %gold%%cunlock% %red% pour le/la déverrouiller. - protection.general.locked.private=%red%Ce/Cette %block% est bloqué par un sort magique. - --# Pending action --protection.general.pending=%red%Vous avez déjà une action en cours. -+# Pending name -+protection.general.pending=%red%Vous avez déjà une name en cours. - - ################## - ## Commands ## -diff --git a/src/lang/lwc_nl.properties b/src/lang/lwc_nl.properties -index 813313d55..d7e779b01 100644 ---- a/src/lang/lwc_nl.properties -+++ b/src/lang/lwc_nl.properties -@@ -35,7 +35,7 @@ protection.general.locked.password=\ - %red%Typ %gold%%cunlock% %red% om er toegang tot te krijgen. - protection.general.locked.private=%red%deze %block% is beveiligd met een magische spreuk. - --# Pending action -+# Pending name - protection.general.pending=%red%Er is al een actie bezig. - - ################## -diff --git a/src/lang/lwc_pl.properties b/src/lang/lwc_pl.properties -index e8657902c..d44c07529 100644 ---- a/src/lang/lwc_pl.properties -+++ b/src/lang/lwc_pl.properties -@@ -44,7 +44,7 @@ protection.general.locked.password=\ - %gold%%cunlock% %red%, aby odblokowac. - protection.general.locked.private=%red%Ten %block% jest zablokowany magicznym zakleciem. - --# Pending action -+# Pending name - protection.general.pending=%red%Juz wykonujesz akcje. - - ################## -diff --git a/src/lang/lwc_ru.properties b/src/lang/lwc_ru.properties -index f9243536c..b312d7dc1 100644 ---- a/src/lang/lwc_ru.properties -+++ b/src/lang/lwc_ru.properties -@@ -43,7 +43,7 @@ protection.general.locked.password=\ - %red%Введите %gold%%cunlock% <Пароль>%red% чтобы открыть его. - protection.general.locked.private=%red%%block% защищён магией. - --# Pending action -+# Pending name - protection.general.pending=%red%Вы уже выполняете это действие - - ################## -diff --git a/src/lang/lwc_sv.properties b/src/lang/lwc_sv.properties -index 7c025c889..d19e1f4d4 100644 ---- a/src/lang/lwc_sv.properties -+++ b/src/lang/lwc_sv.properties -@@ -44,7 +44,7 @@ protection.general.locked.password=\ - %red%Skriv %gold%%cunlock% %red% för att låsa upp den. 
- protection.general.locked.private=%red%Detta %block% är låst med en trollformel - --# Pending action -+# Pending name - protection.general.pending=%red%Du har redan et pågående utspel. - - ################## -diff --git a/src/main/java/com/griefcraft/listeners/LWCPlayerListener.java b/src/main/java/com/griefcraft/listeners/LWCPlayerListener.java -index de6a125b9..2fd2c88a7 100644 ---- a/src/main/java/com/griefcraft/listeners/LWCPlayerListener.java -+++ b/src/main/java/com/griefcraft/listeners/LWCPlayerListener.java -@@ -19,6 +19,7 @@ - - import com.griefcraft.lwc.LWC; - import com.griefcraft.lwc.LWCPlugin; -+import com.griefcraft.model.LWCPlayer; - import com.griefcraft.model.Protection; - import com.griefcraft.scripting.Module; - import com.griefcraft.scripting.Module.Result; -@@ -43,6 +44,7 @@ - import org.bukkit.event.player.PlayerQuitEvent; - import org.bukkit.inventory.ItemStack; - -+import java.util.ArrayList; - import java.util.List; - - public class LWCPlayerListener extends PlayerListener { -@@ -103,6 +105,7 @@ public void onPlayerInteract(PlayerInteractEvent event) { - - LWC lwc = plugin.getLWC(); - Player player = event.getPlayer(); -+ LWCPlayer lwcPlayer = lwc.wrapPlayer(player); - Block clickedBlock = event.getClickedBlock(); - Location location = clickedBlock.getLocation(); - -@@ -122,12 +125,22 @@ public void onPlayerInteract(PlayerInteractEvent event) { - } - - try { -- List actions = lwc.getMemoryDatabase().getActions(player.getName()); -+ List actions = new ArrayList(lwcPlayer.getActionNames()); - Protection protection = lwc.findProtection(block); - Module.Result result = Module.Result.CANCEL; - boolean canAccess = lwc.canAccessProtection(player, protection); - boolean canAdmin = lwc.canAdminProtection(player, protection); - -+ // register in an action what protection they interacted with (if applicable.) 
-+ if (protection != null) { -+ com.griefcraft.model.Action action = new com.griefcraft.model.Action(); -+ action.setName(""interacted""); -+ action.setPlayer(lwcPlayer); -+ action.setProtection(protection); -+ -+ lwcPlayer.addAction(action); -+ } -+ - if (event.getAction() == Action.LEFT_CLICK_BLOCK) { - boolean ignoreLeftClick = Boolean.parseBoolean(lwc.resolveProtectionConfiguration(material, ""ignoreLeftClick"")); - -@@ -188,14 +201,10 @@ public void onPlayerQuit(PlayerQuitEvent event) { - return; - } - -- LWC lwc = plugin.getLWC(); -- String player = event.getPlayer().getName(); +- ReadSettingsFile(objectFile); +- CorrectSettings(); +- if (SettingsCache.containsKey(""[META]"") && SettingsCache.containsKey(""[DATA]"")) +- this.IsValid = true; - -- lwc.getMemoryDatabase().unregisterPlayer(player); -- lwc.getMemoryDatabase().unregisterUnlock(player); -- lwc.getMemoryDatabase().unregisterPendingLock(player); -- lwc.getMemoryDatabase().unregisterAllActions(player); -- lwc.getMemoryDatabase().unregisterAllModes(player); -+ LWCPlayer player = LWC.getInstance().wrapPlayer(event.getPlayer()); -+ player.removeAllAccessibleProtections(); -+ player.removeAllActions(); -+ player.disableAllModes(); - } - - } -diff --git a/src/main/java/com/griefcraft/lwc/LWC.java b/src/main/java/com/griefcraft/lwc/LWC.java -index 1081b9642..595656229 100644 ---- a/src/main/java/com/griefcraft/lwc/LWC.java -+++ b/src/main/java/com/griefcraft/lwc/LWC.java -@@ -66,7 +66,6 @@ - import com.griefcraft.scripting.ModuleLoader.Event; - import com.griefcraft.scripting.event.LWCSendLocaleEvent; - import com.griefcraft.sql.Database; --import com.griefcraft.sql.MemDB; - import com.griefcraft.sql.PhysDB; - import com.griefcraft.util.Colors; - import com.griefcraft.util.Performance; -@@ -142,11 +141,6 @@ public class LWC { - */ - private CacheSet caches; - -- /** -- * Memory database instance -- */ -- private MemDB memoryDatabase; +- if (!this.IsValid) +- return; - - /** - * Physical database instance - */ -@@ -205,11 +199,19 @@ public LWC(LWCPlugin plugin) { - /** - * Create an LWCPlayer object for a player - * -- * @param player -+ * @param sender - * @return - */ -- public LWCPlayer wrapPlayer(Player player) { -- return new LWCPlayer(this, player); -+ public LWCPlayer wrapPlayer(CommandSender sender) { -+ if (sender instanceof LWCPlayer) { -+ return (LWCPlayer) sender; -+ } -+ -+ if (!(sender instanceof Player)) { -+ return null; -+ } -+ -+ return LWCPlayer.getPlayer((Player) sender); - } - - /** -@@ -254,11 +256,17 @@ public CacheSet getCaches() { - /** - * Remove all modes if the player is not in persistent mode - * -- * @param player -+ * @param sender - */ -- public void removeModes(Player player) { -- if (notInPersistentMode(player.getName())) { -- memoryDatabase.unregisterAllActions(player.getName()); -+ public void removeModes(CommandSender sender) { -+ if (sender instanceof Player) { -+ Player bPlayer = (Player) sender; -+ -+ if (notInPersistentMode(bPlayer.getName())) { -+ wrapPlayer(bPlayer).getActions().clear(); -+ } -+ } else if (sender instanceof LWCPlayer) { -+ removeModes(((LWCPlayer) sender).getBukkitPlayer()); - } - } - -@@ -369,7 +377,7 @@ public boolean canAccessProtection(Player player, Protection protection) { - return true; - - case ProtectionTypes.PASSWORD: -- return memoryDatabase.hasAccess(player.getName(), protection); -+ return wrapPlayer(player).getAccessibleProtections().contains(protection); - - case ProtectionTypes.PRIVATE: - if (playerName.equalsIgnoreCase(protection.getOwner())) { -@@ -437,7 
+445,7 @@ public boolean canAdminProtection(Player player, Protection protection) { - return player.getName().equalsIgnoreCase(protection.getOwner()); - - case ProtectionTypes.PASSWORD: -- return player.getName().equalsIgnoreCase(protection.getOwner()) && memoryDatabase.hasAccess(player.getName(), protection); -+ return player.getName().equalsIgnoreCase(protection.getOwner()) && wrapPlayer(player).getAccessibleProtections().contains(protection); - - case ProtectionTypes.PRIVATE: - if (playerName.equalsIgnoreCase(protection.getOwner())) { -@@ -483,12 +491,7 @@ public void destruct() { - physicalDatabase.dispose(); - } - -- if (memoryDatabase != null) { -- memoryDatabase.dispose(); +- ReadConfigSettings(); +- } +- +- public CustomObjectCompiled Compile(String settingsLine) +- { +- HashMap newSettings = new HashMap(); +- for (Map.Entry entry : this.SettingsCache.entrySet()) +- if (BODefaultValues.Contains(entry.getKey()) || ObjectCoordinate.isCoordinateString(entry.getKey())) +- newSettings.put(entry.getKey(), entry.getValue()); +- +- String[] keys = settingsLine.split("";""); +- String changedSettings = """"; +- boolean first = true; +- +- for (String key : keys) +- { +- String[] values = null; +- if (key.contains(""="")) +- values = key.split(""="", 2); +- else if (key.contains("":"")) +- values = key.split(""="", 2); +- if (values == null) +- continue; +- if (BODefaultValues.Contains(values[0].toLowerCase()) || ObjectCoordinate.isCoordinateString(values[0])) +- { +- newSettings.put(values[0].toLowerCase(), values[1]); +- changedSettings = changedSettings + (first ? """" : "";"") + key; +- if (first) +- first = false; +- } - } - - physicalDatabase = null; -- memoryDatabase = null; - } - - /** -@@ -576,8 +579,7 @@ public boolean enforceAccess(Player player, Block block) { - switch (protection.getType()) { - case ProtectionTypes.PASSWORD: - if (!hasAccess) { -- getMemoryDatabase().unregisterUnlock(player.getName()); -- getMemoryDatabase().registerUnlock(player.getName(), protection.getId()); -+ wrapPlayer(player).addAccessibleProtection(protection); - - sendLocale(player, ""protection.general.locked.password"", ""block"", materialToString(block)); - } -@@ -778,13 +780,6 @@ public String getLocale(String key, Object... 
args) { - return value; - } - -- /** -- * @return memory database object -- */ -- public MemDB getMemoryDatabase() { -- return memoryDatabase; +- return new CustomObjectCompiled(newSettings, Name, changedSettings, this); +- - } - - /** - * @return the Permissions handler - */ -@@ -1060,7 +1055,6 @@ public void load() { - Performance.init(); - - physicalDatabase = new PhysDB(); -- memoryDatabase = new MemDB(); - updateThread = new UpdateThread(this); - - // Permissions init -@@ -1114,10 +1108,7 @@ public void load() { - log(""Loading "" + Database.DefaultType); - try { - physicalDatabase.connect(); -- memoryDatabase.connect(); +- @Override +- public boolean sayNotFoundEnabled() +- { +- return false; +- } +- +- @Override +- protected void ReadConfigSettings() +- { - - physicalDatabase.load(); -- memoryDatabase.load(); - - log(""Using: "" + StringUtils.capitalizeFirstLetter(physicalDatabase.getConnection().getMetaData().getDriverVersion())); - } catch (Exception e) { -@@ -1257,7 +1248,7 @@ public ItemStack[] mergeInventories(List blocks) { - * @return true if the player is NOT in persistent mode - */ - public boolean notInPersistentMode(String player) { -- return !memoryDatabase.hasMode(player, ""persist""); -+ return !wrapPlayer(Bukkit.getServer().getPlayer(player)).hasMode(""persist""); - } - - /** -diff --git a/src/main/java/com/griefcraft/model/Action.java b/src/main/java/com/griefcraft/model/Action.java -index 5ea7d69f0..e5b68e884 100644 ---- a/src/main/java/com/griefcraft/model/Action.java -+++ b/src/main/java/com/griefcraft/model/Action.java -@@ -19,24 +19,23 @@ - - public class Action { - -- public int id; -- private String action; -- private int protectionId; -+ private String name; -+ private Protection protection; - private String data; -- private String player; -+ private LWCPlayer player; - - /** -- * @return the action -+ * @return the name - */ -- public String getAction() { -- return action; -+ public String getName() { -+ return name; - } - - /** -- * @return the protectionId -+ * @return the Protection associated with this action - */ -- public int getProtectionId() { -- return protectionId; -+ public Protection getProtection() { -+ return protection; - } - - /** -@@ -46,32 +45,25 @@ public String getData() { - return data; - } - -- /** -- * @return the id -- */ -- public int getId() { -- return id; - } - - /** - * @return the player - */ -- public String getPlayer() { -+ public LWCPlayer getPlayer() { - return player; - } - - /** -- * @param action the action to set -+ * @param name the name to set - */ -- public void setAction(String action) { -- this.action = action; -+ public void setName(String name) { -+ this.name = name; - } - - /** -- * @param protectionId the protectionId to set -+ * @param protection the Protection to set - */ -- public void setProtectionId(int protectionId) { -- this.protectionId = protectionId; -+ public void setProtection(Protection protection) { -+ this.protection = protection; - } - - /** -@@ -81,17 +73,10 @@ public void setData(String data) { - this.data = data; - } - -- /** -- * @param id the id to set -- */ -- public void setId(int id) { -- this.id = id; +- @Override +- protected void WriteConfigSettings() throws IOException +- { +- - } - - /** - * @param player the player to set - */ -- public void setPlayer(String player) { -+ public void setPlayer(LWCPlayer player) { - this.player = player; - } - -diff --git a/src/main/java/com/griefcraft/model/LWCPlayer.java b/src/main/java/com/griefcraft/model/LWCPlayer.java -index e0ad1e771..bc93c9aaa 
100644 ---- a/src/main/java/com/griefcraft/model/LWCPlayer.java -+++ b/src/main/java/com/griefcraft/model/LWCPlayer.java -@@ -18,21 +18,269 @@ - package com.griefcraft.model; - - import com.griefcraft.lwc.LWC; -+import org.bukkit.Server; -+import org.bukkit.command.CommandSender; - import org.bukkit.entity.Player; -+import org.bukkit.permissions.Permission; -+import org.bukkit.permissions.PermissionAttachment; -+import org.bukkit.permissions.PermissionAttachmentInfo; -+import org.bukkit.plugin.Plugin; - - import java.util.ArrayList; -+import java.util.Collections; -+import java.util.HashMap; -+import java.util.HashSet; - import java.util.List; -+import java.util.Map; -+import java.util.Set; - --public class LWCPlayer { -+public class LWCPlayer implements CommandSender { - -+ /** -+ * The LWC instance -+ */ - private LWC lwc; -+ -+ /** -+ * The player instance -+ */ - private Player player; - -+ /** -+ * Cache of LWCPlayer objects -+ */ -+ private final static Map players = new HashMap(); -+ -+ /** -+ * The modes bound to all players -+ */ -+ private final static Map> modes = Collections.synchronizedMap(new HashMap>()); -+ -+ /** -+ * The actions bound to all players -+ */ -+ private final static Map> actions = Collections.synchronizedMap(new HashMap>()); -+ -+ /** -+ * Map of protections a player can temporarily access -+ */ -+ private final static Map> accessibleProtections = Collections.synchronizedMap(new HashMap>()); -+ - public LWCPlayer(LWC lwc, Player player) { - this.lwc = lwc; - this.player = player; - } - -+ /** -+ * Get the LWCPlayer object from a Player object -+ * -+ * @param player -+ * @return -+ */ -+ public static LWCPlayer getPlayer(Player player) { -+ if (!players.containsKey(player)) { -+ players.put(player, new LWCPlayer(LWC.getInstance(), player)); -+ } -+ -+ return players.get(player); -+ } -+ -+ /** -+ * @return the Bukkit Player object -+ */ -+ public Player getBukkitPlayer() { -+ return player; -+ } -+ -+ /** -+ * @return the player's name -+ */ -+ public String getName() { -+ return player.getName(); -+ } -+ -+ /** -+ * Enable a mode on the player -+ * -+ * @param mode -+ * @return -+ */ -+ public boolean enableMode(Mode mode) { -+ return getModes().add(mode); -+ } -+ -+ /** -+ * Disable a mode on the player -+ * -+ * @param mode -+ * @return -+ */ -+ public boolean disableMode(Mode mode) { -+ return getModes().remove(mode); -+ } +- @Override +- protected void CorrectSettings() +- { +- +- } +- +- @Override +- protected void RenameOldSettings() +- { +- } +-} ++package com.khorn.terraincontrol.customobjects; + -+ /** -+ * Disable all modes enabled by the player -+ * -+ * @return -+ */ -+ public void disableAllModes() { -+ getModes().clear(); -+ } ++import java.util.Map; ++import java.util.Random; + -+ /** -+ * Check if the player has an action -+ * -+ * @param name -+ * @return -+ */ -+ public boolean hasAction(String name) { -+ return getAction(name) != null; -+ } ++import com.khorn.terraincontrol.LocalBiome; ++import com.khorn.terraincontrol.LocalWorld; + ++public interface CustomObject ++{ + /** -+ * Get the action represented by the name -+ * -+ * @param name ++ * Returns the name of this object. ++ * + * @return + */ -+ public Action getAction(String name) { -+ for (Action action : getActions()) { -+ if (action.getName().equals(name)) { -+ return action; -+ } -+ } -+ -+ return null; -+ } ++ public String getName(); + + /** -+ * Add an action -+ * -+ * @param action -+ * @return ++ * Returns whether this object can spawn as a tree. 
UseWorld and UseBiome ++ * should return true. ++ * ++ * @return Whether this object can spawn as a tree. + */ -+ public boolean addAction(Action action) { -+ return getActions().add(action); -+ } ++ public boolean canSpawnAsTree(); + + /** -+ * Remove an action ++ * Returns whether this object can spawn from the CustomObject() resource. ++ * Vanilla trees should return false; everything else should return true. + * -+ * @param action + * @return + */ -+ public boolean removeAction(Action action) { -+ return getActions().remove(action); -+ } -+ -+ /** -+ * Remove all actions -+ */ -+ public void removeAllActions() { -+ getActions().clear(); -+ } ++ public boolean canSpawnAsObject(); + + /** -+ * Retrieve a Mode object for a player -+ * -+ * @param name -+ * @return ++ * Spawns the object at the given position. ++ * ++ * @param world ++ * @param x ++ * @param y ++ * @param z ++ * @return Whether the attempt was successful. + */ -+ public Mode getMode(String name) { -+ for (Mode mode : getModes()) { -+ if (mode.getName().equals(name)) { -+ return mode; -+ } -+ } -+ -+ return null; -+ } ++ public boolean spawn(LocalWorld world, Random random, int x, int y, int z); + + /** -+ * Check if the player has the given mode -+ * -+ * @param name -+ * @return ++ * Spawns the object at the given position. If the object isn't a tree, it ++ * shouldn't spawn and it should return false. ++ * ++ * @param world ++ * @param x ++ * @param y ++ * @param z ++ * @return Whether the attempt was successful. + */ -+ public boolean hasMode(String name) { -+ return getMode(name) != null; -+ } ++ public boolean spawnAsTree(LocalWorld world, Random random, int x, int y, int z); + + /** -+ * @return the Set of modes the player has activated ++ * Spawns the object at the given position. It should search a suitable y ++ * location by itself. ++ * ++ * @param world ++ * @param x ++ * @param y ++ * @param z ++ * @return Whether the attempt was successful. + */ -+ public Set getModes() { -+ if (!modes.containsKey(this)) { -+ modes.put(this, new HashSet()); -+ } -+ -+ return modes.get(this); -+ } ++ public boolean spawn(LocalWorld world, Random random, int x, int z); + + /** -+ * @return the Set of actions the player has ++ * Spawns the object at the given position. It should search a suitable y ++ * location by itself. If the object isn't a tree, it shouldn't spawn and it ++ * should return false. ++ * ++ * @param world ++ * @param x ++ * @param y ++ * @param z ++ * @return Whether the attempt was successful. + */ -+ public Set getActions() { -+ if (!actions.containsKey(this)) { -+ actions.put(this, new HashSet()); -+ } -+ -+ return actions.get(this); -+ } ++ public boolean spawnAsTree(LocalWorld world, Random random, int x, int z); + + /** -+ * @return a Set containing all of the action names ++ * Spawns the object in a chunk. The object can search a good y position by ++ * itself. ++ * ++ * @param world ++ * @param random ++ * @param chunkX ++ * @param chunkZ + */ -+ public Set getActionNames() { -+ Set actions = getActions(); -+ Set names = new HashSet(actions.size()); -+ -+ for (Action action : actions) { -+ names.add(action.getName()); -+ } -+ -+ return names; -+ } ++ public void process(LocalWorld world, Random random, int chunkX, int chunkZ); + + /** -+ * @return the set of protections the player can temporarily access ++ * Spawns the object in a chunk. The object can search a good y position by ++ * itself. If the object isn't a tree, the object shouldn't spawn and it ++ * should return false. 
++ * ++ * @param world ++ * @param random ++ * @param x ++ * @param z + */ -+ public Set getAccessibleProtections() { -+ if (!accessibleProtections.containsKey(this)) { -+ accessibleProtections.put(this, new HashSet()); -+ } -+ -+ return accessibleProtections.get(this); -+ } ++ public void processAsTree(LocalWorld world, Random random, int chunkX, int chunkZ); + + /** -+ * Add an accessible protection for the player -+ * -+ * @param protection -+ * @return ++ * Returns a copy of this object will all the settings applied. Can return ++ * null if the settings are invalid. ++ * ++ * @param settings ++ * A Map with all the settings. ++ * @return A copy of this object will all the settings applied. + */ -+ public boolean addAccessibleProtection(Protection protection) { -+ return getAccessibleProtections().add(protection); -+ } ++ public CustomObject applySettings(Map settings); + + /** -+ * Remove an accessible protection from the player -+ * -+ * @param protection ++ * Returns whether this object would like to spawn in this biome. BO2s will ++ * return whether this biome is in their spawnInBiome setting. ++ * ++ * @param biome + * @return + */ -+ public boolean removeAccessibleProtection(Protection protection) { -+ return getAccessibleProtections().remove(protection); -+ } -+ -+ /** -+ * Remove all accessible protections -+ */ -+ public void removeAllAccessibleProtections() { -+ getAccessibleProtections().clear(); -+ } -+ - /** - * Create a History object that is attached to this protection - * -@@ -84,4 +332,63 @@ public List getRelatedHistory(History.Type type) { - return related; - } - -+ public void sendMessage(String s) { -+ player.sendMessage(s); -+ } -+ -+ public Server getServer() { -+ return player.getServer(); -+ } -+ -+ public boolean isPermissionSet(String s) { -+ return player.isPermissionSet(s); -+ } -+ -+ public boolean isPermissionSet(Permission permission) { -+ return player.isPermissionSet(permission); -+ } -+ -+ public boolean hasPermission(String s) { -+ return player.hasPermission(s); -+ } -+ -+ public boolean hasPermission(Permission permission) { -+ return player.hasPermission(permission); -+ } -+ -+ public PermissionAttachment addAttachment(Plugin plugin, String s, boolean b) { -+ return player.addAttachment(plugin, s, b); -+ } -+ -+ public PermissionAttachment addAttachment(Plugin plugin) { -+ return player.addAttachment(plugin); -+ } -+ -+ public PermissionAttachment addAttachment(Plugin plugin, String s, boolean b, int i) { -+ return player.addAttachment(plugin, s, b, i); -+ } -+ -+ public PermissionAttachment addAttachment(Plugin plugin, int i) { -+ return player.addAttachment(plugin, i); -+ } -+ -+ public void removeAttachment(PermissionAttachment permissionAttachment) { -+ player.removeAttachment(permissionAttachment); -+ } -+ -+ public void recalculatePermissions() { -+ player.recalculatePermissions(); -+ } -+ -+ public Set getEffectivePermissions() { -+ return player.getEffectivePermissions(); -+ } -+ -+ public boolean isOp() { -+ return player.isOp(); -+ } -+ -+ public void setOp(boolean b) { -+ player.setOp(b); -+ } - } -diff --git a/src/main/java/com/griefcraft/model/Mode.java b/src/main/java/com/griefcraft/model/Mode.java -new file mode 100644 -index 000000000..75a5450cd ---- /dev/null -+++ b/src/main/java/com/griefcraft/model/Mode.java -@@ -0,0 +1,63 @@ -+/** -+ * This file is part of LWC (https://github.com/Hidendra/LWC) -+ * -+ * This program is free software: you can redistribute it and/or modify -+ * it under the terms of the GNU General Public License 
as published by -+ * the Free Software Foundation, either version 3 of the License, or -+ * (at your option) any later version. -+ * -+ * This program is distributed in the hope that it will be useful, -+ * but WITHOUT ANY WARRANTY; without even the implied warranty of -+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+ * GNU General Public License for more details. -+ * -+ * You should have received a copy of the GNU General Public License -+ * along with this program. If not, see . -+ */ -+ -+package com.griefcraft.model; -+ -+import org.bukkit.entity.Player; -+ -+public class Mode { -+ -+ /** -+ * The name of this mode -+ */ -+ private String name; -+ -+ /** -+ * The player this mode belongs to -+ */ -+ private Player player; -+ -+ /** -+ * Mode data -+ */ -+ private String data; -+ -+ public String getName() { -+ return name; -+ } -+ -+ public Player getPlayer() { -+ return player; -+ } -+ -+ public String getData() { -+ return data; -+ } -+ -+ public void setName(String name) { -+ this.name = name; -+ } -+ -+ public void setPlayer(Player player) { -+ this.player = player; -+ } -+ -+ public void setData(String data) { -+ this.data = data; -+ } -+ ++ public boolean hasPreferenceToSpawnIn(LocalBiome biome); +} -diff --git a/src/main/java/com/griefcraft/sql/MemDB.java b/src/main/java/com/griefcraft/sql/MemDB.java -deleted file mode 100755 -index b92531459..000000000 ---- a/src/main/java/com/griefcraft/sql/MemDB.java +diff --git a/common/src/com/khorn/terraincontrol/customobjects/CustomObjectCompiled.java b/common/src/com/khorn/terraincontrol/customobjects/CustomObjectCompiled.java +deleted file mode 100644 +index 032cf215c..000000000 +--- a/common/src/com/khorn/terraincontrol/customobjects/CustomObjectCompiled.java +++ /dev/null -@@ -1,831 +0,0 @@ --/** -- * This file is part of LWC (https://github.com/Hidendra/LWC) -- * -- * This program is free software: you can redistribute it and/or modify -- * it under the terms of the GNU General Public License as published by -- * the Free Software Foundation, either version 3 of the License, or -- * (at your option) any later version. -- * -- * This program is distributed in the hope that it will be useful, -- * but WITHOUT ANY WARRANTY; without even the implied warranty of -- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -- * GNU General Public License for more details. -- * -- * You should have received a copy of the GNU General Public License -- * along with this program. If not, see . 
-- */ -- --package com.griefcraft.sql; +@@ -1,229 +0,0 @@ +-package com.khorn.terraincontrol.customobjects; - --import com.griefcraft.model.Action; --import com.griefcraft.model.Protection; --import com.griefcraft.util.Performance; +-import com.khorn.terraincontrol.DefaultMaterial; +-import com.khorn.terraincontrol.configuration.ConfigFile; - --import java.sql.PreparedStatement; --import java.sql.ResultSet; --import java.sql.Statement; +-import java.io.IOException; -import java.util.ArrayList; --import java.util.List; +-import java.util.HashMap; +-import java.util.HashSet; - --public class MemDB extends Database { +-public class CustomObjectCompiled extends ConfigFile +-{ +- public ObjectCoordinate[][] Data = new ObjectCoordinate[4][]; - -- public MemDB() { -- super(); -- } +- public CustomObjectCompiled[] GroupObjects = null; - -- public MemDB(Type currentType) { -- super(currentType); -- } +- public String Name; - -- @Override -- protected void postPrepare() { -- Performance.addMemDBQuery(); -- } +- public HashSet SpawnInBiome; - -- public Action getAction(String action, String player) { -- try { -- PreparedStatement statement = prepare(""SELECT * FROM "" + prefix + ""actions WHERE player = ? AND action = ?""); -- statement.setString(1, player); -- statement.setString(2, action); -- -- ResultSet set = statement.executeQuery(); -- -- if (set.next()) { -- final int id = set.getInt(""id""); -- final String actionString = set.getString(""action""); -- final String playerString = set.getString(""player""); -- final int chestID = set.getInt(""chest""); -- final String data = set.getString(""data""); -- -- final Action act = new Action(); -- act.setId(id); -- act.setAction(actionString); -- act.setPlayer(playerString); -- act.setProtectionId(chestID); -- act.setData(data); -- -- return act; -- } +- public String Version; +- public HashSet SpawnOnBlockType; - +- public HashSet CollisionBlockType; - -- } catch (final Exception e) { -- printException(e); -- } +- public boolean SpawnWater; +- public boolean SpawnLava; +- public boolean SpawnAboveGround; +- public boolean SpawnUnderGround; - -- return null; -- } +- public boolean SpawnSunlight; +- public boolean SpawnDarkness; - -- /** -- * Get the chest ID associated with a player's unlock request -- * -- * @param player the player to lookup -- * @return the chest ID -- */ -- public int getActionID(String action, String player) { -- try { -- int chestID = -1; +- public boolean UnderFill; +- public boolean RandomRotation; +- public boolean Dig; +- public boolean Tree; +- public boolean Branch; +- public boolean DiggingBranch; +- public boolean NeedsFoundation; +- public int Rarity; +- public double CollisionPercentage; +- public int SpawnElevationMin; +- public int SpawnElevationMax; - -- PreparedStatement statement = prepare(""SELECT chest FROM "" + prefix + ""actions WHERE action = ? 
AND player = ?""); -- statement.setString(1, action); -- statement.setString(2, player); +- public int GroupFrequencyMin; +- public int GroupFrequencyMax; +- public int GroupSeparationMin; +- public int GroupSeparationMax; +- public String GroupId; - -- final ResultSet set = statement.executeQuery(); +- public int BranchLimit; - -- while (set.next()) { -- chestID = set.getInt(""chest""); -- } +- public String ChangedSettings; - +- public CustomObject parent; - -- return chestID; -- } catch (final Exception e) { -- printException(e); -- } +- public CustomObjectCompiled(HashMap settings, String name, String changedSettings, CustomObject parent) +- { +- SettingsCache = settings; +- this.Name = name; +- this.ChangedSettings = changedSettings; +- this.parent = parent; +- +- ReadConfigSettings(); +- CorrectSettings(); - -- return -1; +- SettingsCache.clear(); +- SettingsCache = null; - } - -- /** -- * Get all the active actions for a player -- * -- * @param player the player to get actions for -- * @return the List of actions -- */ -- public List getActions(String player) { -- final List actions = new ArrayList(); - -- try { -- PreparedStatement statement = prepare(""SELECT action FROM "" + prefix + ""actions WHERE player = ?""); -- statement.setString(1, player); +- public boolean CheckBiome(String biomeName) +- { +- return (SpawnInBiome.contains(BODefaultValues.BO_ALL_KEY.stringValue()) || SpawnInBiome.contains(BODefaultValues.BO_ALL_KEY.stringValue().toLowerCase()) || SpawnInBiome.contains(biomeName)); +- } - -- final ResultSet set = statement.executeQuery(); - -- while (set.next()) { -- final String action = set.getString(""action""); +- @Override +- protected void ReadConfigSettings() +- { +- this.Version = ReadModSettings(BODefaultValues.version.name(), BODefaultValues.version.stringValue()); - -- actions.add(action); -- } - -- } catch (final Exception e) { -- printException(e); -- } +- this.SpawnOnBlockType = this.ReadBlockList(ReadModSettings(BODefaultValues.spawnOnBlockType.name(), BODefaultValues.spawnOnBlockType.StringArrayListValue()),BODefaultValues.spawnOnBlockType.name()); +- this.CollisionBlockType = this.ReadBlockList(ReadModSettings(BODefaultValues.collisionBlockType.name(), BODefaultValues.collisionBlockType.StringArrayListValue()),BODefaultValues.collisionBlockType.name()); - -- return actions; -- } +- this.SpawnInBiome = new HashSet(ReadModSettings(BODefaultValues.spawnInBiome.name(), BODefaultValues.spawnInBiome.StringArrayListValue())); - -- /** -- * @return the path where the database file should be saved -- */ -- @Override -- public String getDatabasePath() { -- // if we're using mysql, just open another connection -- if (currentType == Type.MySQL) { -- return super.getDatabasePath(); -- } - -- return "":memory:""; -- } +- this.SpawnSunlight = ReadModSettings(BODefaultValues.spawnSunlight.name(), BODefaultValues.spawnSunlight.booleanValue()); +- this.SpawnDarkness = ReadModSettings(BODefaultValues.spawnDarkness.name(), BODefaultValues.spawnDarkness.booleanValue()); +- this.SpawnWater = ReadModSettings(BODefaultValues.spawnWater.name(), BODefaultValues.spawnWater.booleanValue()); +- this.SpawnLava = ReadModSettings(BODefaultValues.spawnLava.name(), BODefaultValues.spawnLava.booleanValue()); +- this.SpawnAboveGround = ReadModSettings(BODefaultValues.spawnAboveGround.name(), BODefaultValues.spawnAboveGround.booleanValue()); +- this.SpawnUnderGround = ReadModSettings(BODefaultValues.spawnUnderGround.name(), BODefaultValues.spawnUnderGround.booleanValue()); - -- /** -- * Get 
the password submitted for a pending chest lock -- * -- * @param player the player to lookup -- * @return the password for the pending lock -- */ -- public String getLockPassword(String player) { -- try { -- String password = """"; +- this.UnderFill = ReadModSettings(BODefaultValues.underFill.name(), BODefaultValues.underFill.booleanValue()); - -- PreparedStatement statement = prepare(""SELECT password FROM "" + prefix + ""locks WHERE player = ?""); -- statement.setString(1, player); +- this.RandomRotation = ReadModSettings(BODefaultValues.randomRotation.name(), BODefaultValues.randomRotation.booleanValue()); +- this.Dig = ReadModSettings(BODefaultValues.dig.name(), BODefaultValues.dig.booleanValue()); +- this.Tree = ReadModSettings(BODefaultValues.tree.name(), BODefaultValues.tree.booleanValue()); +- this.Branch = ReadModSettings(BODefaultValues.branch.name(), BODefaultValues.branch.booleanValue()); +- this.DiggingBranch = ReadModSettings(BODefaultValues.diggingBranch.name(), BODefaultValues.diggingBranch.booleanValue()); +- this.NeedsFoundation = ReadModSettings(BODefaultValues.needsFoundation.name(), BODefaultValues.needsFoundation.booleanValue()); +- this.Rarity = ReadModSettings(BODefaultValues.rarity.name(), BODefaultValues.rarity.intValue()); +- this.CollisionPercentage = ReadModSettings(BODefaultValues.collisionPercentage.name(), BODefaultValues.collisionPercentage.intValue()); +- this.SpawnElevationMin = ReadModSettings(BODefaultValues.spawnElevationMin.name(), BODefaultValues.spawnElevationMin.intValue()); +- this.SpawnElevationMax = ReadModSettings(BODefaultValues.spawnElevationMax.name(), BODefaultValues.spawnElevationMax.intValue()); - -- final ResultSet set = statement.executeQuery(); +- this.GroupFrequencyMin = ReadModSettings(BODefaultValues.groupFrequencyMin.name(), BODefaultValues.groupFrequencyMin.intValue()); +- this.GroupFrequencyMax = ReadModSettings(BODefaultValues.groupFrequencyMax.name(), BODefaultValues.groupFrequencyMax.intValue()); +- this.GroupSeparationMin = ReadModSettings(BODefaultValues.groupSeperationMin.name(), BODefaultValues.groupSeperationMin.intValue()); +- this.GroupSeparationMax = ReadModSettings(BODefaultValues.groupSeperationMax.name(), BODefaultValues.groupSeperationMax.intValue()); +- this.GroupId = ReadModSettings(BODefaultValues.groupId.name(), BODefaultValues.groupId.stringValue()); - -- while (set.next()) { -- password = set.getString(""password""); -- } - +- this.BranchLimit = ReadModSettings(BODefaultValues.branchLimit.name(), BODefaultValues.branchLimit.intValue()); - -- return password; -- } catch (final Exception e) { -- printException(e); -- } +- this.ReadCoordinates(); +- } - -- return null; +- @Override +- protected boolean sayNotFoundEnabled() +- { +- return false; - } +- +- @Override +- protected void CorrectSettings() +- { - -- /** -- * Get the mode data for a player's mode -- * -- * @param player -- * @param mode -- * @return -- */ -- public String getModeData(String player, String mode) { -- String ret = null; -- try { -- PreparedStatement statement = prepare(""SELECT data FROM "" + prefix + ""modes WHERE player = ? 
AND mode = ?""); -- statement.setString(1, player); -- statement.setString(2, mode); - -- final ResultSet set = statement.executeQuery(); -- if (set.next()) { -- ret = set.getString(""data""); -- } +- } - +- @Override +- protected void WriteConfigSettings() throws IOException +- { - -- } catch (final Exception e) { -- printException(e); -- } -- return ret; - } - -- /** -- * Get the modes a player has activated -- * -- * @param player the player to get -- * @return the List of modes the player is using -- */ -- public List getModes(String player) { -- final List modes = new ArrayList(); +- @Override +- protected void RenameOldSettings() +- { - -- try { -- PreparedStatement statement = prepare(""SELECT * FROM "" + prefix + ""modes WHERE player = ?""); -- statement.setString(1, player); +- } - -- final ResultSet set = statement.executeQuery(); - -- while (set.next()) { -- final String mode = set.getString(""mode""); +- private void ReadCoordinates() +- { +- ArrayList coordinates = new ArrayList(); - -- modes.add(mode); -- } +- for (String key : SettingsCache.keySet()) +- { +- ObjectCoordinate buffer = ObjectCoordinate.getCoordinateFromString(key, SettingsCache.get(key)); +- if (buffer != null) +- coordinates.add(buffer); +- } - +- Data[0] = new ObjectCoordinate[coordinates.size()]; +- Data[1] = new ObjectCoordinate[coordinates.size()]; +- Data[2] = new ObjectCoordinate[coordinates.size()]; +- Data[3] = new ObjectCoordinate[coordinates.size()]; - -- } catch (final Exception e) { -- printException(e); +- for (int i = 0; i < coordinates.size(); i++) +- { +- ObjectCoordinate coordinate = coordinates.get(i); +- +- Data[0][i] = coordinate; +- coordinate = coordinate.Rotate(); +- Data[1][i] = coordinate; +- coordinate = coordinate.Rotate(); +- Data[2][i] = coordinate; +- coordinate = coordinate.Rotate(); +- Data[3][i] = coordinate; - } - -- return modes; -- } - -- /** -- * Get all of the users ""logged in"" to a chest -- * -- * @param chestID the chest ID to look at -- * @return -- */ -- public List getSessionUsers(int chestID) { -- final List sessionUsers = new ArrayList(); +- } - -- try { -- PreparedStatement statement = prepare(""SELECT player FROM "" + prefix + ""sessions WHERE chest = ?""); -- statement.setInt(1, chestID); +- private HashSet ReadBlockList(ArrayList blocks, String settingName) +- { +- HashSet output = new HashSet(); - -- final ResultSet set = statement.executeQuery(); +- boolean nonIntegerValues = false; +- boolean all = false; +- boolean solid = false; - -- while (set.next()) { -- final String player = set.getString(""player""); +- for (String block : blocks) +- { - -- sessionUsers.add(player); +- if (block.equals(BODefaultValues.BO_ALL_KEY.stringValue())) +- { +- all = true; +- continue; +- } +- if (block.equals(BODefaultValues.BO_SolidKey.stringValue())) +- { +- solid = true; +- continue; +- } +- try +- { +- int blockID = Integer.decode(block); +- if (blockID != 0) +- output.add(blockID); +- } catch (NumberFormatException e) +- { +- nonIntegerValues = true; - } -- -- } catch (final Exception e) { -- printException(e); - } - -- return sessionUsers; -- } +- if (all || solid) +- for (DefaultMaterial material : DefaultMaterial.values()) +- { +- if(material.id == 0) +- continue; +- if (solid && !material.isSolid()) +- continue; +- output.add(material.id); - -- /** -- * Get the chest ID associated with a player's unlock request -- * -- * @param player the player to lookup -- * @return the chest ID -- */ -- public int getUnlockID(String player) { -- return getActionID(""unlock"", 
player); -- } +- } +- if (nonIntegerValues) +- System.out.println(""TerrainControl: Custom object "" + this.Name + "" have wrong value "" + settingName); - -- /** -- * Check if a player has an active chest session -- * -- * @param player the player to check -- * @param chestID the chest ID to check -- * @return true if the player has access -- */ -- public boolean hasAccess(String player, int chestID) { -- try { -- PreparedStatement statement = prepare(""SELECT player FROM "" + prefix + ""sessions WHERE chest = ?""); -- statement.setInt(1, chestID); +- return output; - -- final ResultSet set = statement.executeQuery(); +- } +-} +diff --git a/common/src/com/khorn/terraincontrol/customobjects/CustomObjectGen.java b/common/src/com/khorn/terraincontrol/customobjects/CustomObjectGen.java +deleted file mode 100644 +index 06acaad21..000000000 +--- a/common/src/com/khorn/terraincontrol/customobjects/CustomObjectGen.java ++++ /dev/null +@@ -1,344 +0,0 @@ +-package com.khorn.terraincontrol.customobjects; - -- while (set.next()) { -- final String player2 = set.getString(""player""); +-import com.khorn.terraincontrol.DefaultMaterial; +-import com.khorn.terraincontrol.LocalWorld; +-import com.khorn.terraincontrol.configuration.BiomeConfig; +-import com.khorn.terraincontrol.configuration.Resource; +-import com.khorn.terraincontrol.generator.resourcegens.ResourceGenBase; - -- if (player.equals(player2)) { +-import java.util.ArrayList; +-import java.util.HashMap; +-import java.util.Random; - +-public class CustomObjectGen extends ResourceGenBase +-{ - -- return true; -- } -- } - +- @Override +- public void Process(LocalWorld world, Random rand, Resource res, int _x, int _z) +- { - -- } catch (final Exception e) { -- printException(e); -- } +- if (res.CUObjects.length == 0) +- return; - -- return false; -- } +- _x = _x + 8; +- _z = _z + 8; - -- /** -- * Check if a player has an active chest session -- * -- * @param player the player to check -- * @param chest the chest to check -- * @return true if the player has access -- */ -- public boolean hasAccess(String player, Protection chest) { -- return chest == null || hasAccess(player, chest.getId()); +- boolean objectSpawned = false; +- int spawnAttempts = 0; +- while (!objectSpawned) +- { +- if (spawnAttempts > world.getSettings().objectSpawnRatio) +- return; - -- } +- spawnAttempts++; - -- /** -- * Return if a player has the mode -- * -- * @param player the player to check -- * @param mode the mode to check -- */ -- public boolean hasMode(String player, String mode) { -- List modes = getModes(player); +- CustomObjectCompiled SelectedObject = res.CUObjects[rand.nextInt(res.CUObjects.length)]; +- if (SelectedObject.Branch) +- continue; - -- return modes.size() > 0 && modes.contains(mode); -- } +- int randomRoll = rand.nextInt(100); +- int ObjectRarity = SelectedObject.Rarity; - -- /** -- * Check if a player has a pending action -- * -- * @param player the player to check -- * @param action the action to check -- * @return true if they have a record -- */ -- public boolean hasPendingAction(String action, String player) { -- return getAction(action, player) != null; -- } +- while (randomRoll < ObjectRarity) +- { +- ObjectRarity -= 100; +- +- int x = _x + rand.nextInt(16); +- int z = _z + rand.nextInt(16); +- int y; +- +- if (SelectedObject.SpawnAboveGround) +- y = world.getSolidHeight(x, z); +- else if (SelectedObject.SpawnUnderGround) +- { +- int solidHeight = world.getSolidHeight(x, z); +- if (solidHeight < 1 || solidHeight <= 
SelectedObject.SpawnElevationMin) +- continue; +- if (solidHeight > SelectedObject.SpawnElevationMax) +- solidHeight = SelectedObject.SpawnElevationMax; +- y = rand.nextInt(solidHeight - SelectedObject.SpawnElevationMin) + SelectedObject.SpawnElevationMin; +- } else +- y = world.getHighestBlockYAt(x, z); - -- /** -- * Check if a player has a pending chest request -- * -- * @param player The player to check -- * @return true if the player has a pending chest request -- */ -- public boolean hasPendingChest(String player) { -- try { -- PreparedStatement statement = prepare(""SELECT id FROM "" + prefix + ""locks WHERE player = ?""); -- statement.setString(1, player); +- if (y < 0) +- continue; +- +- if (!ObjectCanSpawn(world, x, y, z, SelectedObject)) +- continue; - -- ResultSet set = statement.executeQuery(); - -- if (set.next()) { -- set.close(); -- return true; -- } +- objectSpawned = GenerateCustomObject(world, rand, x, y, z, SelectedObject); - -- set.close(); -- } catch (final Exception e) { -- printException(e); +- if (objectSpawned) +- GenerateCustomObjectFromGroup(world, rand, x, y, z, SelectedObject); +- } - } - -- return false; -- } - -- /** -- * Check if a player has a pending unlock request -- * -- * @param player the player to check -- * @return true if the player has a pending unlock request -- */ -- public boolean hasPendingUnlock(String player) { -- return getUnlockID(player) != -1; - } - -- /** -- * create the in-memory table which hold sessions, users that have activated a chest. Not needed past a restart, so no need for extra disk i/o -- */ -- @Override -- public void load() { -- if (loaded) { +- public static void GenerateCustomObjectFromGroup(LocalWorld world, Random rand, int x, int y, int z, CustomObjectCompiled workObject) +- { +- if (workObject.GroupObjects == null) - return; -- } - -- try { -- // reusable column -- Column column; +- int attempts = 3; +- if ((workObject.GroupFrequencyMax - workObject.GroupFrequencyMin) > 0) +- attempts = workObject.GroupFrequencyMin + rand.nextInt(workObject.GroupFrequencyMax - workObject.GroupFrequencyMin); +- +- while (attempts > 0) +- { +- attempts--; - -- Table sessions = new Table(this, ""sessions""); -- sessions.setMemory(true); +- int objIndex = rand.nextInt(workObject.GroupObjects.length); +- CustomObjectCompiled ObjectFromGroup = workObject.GroupObjects[objIndex]; - -- { -- column = new Column(""id""); -- column.setType(""INTEGER""); -- column.setPrimary(true); -- sessions.add(column); -- -- column = new Column(""player""); -- column.setType(""VARCHAR(255)""); -- sessions.add(column); -- -- column = new Column(""chest""); -- column.setType(""INTEGER""); -- sessions.add(column); -- } +- if (ObjectFromGroup.Branch) +- continue; - -- Table locks = new Table(this, ""locks""); -- locks.setMemory(true); +- x = x + rand.nextInt(workObject.GroupSeparationMax - workObject.GroupSeparationMin) + workObject.GroupSeparationMin; +- z = z + rand.nextInt(workObject.GroupSeparationMax - workObject.GroupSeparationMin) + workObject.GroupSeparationMin; +- int _y; - +- if (workObject.SpawnAboveGround) +- _y = world.getSolidHeight(x, z); +- else if (workObject.SpawnUnderGround) - { -- column = new Column(""id""); -- column.setType(""INTEGER""); -- column.setPrimary(true); -- locks.add(column); -- -- column = new Column(""player""); -- column.setType(""VARCHAR(255)""); -- locks.add(column); -- -- column = new Column(""password""); -- column.setType(""VARCHAR(100)""); -- locks.add(column); -- } +- int solidHeight = world.getSolidHeight(x, z); +- if 
(solidHeight < 1 || solidHeight <= workObject.SpawnElevationMin) +- continue; +- if (solidHeight > workObject.SpawnElevationMax) +- solidHeight = workObject.SpawnElevationMax; +- _y = rand.nextInt(solidHeight - workObject.SpawnElevationMin) + workObject.SpawnElevationMin; +- } else +- _y = world.getHighestBlockYAt(x, z); +- +- if (y < 0) +- continue; +- +- if ((y - _y) > 10 || (_y - y) > 10) +- continue; +- +- if (!ObjectCanSpawn(world, x, y, z, ObjectFromGroup)) +- continue; +- GenerateCustomObject(world, rand, x, _y, z, ObjectFromGroup); +- } +- +- +- } +- +- +- public static boolean GenerateCustomObject(LocalWorld world, Random rand, int x, int y, int z, CustomObjectCompiled workObject) +- { +- +- ObjectCoordinate[] data = workObject.Data[0]; +- if (workObject.RandomRotation) +- data = workObject.Data[rand.nextInt(4)]; - -- Table actions = new Table(this, ""actions""); -- actions.setMemory(true); - +- int faultCounter = 0; +- +- for (ObjectCoordinate point : data) +- { +- if (!world.isLoaded((x + point.x), (y + point.y), (z + point.z))) +- return false; +- +- if (!workObject.Dig) - { -- column = new Column(""id""); -- column.setType(""INTEGER""); -- column.setPrimary(true); -- actions.add(column); -- -- column = new Column(""action""); -- column.setType(""VARCHAR(255)""); -- actions.add(column); -- -- column = new Column(""player""); -- column.setType(""VARCHAR(255)""); -- actions.add(column); -- -- column = new Column(""chest""); -- column.setType(""INTEGER""); -- actions.add(column); -- -- column = new Column(""data""); -- column.setType(""VARCHAR(255)""); -- actions.add(column); +- if (workObject.CollisionBlockType.contains(world.getTypeId((x + point.x), (y + point.y), (z + point.z)))) +- { +- faultCounter++; +- if (faultCounter > (data.length * (workObject.CollisionPercentage / 100))) +- { +- return false; +- } +- } - } - -- Table modes = new Table(this, ""modes""); -- modes.setMemory(true); - +- } +- +- for (ObjectCoordinate point : data) +- { +- +- if (world.getTypeId(x + point.x, y + point.y, z + point.z) == 0) +- { +- world.setBlock((x + point.x), y + point.y, z + point.z, point.BlockId, point.BlockData, true, false, true); +- } else if (workObject.Dig) - { -- column = new Column(""id""); -- column.setType(""INTEGER""); -- column.setPrimary(true); -- modes.add(column); -- -- column = new Column(""player""); -- column.setType(""VARCHAR(255)""); -- modes.add(column); -- -- column = new Column(""mode""); -- column.setType(""VARCHAR(255)""); -- modes.add(column); -- -- column = new Column(""data""); -- column.setType(""VARCHAR(255)""); -- modes.add(column); +- world.setBlock((x + point.x), y + point.y, z + point.z, point.BlockId, point.BlockData, true, false, true); - } - -- // now create all of the tables -- sessions.execute(); -- locks.execute(); -- actions.execute(); -- modes.execute(); -- } catch (final Exception e) { -- printException(e); - } +- return true; - -- loaded = true; - } - -- /** -- * @return the number of pending chest locks -- */ -- public int pendingCount() { -- int count = 0; +- public static boolean ObjectCanSpawn(LocalWorld world, int x, int y, int z, CustomObjectCompiled obj) +- { +- if ((world.getTypeId(x, y - 5, z) == 0) && (obj.NeedsFoundation)) +- return false; - -- try { -- Statement statement = connection.createStatement(); -- final ResultSet set = statement.executeQuery(""SELECT id FROM "" + prefix + ""locks""); +- boolean output = true; +- int checkBlock = world.getTypeId(x, y + 2, z); +- if (!obj.SpawnWater) +- output = !((checkBlock == 
DefaultMaterial.WATER.id) || (checkBlock == DefaultMaterial.STATIONARY_WATER.id)); +- if (!obj.SpawnLava) +- output = !((checkBlock == DefaultMaterial.LAVA.id) || (checkBlock == DefaultMaterial.STATIONARY_LAVA.id)); - -- while (set.next()) { -- count++; -- } +- checkBlock = world.getLightLevel(x, y + 2, z); +- if (!obj.SpawnSunlight) +- output = !(checkBlock > 8); +- if (!obj.SpawnDarkness) +- output = !(checkBlock < 9); - -- statement.close(); +- if ((y < obj.SpawnElevationMin) || (y > obj.SpawnElevationMax)) +- output = false; - -- } catch (final Exception e) { -- printException(e); -- } +- if (!obj.SpawnOnBlockType.contains(world.getTypeId(x, y - 1, z))) +- output = false; - -- return count; +- return output; - } - -- /** -- * Register a pending chest unlock, for when the player does /unlock -- * -- * @param action -- * @param player -- */ -- public void registerAction(String action, String player) { -- try { -- /* -- * We only want 1 action per player, no matter what! -- */ -- unregisterAction(action, player); -- -- PreparedStatement statement = prepare(""INSERT INTO "" + prefix + ""actions (action, player, chest) VALUES (?, ?, ?)""); -- statement.setString(1, action); -- statement.setString(2, player); -- statement.setInt(3, -1); -- -- statement.executeUpdate(); +- @Override +- protected void SpawnResource(LocalWorld world, Random rand, Resource res, int x, int z) +- { - - -- } catch (final Exception e) { -- printException(e); -- } - } - -- /** -- * Register a pending chest unlock, for when the player does /unlock -- * -- * @param player the player to register -- * @param chestID the chestID to unlock -- */ -- public void registerAction(String action, String player, int chestID) { -- try { -- /* -- * We only want 1 action per player, no matter what! -- */ -- unregisterAction(action, player); +- @Override +- protected String WriteString(Resource res, String blockSources) +- { +- String output = """"; +- boolean first = true; - -- PreparedStatement statement = prepare(""INSERT INTO "" + prefix + ""actions (action, player, chest) VALUES (?, ?, ?)""); -- statement.setString(1, action); -- statement.setString(2, player); -- statement.setInt(3, chestID); +- for (String name : res.CUObjectsNames) +- { +- output = output + (first ? """" : "",""); +- if (first) +- first = false; - -- statement.executeUpdate(); +- if (name.equals(BODefaultValues.BO_Use_World.stringValue()) || name.equals(BODefaultValues.BO_Use_Biome.stringValue())) +- { +- output += name; +- continue; +- } - +- for (CustomObjectCompiled object : res.CUObjects) +- if (object.Name.equals(name)) +- output += name + (object.ChangedSettings.equals("""") ? """" : (""("" + object.ChangedSettings + "")"")); - -- } catch (final Exception e) { -- printException(e); - } +- +- return output; +- - } - -- /** -- * Register an action, used for various actions (stating the obvious here) -- * -- * @param player the player to register -- * @param data data -- */ -- public void registerAction(String action, String player, String data) { -- try { -- /* -- * We only want 1 action per player, no matter what! 
-- */ -- unregisterAction(action, player); +- @Override +- protected boolean ReadString(Resource res, String[] Props, BiomeConfig biomeConfig) throws NumberFormatException +- { - -- PreparedStatement statement = prepare(""INSERT INTO "" + prefix + ""actions (action, player, data) VALUES (?, ?, ?)""); -- statement.setString(1, action); -- statement.setString(2, player); -- statement.setString(3, data); +- ArrayList objects = new ArrayList(); +- ArrayList objectsName = new ArrayList(); +- HashMap> Groups = new HashMap>(); - -- statement.executeUpdate(); +- if (Props.length == 1 && Props[0].equals("""")) +- { +- AddCompiledObjectsFromWorld(biomeConfig, objects, Groups); +- objectsName.add(BODefaultValues.BO_Use_World.stringValue()); - +- } else +- for (String key : Props) +- { +- if (key.equals(BODefaultValues.BO_Use_World.stringValue())) +- { +- AddCompiledObjectsFromWorld(biomeConfig, objects, Groups); +- objectsName.add(BODefaultValues.BO_Use_World.stringValue()); +- continue; +- } - -- } catch (final Exception e) { -- printException(e); -- } -- } +- if (key.equals(BODefaultValues.BO_Use_Biome.stringValue())) +- { +- AddCompiledObjectsFromBiome(biomeConfig, objects, Groups); +- objectsName.add(BODefaultValues.BO_Use_Biome.stringValue()); +- continue; +- } - -- /** -- * Register a mode to a player (temporary) -- * -- * @param player the player to register the mode to -- * @param mode the mode to register -- */ -- public void registerMode(String player, String mode) { -- try { -- PreparedStatement statement = prepare(""INSERT INTO "" + prefix + ""modes (player, mode) VALUES (?, ?)""); -- statement.setString(1, player); -- statement.setString(2, mode); +- CustomObjectCompiled obj = ObjectsStore.CompileString(key, biomeConfig.worldConfig.CustomObjectsDirectory); +- if (obj == null) +- obj = ObjectsStore.CompileString(key, ObjectsStore.GlobalDirectory); +- if (obj != null) +- { +- objects.add(obj); +- objectsName.add(obj.Name); - -- statement.executeUpdate(); +- if (!obj.GroupId.equals("""")) +- { +- if (!Groups.containsKey(obj.GroupId)) +- Groups.put(obj.GroupId, new ArrayList()); - +- Groups.get(obj.GroupId).add(obj); - -- } catch (final Exception e) { -- printException(e); -- } -- } +- } - -- /** -- * Register a mode with data to a player (temporary) -- * -- * @param player the player to register the mode to -- * @param mode the mode to register -- * @param data additional data -- */ -- public void registerMode(String player, String mode, String data) { -- try { -- PreparedStatement statement = prepare(""INSERT INTO "" + prefix + ""modes (player, mode, data) VALUES (?, ?, ?)""); -- statement.setString(1, player); -- statement.setString(2, mode); -- statement.setString(3, data); - -- statement.executeUpdate(); +- } - +- } - -- } catch (final Exception e) { -- printException(e); +- for (CustomObjectCompiled objectCompiled : objects) +- { +- if (Groups.containsKey(objectCompiled.GroupId)) +- { +- objectCompiled.GroupObjects = Groups.get(objectCompiled.GroupId).toArray(new CustomObjectCompiled[0]); +- } - } +- +- res.CUObjects = objects.toArray(res.CUObjects); +- res.CUObjectsNames = objectsName.toArray(res.CUObjectsNames); +- +- return true; - } - -- /** -- * Register a pending lock request to a player -- * -- * @param player the player to assign the chest to -- * @param password the password to register with -- */ -- public void registerPendingLock(String player, String password) { -- try { -- PreparedStatement statement = prepare(""INSERT INTO "" + prefix + ""locks (player, password) VALUES 
(?, ?)""); -- statement.setString(1, player); -- statement.setString(2, password); - -- statement.executeUpdate(); +- private void AddCompiledObjectsFromWorld(BiomeConfig biomeConfig, ArrayList output, HashMap> groups) +- { +- for (CustomObjectCompiled objectCompiled : biomeConfig.worldConfig.CustomObjectsCompiled) +- if (objectCompiled.CheckBiome(biomeConfig.Name)) +- { +- output.add(objectCompiled); +- if (!objectCompiled.GroupId.equals("""")) +- { +- if (!groups.containsKey(objectCompiled.GroupId)) +- groups.put(objectCompiled.GroupId, new ArrayList()); - +- groups.get(objectCompiled.GroupId).add(objectCompiled); +- +- } +- +- } - -- } catch (final Exception e) { -- printException(e); -- } - } - -- /** -- * Add a player to be allowed to access a chest -- * -- * @param player the player to add -- * @param chestID the chest ID to allow them to access -- */ -- public void registerPlayer(String player, int chestID) { -- try { -- PreparedStatement statement = prepare(""INSERT INTO "" + prefix + ""sessions (player, chest) VALUES(?, ?)""); -- statement.setString(1, player); -- statement.setInt(2, chestID); +- private void AddCompiledObjectsFromBiome(BiomeConfig biomeConfig, ArrayList output, HashMap> groups) +- { +- for (CustomObjectCompiled objectCompiled : biomeConfig.CustomObjectsCompiled) +- { +- output.add(objectCompiled); +- if (!objectCompiled.GroupId.equals("""")) +- { +- if (!groups.containsKey(objectCompiled.GroupId)) +- groups.put(objectCompiled.GroupId, new ArrayList()); - -- statement.executeUpdate(); +- groups.get(objectCompiled.GroupId).add(objectCompiled); - +- } - -- } catch (final Exception e) { -- printException(e); - } -- } - -- /** -- * Register a pending chest unlock, for when the player does /unlock -- * -- * @param player the player to register -- * @param chestID the chestID to unlock -- */ -- public void registerUnlock(String player, int chestID) { -- registerAction(""unlock"", player, chestID); - } - -- /** -- * @return the number of active session -- */ -- public int sessionCount() { -- int count = 0; +-} +diff --git a/common/src/com/khorn/terraincontrol/customobjects/CustomObjectLoader.java b/common/src/com/khorn/terraincontrol/customobjects/CustomObjectLoader.java +new file mode 100644 +index 000000000..b4ab6a652 +--- /dev/null ++++ b/common/src/com/khorn/terraincontrol/customobjects/CustomObjectLoader.java +@@ -0,0 +1,8 @@ ++package com.khorn.terraincontrol.customobjects; ++ ++import java.io.File; ++ ++public interface CustomObjectLoader ++{ ++ public CustomObject loadFromFile(String objectName, File file); ++} +diff --git a/common/src/com/khorn/terraincontrol/customobjects/CustomObjectManager.java b/common/src/com/khorn/terraincontrol/customobjects/CustomObjectManager.java +new file mode 100644 +index 000000000..580a41005 +--- /dev/null ++++ b/common/src/com/khorn/terraincontrol/customobjects/CustomObjectManager.java +@@ -0,0 +1,221 @@ ++package com.khorn.terraincontrol.customobjects; ++ ++import java.io.File; ++import java.util.HashMap; ++import java.util.Map; ++import com.khorn.terraincontrol.LocalWorld; ++import com.khorn.terraincontrol.TerrainControl; ++import com.khorn.terraincontrol.configuration.WorldConfig; ++import com.khorn.terraincontrol.customobjects.bo2.BO2Loader; ++import com.khorn.terraincontrol.generator.resourcegens.TreeType; ++ ++public class CustomObjectManager ++{ ++ /* ++ * Khoorn's comment, copied from the removed class ObjectsStore: ++ ++ Load: ++ 1)Load here all objects. ++ 2)Start save coordinates thread. 
++ 2)Pre compile objects (make arrays for different angle) ?? ++ 3)Compile custom objects array for each biome. Based on world Bo2 list + Biome Bo2 list + Biome CustomTree list ++ a) store in one array in biome ++ b) store in different arrays in biome ??? ++ 4) Load ObjectCoordinates from file and add that instance to save thread. ++ ++ New load ++ 1) Load all objects from world directory ++ 2) Search and load objects from plugin directory ++ ++ ++ ++ Spawn: ++ 1)CustomObject resource, Tree resource, sapling, command ++ 2)Select random object if needed. ++ 3)Check for biome and select CustomBiome array if needed. ++ 4)Check for spawn conditions. ++ 5)Check for collision ++ a) Check for block collisions ++ b) If out of loaded chunks and object.dig == false - drop. ++ c) If out of loaded chunks and object.branch && !object.digBranch == true - drop ++ d) ?? ++ 6)Set blocks ++ a) If out of loaded chunks - get ObjectBuffer from CoordinatesStore and save to it. ++ b) If found branch start point - select random branch from group and call 5 for it. ++ ++ ++ Calculate branch size for in chunk check?? ++ Call branch in this chunk or in next ?? ++ ++ */ ++ ++ ++ ++ public final Map loaders; ++ public final Map globalObjects; ++ ++ public CustomObjectManager(Map loaders, Map globalObjects) ++ { ++ // These are the actual lists, not just a copy. ++ this.loaders = loaders; ++ this.globalObjects = globalObjects; ++ ++ // Register loaders ++ TerrainControl.registerCustomObjectLoader(""bo2"", new BO2Loader()); ++ ++ // Load all global objects (they can overwrite special objects) ++ TerrainControl.getEngine().getGlobalObjectsDirectory().mkdirs(); ++ this.globalObjects.putAll(loadObjects(TerrainControl.getEngine().getGlobalObjectsDirectory())); ++ TerrainControl.log(this.globalObjects.size() + "" global custom objects loaded.""); ++ ++ // Put some default CustomObjects ++ for(TreeType type: TreeType.values()) ++ { ++ globalObjects.put(type.name().toLowerCase(), new TreeObject(type)); ++ } ++ globalObjects.put(""useworld"", new UseWorld()); ++ globalObjects.put(""usebiome"", new UseBiome()); ++ } ++ ++ /** ++ * Returns the global CustomObject with the given name. ++ * @param name Name of the CustomObject, case-insensitive. ++ * @return The CustomObject, or null if there isn't one with that name. ++ */ ++ public CustomObject getCustomObject(String name) ++ { ++ return globalObjects.get(name.toLowerCase()); ++ } ++ ++ /** ++ * Returns the CustomObject with the given name. It searches for a world object first, and then it searches for a global object. ++ * @param name Name of the CustomObject, case-insensitive. ++ * @param world The world to search in first before searching the global objects. ++ * @return The CustomObject, or null if there isn't one with that name. ++ */ ++ public CustomObject getCustomObject(String name, LocalWorld world) ++ { ++ return getCustomObject(name, world.getSettings()); ++ } ++ ++ /** ++ * Returns the CustomObject with the given name. It searches for a world object first, and then it searches for a global object. ++ * @param name Name of the CustomObject, case-insensitive. ++ * @param config The config to search in first before searching the global objects. ++ * @return The CustomObject, or null if there isn't one with that name. 
++ */ ++ public CustomObject getCustomObject(String name, WorldConfig config) ++ { ++ if(config.customObjects.containsKey(name.toLowerCase())) { ++ return config.customObjects.get(name.toLowerCase()); ++ } ++ return getCustomObject(name); ++ } ++ ++ /** ++ * Returns a Map with all CustomObjects in a directory in it. The Map will ++ * have the lowercase object name as a key. ++ * ++ * @param directory ++ * The directory to load from. ++ * @return ++ */ ++ public Map loadObjects(File directory) ++ { ++ if (!directory.isDirectory()) ++ { ++ throw new IllegalArgumentException(""Given file is not a directory: "" + directory.getAbsolutePath()); ++ } ++ ++ Map objects = new HashMap(); ++ for (File file : directory.listFiles()) ++ { ++ // Get name and extension ++ String[] fileName = file.getName().split(""\\.""); ++ String objectName; ++ String objectType; ++ if (fileName.length == 1) ++ { ++ // Found an object without an extension ++ objectName = fileName[0]; ++ objectType = """"; ++ } else ++ { ++ // Found an object with an extension ++ objectType = fileName[fileName.length - 1]; ++ objectName = """"; ++ for (int i = 0; i < fileName.length - 2; i++) ++ { ++ objectName += fileName[i]; ++ } ++ } ++ ++ // Get the object ++ CustomObjectLoader loader = loaders.get(objectType); ++ if (loader != null) ++ { ++ objects.put(objectName.toLowerCase(), loader.loadFromFile(objectName, file)); ++ } ++ } ++ ++ return objects; ++ } ++ ++ /** ++ * Parses a String in the format name(setting1=foo,setting2=bar) and returns a CustomObject. ++ * @param string ++ * @param world The world to search in ++ * @return A CustomObject, or null if no one was found. ++ */ ++ public CustomObject getObjectFromString(String string, LocalWorld world) ++ { ++ return this.getObjectFromString(string, world.getSettings()); ++ } ++ ++ /** ++ * Parses a String in the format name(setting1=foo,setting2=bar) and returns a CustomObject. ++ * @param string ++ * @param config The config to search in ++ * @return A CustomObject, or null if no one was found. 
++ */ ++ public CustomObject getObjectFromString(String string, WorldConfig config) ++ { ++ String[] parts = new String[]{string, """"}; ++ ++ int start = string.indexOf(""(""); ++ int end = string.lastIndexOf("")""); ++ if (start != -1 && end != -1) ++ { ++ parts[0] = string.substring(0, start); ++ parts[1] = string.substring(start + 1, end); ++ } ++ ++ CustomObject object = getCustomObject(parts[0], config); ++ ++ if(object != null && parts[1].length() != 0) { ++ // More settings have been given ++ Map settingsMap = new HashMap(); ++ ++ String[] settings = parts[1].split("";""); ++ for(String setting: settings) ++ { ++ String[] settingParts = setting.split(""=""); ++ if(settingParts.length == 1) ++ { ++ // Boolean values ++ settingsMap.put(settingParts[0], ""true""); ++ } else if(settingParts.length == 2) ++ { ++ settingsMap.put(settingParts[0], settingParts[1]); ++ } ++ } ++ ++ if(settingsMap.size() > 0) ++ { ++ object = object.applySettings(settingsMap); ++ } ++ } ++ ++ return object; ++ } ++} +diff --git a/common/src/com/khorn/terraincontrol/customobjects/ObjectBuffer.java b/common/src/com/khorn/terraincontrol/customobjects/ObjectBuffer.java +deleted file mode 100644 +index fb4562ae5..000000000 +--- a/common/src/com/khorn/terraincontrol/customobjects/ObjectBuffer.java ++++ /dev/null +@@ -1,6 +0,0 @@ +-package com.khorn.terraincontrol.customobjects; - -- try { -- Statement statement = connection.createStatement(); -- final ResultSet set = statement.executeQuery(""SELECT id FROM "" + prefix + ""sessions""); +-public class ObjectBuffer +-{ - -- while (set.next()) { -- count++; -- } +-} +diff --git a/common/src/com/khorn/terraincontrol/customobjects/ObjectCoordinatesStore.java b/common/src/com/khorn/terraincontrol/customobjects/ObjectCoordinatesStore.java +deleted file mode 100644 +index 1236b4b6d..000000000 +--- a/common/src/com/khorn/terraincontrol/customobjects/ObjectCoordinatesStore.java ++++ /dev/null +@@ -1,28 +0,0 @@ +-package com.khorn.terraincontrol.customobjects; +- +-import java.io.File; +-import java.util.Hashtable; - -- statement.close(); +-public class ObjectCoordinatesStore implements Runnable +-{ - -- } catch (final Exception e) { -- printException(e); -- } +- public Hashtable Coordinates; +- +- public ObjectCoordinatesStore() +- { +- this.Coordinates = new Hashtable(); - -- return count; - } - -- /** -- * Unregister a pending chest unlock -- * -- * @param player the player to unregister -- */ -- public void unregisterAction(String action, String player) { -- try { -- PreparedStatement statement = prepare(""DELETE FROM "" + prefix + ""actions WHERE action = ? AND player = ?""); -- statement.setString(1, action); -- statement.setString(2, player); +- public void ReadStore(File data) +- { - -- statement.executeUpdate(); +- } - +- // Save thread. 
+- @Override +- public void run() +- { - -- } catch (final Exception e) { -- printException(e); -- } - } +-} +diff --git a/common/src/com/khorn/terraincontrol/customobjects/ObjectsStore.java b/common/src/com/khorn/terraincontrol/customobjects/ObjectsStore.java +deleted file mode 100644 +index b54a7a491..000000000 +--- a/common/src/com/khorn/terraincontrol/customobjects/ObjectsStore.java ++++ /dev/null +@@ -1,139 +0,0 @@ +-package com.khorn.terraincontrol.customobjects; - -- /** -- * Unregister all of the actions for a player -- * -- * @param player the player to unregister -- */ -- public void unregisterAllActions(String player) { -- try { -- PreparedStatement statement = prepare(""DELETE FROM "" + prefix + ""actions WHERE player = ?""); -- statement.setString(1, player); - -- statement.executeUpdate(); +-import java.io.File; +-import java.util.ArrayList; - +-public class ObjectsStore +-{ - -- } catch (final Exception e) { -- printException(e); -- } -- } +- /* +- Load: +- 1)Load here all objects. +- 2)Start save coordinates thread. +- 2)Pre compile objects (make arrays for different angle) ?? +- 3)Compile custom objects array for each biome. Based on world Bo2 list + Biome Bo2 list + Biome CustomTree list +- a) store in one array in biome +- b) store in different arrays in biome ??? +- 4) Load ObjectCoordinates from file and add that instance to save thread. - -- /** -- * Remove all the pending chest requests -- */ -- public void unregisterAllChests() { -- try { -- Statement statement = connection.createStatement(); -- statement.executeUpdate(""DELETE FROM "" + prefix + ""locks""); +- New load +- 1) Load all objects from world directory +- 2) Search and load objects from plugin directory - -- statement.close(); - -- } catch (final Exception e) { -- printException(e); -- } -- } - -- /** -- * Unregister all of the modes FROM "" + prefix + ""a player -- * -- * @param player the player to unregister all modes from -- */ -- public void unregisterAllModes(String player) { -- try { -- PreparedStatement statement = prepare(""DELETE FROM "" + prefix + ""modes WHERE player = ?""); -- statement.setString(1, player); +- Spawn: +- 1)CustomObject resource, Tree resource, sapling, command +- 2)Select random object if needed. +- 3)Check for biome and select CustomBiome array if needed. +- 4)Check for spawn conditions. +- 5)Check for collision +- a) Check for block collisions +- b) If out of loaded chunks and object.dig == false - drop. +- c) If out of loaded chunks and object.branch && !object.digBranch == true - drop +- d) ?? +- 6)Set blocks +- a) If out of loaded chunks - get ObjectBuffer from CoordinatesStore and save to it. +- b) If found branch start point - select random branch from group and call 5 for it. +- +- +- Calculate branch size for in chunk check?? +- Call branch in this chunk or in next ?? +- - -- statement.executeUpdate(); - - -- } catch (final Exception e) { -- printException(e); -- } -- } - -- /** -- * Unregister a mode FROM "" + prefix + ""a player -- * -- * @param player the player to register the mode to -- * @param mode the mode to unregister - */ -- public void unregisterMode(String player, String mode) { -- try { -- PreparedStatement statement = prepare(""DELETE FROM "" + prefix + ""modes WHERE player = ? 
AND mode = ?""); -- statement.setString(1, player); -- statement.setString(2, mode); - -- statement.executeUpdate(); - +- public static File GlobalDirectory; +- +- public static void ReadObjects(File pluginPath) +- { +- GlobalDirectory = new File(pluginPath, BODefaultValues.BO_GlobalDirectoryName.stringValue()); - -- } catch (final Exception e) { -- printException(e); +- if (!GlobalDirectory.exists()) +- { +- if (!GlobalDirectory.mkdirs()) +- { +- System.out.println(""TerrainControl: can`t create GlobalObjects directory""); +- } - } +- +- +- //objectsList = LoadObjectsFromDirectory(directory); +- +- //System.out.println(""TerrainControl: "" + objectsList.size() + "" custom objects loaded""); +- +- - } - -- /** -- * Remove a pending lock request FROM "" + prefix + ""a player -- * -- * @param player the player to remove -- */ -- public void unregisterPendingLock(String player) { -- try { -- PreparedStatement statement = prepare(""DELETE FROM "" + prefix + ""locks WHERE player = ?""); -- statement.setString(1, player); - -- statement.executeUpdate(); +- public static ArrayList LoadObjectsFromDirectory(File path) +- { +- ArrayList outputList = new ArrayList(); - +- File[] files = path.listFiles(); +- if (files == null) +- return outputList; - -- } catch (final Exception e) { -- printException(e); +- for (File customObjectFile : files) +- { +- if (customObjectFile.isFile()) +- { +- CustomObject object = new CustomObject(customObjectFile); +- if (object.IsValid) +- outputList.add(object); +- } - } -- } +- return outputList; - -- /** -- * Remove a player FROM "" + prefix + ""any sessions -- * -- * @param player the player to remove -- */ -- public void unregisterPlayer(String player) { -- try { -- PreparedStatement statement = prepare(""DELETE FROM "" + prefix + ""sessions WHERE player = ?""); -- statement.setString(1, player); - -- statement.executeUpdate(); +- } +- +- public static String[] ParseString(String key) +- { +- String[] output = new String[]{key, """"}; - -- } catch (final Exception e) { -- printException(e); +- int start = key.indexOf(""(""); +- int end = key.lastIndexOf("")""); +- if (start != -1 && end != -1) +- { +- output[0] = key.substring(0, start); +- output[1] = key.substring(start + 1, end); - } +- return output; - } - -- /** -- * Unregister a pending chest unlock -- * -- * @param player the player to unregister -- */ -- public void unregisterUnlock(String player) { -- unregisterAction(""unlock"", player); +- public static CustomObject GetObjectFromDirectory(String name, File directory) +- { +- File customObjectFile = new File(directory, name+"".""+BODefaultValues.BO_Extension.stringValue()); +- +- if (!customObjectFile.isFile()) +- { +- return null; +- } +- +- CustomObject object = new CustomObject(customObjectFile); +- +- if (!object.IsValid) +- { +- return null; +- } +- +- return object; - } - --}" -c9d4924dcf129512dadd22dcd6fe0046cbcded43,drools,BZ-1039639 - GRE doesn't recognize MVEL inline- lists when opening rule--,c,https://github.com/kiegroup/drools,"diff --git a/drools-workbench-models/drools-workbench-models-commons/src/test/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceUnmarshallingTest.java b/drools-workbench-models/drools-workbench-models-commons/src/test/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceUnmarshallingTest.java -index 05b7b836bf1..9da1dd93e78 100644 ---- 
a/drools-workbench-models/drools-workbench-models-commons/src/test/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceUnmarshallingTest.java -+++ b/drools-workbench-models/drools-workbench-models-commons/src/test/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceUnmarshallingTest.java -@@ -26,7 +26,9 @@ - import org.drools.workbench.models.datamodel.oracle.ModelField; - import org.drools.workbench.models.datamodel.oracle.PackageDataModelOracle; - import org.drools.workbench.models.datamodel.rule.ActionCallMethod; -+import org.drools.workbench.models.datamodel.rule.ActionFieldValue; - import org.drools.workbench.models.datamodel.rule.ActionGlobalCollectionAdd; -+import org.drools.workbench.models.datamodel.rule.ActionSetField; - import org.drools.workbench.models.datamodel.rule.BaseSingleFieldConstraint; - import org.drools.workbench.models.datamodel.rule.CEPWindow; - import org.drools.workbench.models.datamodel.rule.CompositeFactPattern; -@@ -36,6 +38,8 @@ - import org.drools.workbench.models.datamodel.rule.ExpressionVariable; - import org.drools.workbench.models.datamodel.rule.FactPattern; - import org.drools.workbench.models.datamodel.rule.FieldConstraint; -+import org.drools.workbench.models.datamodel.rule.FieldNature; -+import org.drools.workbench.models.datamodel.rule.FieldNatureType; - import org.drools.workbench.models.datamodel.rule.FreeFormLine; - import org.drools.workbench.models.datamodel.rule.IPattern; - import org.drools.workbench.models.datamodel.rule.RuleModel; -@@ -1982,6 +1986,43 @@ public void testExpressionWithListSize() throws Exception { - assertEquals(1,constraint.getConstraintValueType()); - } - -+ @Test -+ @Ignore(""https://bugzilla.redhat.com/show_bug.cgi?id=1039639 - GRE doesn't recognize MVEL inline lists when opening rule"") -+ public void testMVELInlineList() throws Exception { -+ String drl = """" + -+ ""rule \""Borked\""\n"" + -+ "" dialect \""mvel\""\n"" + -+ "" when\n"" + -+ "" c : Company( )\n"" + -+ "" then\n"" + -+ "" c.setEmps( [\""item1\"", \""item2\""] );\n"" + -+ ""end""; +- public static CustomObjectCompiled CompileString(String key, File directory) +- { +- String[] values = ParseString(key); +- CustomObject object = GetObjectFromDirectory(values[0], directory); +- +- if (object == null) +- return null; +- +- return object.Compile(values[1]); +- } +-} +diff --git a/common/src/com/khorn/terraincontrol/customobjects/TreeObject.java b/common/src/com/khorn/terraincontrol/customobjects/TreeObject.java +new file mode 100644 +index 000000000..aa19e3a59 +--- /dev/null ++++ b/common/src/com/khorn/terraincontrol/customobjects/TreeObject.java +@@ -0,0 +1,90 @@ ++package com.khorn.terraincontrol.customobjects; + -+ addModelField(""Company"", -+ ""emps"", -+ ""java.util.List"", -+ ""List""); ++import java.util.Map; ++import java.util.Random; + -+ RuleModel m = RuleModelDRLPersistenceImpl.getInstance().unmarshal( drl, -+ dmo ); -+ assertEquals( 1, -+ m.rhs.length ); -+ assertTrue( m.rhs[0] instanceof ActionSetField); -+ ActionSetField actionSetField = (ActionSetField) m.rhs[0]; ++import com.khorn.terraincontrol.LocalBiome; ++import com.khorn.terraincontrol.LocalWorld; ++import com.khorn.terraincontrol.generator.resourcegens.TreeType; + -+ assertEquals(""c"", actionSetField.getVariable()); ++public class TreeObject implements CustomObject ++{ ++ TreeType type; ++ ++ public TreeObject(TreeType type) ++ { ++ this.type = type; ++ } ++ ++ @Override ++ public String getName() ++ { ++ return type.name(); ++ } + -+ 
assertEquals(1, actionSetField.getFieldValues().length); ++ @Override ++ public boolean canSpawnAsTree() ++ { ++ return true; ++ } ++ ++ @Override ++ public boolean canSpawnAsObject() ++ { ++ return false; ++ } + -+ ActionFieldValue actionFieldValue = actionSetField.getFieldValues()[0]; ++ @Override ++ public boolean spawn(LocalWorld world, Random random, int x, int y, int z) ++ { ++ return world.PlaceTree(type, random, x, y, z); ++ } + -+ assertEquals(""[\""item1\"", \""item2\""]"",actionFieldValue.getValue()); -+ assertEquals(""emps"",actionFieldValue.getField()); -+ assertEquals(FieldNatureType.TYPE_FORMULA, actionFieldValue.getNature()); -+ assertEquals(""Collection"",actionFieldValue.getType()); ++ @Override ++ public boolean spawnAsTree(LocalWorld world, Random random, int x, int y, int z) ++ { ++ return world.PlaceTree(type, random, x, y, z); ++ } ++ ++ @Override ++ public boolean spawn(LocalWorld world, Random random, int x, int z) ++ { ++ return world.PlaceTree(type, random, x, world.getHighestBlockYAt(x, z), z); ++ } + ++ @Override ++ public boolean spawnAsTree(LocalWorld world, Random random, int x, int z) ++ { ++ return world.PlaceTree(type, random, x, world.getHighestBlockYAt(x, z), z); + } + - private void assertEqualsIgnoreWhitespace( final String expected, - final String actual ) { - final String cleanExpected = expected.replaceAll( ""\\s+""," -83d5b1e6a0280cc78625bacc2d3f7d1676c7385e,kotlin,Supported propagation for subclass of- j.u.Collection and similar classes.--,a,https://github.com/JetBrains/intellij-community,⚠️ HTTP 404: Not Found -7db30f8428ef341cc39b2758d3bd6dcccc25b080,hadoop,MAPREDUCE-3345. Fixed a race condition in- ResourceManager that was causing TestContainerManagerSecurity to fail- sometimes. Contributed by Hitesh Shah. svn merge -c r1199144- --ignore-ancestry ../../trunk/--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1199145 13f79535-47bb-0310-9956-ffa450edef68-,c,https://github.com/apache/hadoop,"diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt -index 43cb6b7dd248c..e1034c880ef65 100644 ---- a/hadoop-mapreduce-project/CHANGES.txt -+++ b/hadoop-mapreduce-project/CHANGES.txt -@@ -39,6 +39,9 @@ Release 0.23.1 - Unreleased - MAPREDUCE-3342. Fixed JobHistoryServer to also show the job's queue - name. (Jonathan Eagles via vinodkv) - -+ MAPREDUCE-3345. Fixed a race condition in ResourceManager that was causing -+ TestContainerManagerSecurity to fail sometimes. 
(Hitesh Shah via vinodkv) ++ @Override ++ public void process(LocalWorld world, Random random, int chunkX, int chunkZ) ++ { ++ // A tree has no rarity, so spawn it once in the chunk ++ int x = chunkX * 16 + random.nextInt(16); ++ int z = chunkZ * 16 + random.nextInt(16); ++ int y = world.getHighestBlockYAt(x, z); ++ world.PlaceTree(type, random, x, y, z); ++ } + - Release 0.23.0 - 2011-11-01 - - INCOMPATIBLE CHANGES -diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java -index 0d81f80121213..71dd982b607af 100644 ---- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java -+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java -@@ -595,8 +595,13 @@ public void transition(RMAppAttemptImpl appAttempt, - AM_CONTAINER_PRIORITY, ""*"", appAttempt.submissionContext - .getAMContainerSpec().getResource(), 1); - -- appAttempt.scheduler.allocate(appAttempt.applicationAttemptId, -- Collections.singletonList(request), EMPTY_CONTAINER_RELEASE_LIST); -+ Allocation amContainerAllocation = -+ appAttempt.scheduler.allocate(appAttempt.applicationAttemptId, -+ Collections.singletonList(request), EMPTY_CONTAINER_RELEASE_LIST); -+ if (amContainerAllocation != null -+ && amContainerAllocation.getContainers() != null) { -+ assert(amContainerAllocation.getContainers().size() == 0); -+ } - } - } - -diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/FifoScheduler.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/FifoScheduler.java -index c61c7ab89f0ec..977150520a195 100644 ---- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/FifoScheduler.java -+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/FifoScheduler.java -@@ -236,28 +236,30 @@ public Allocation allocate( - RMContainerEventType.RELEASED); - } - -- if (!ask.isEmpty()) { -- LOG.debug(""allocate: pre-update"" + -- "" applicationId="" + applicationAttemptId + -- "" application="" + application); -- application.showRequests(); -- -- // Update application requests -- application.updateResourceRequests(ask); -- -- LOG.debug(""allocate: post-update"" + -- "" applicationId="" + applicationAttemptId + -- "" application="" + application); -- application.showRequests(); -+ synchronized (application) { -+ if (!ask.isEmpty()) { -+ LOG.debug(""allocate: pre-update"" + -+ "" applicationId="" + applicationAttemptId + -+ "" application="" + application); -+ application.showRequests(); ++ @Override ++ public void processAsTree(LocalWorld world, Random random, int chunkX, int chunkZ) ++ { ++ 
process(world, random, chunkX, chunkZ); ++ } + -+ // Update application requests -+ application.updateResourceRequests(ask); ++ @Override ++ public CustomObject applySettings(Map settings) ++ { ++ // Trees don't support this ++ return this; ++ } + -+ LOG.debug(""allocate: post-update"" + -+ "" applicationId="" + applicationAttemptId + -+ "" application="" + application); -+ application.showRequests(); ++ @Override ++ public boolean hasPreferenceToSpawnIn(LocalBiome biome) ++ { ++ return true; ++ } + -+ LOG.debug(""allocate:"" + -+ "" applicationId="" + applicationAttemptId + -+ "" #ask="" + ask.size()); -+ } - -- LOG.debug(""allocate:"" + -- "" applicationId="" + applicationAttemptId + -- "" #ask="" + ask.size()); -+ return new Allocation( -+ application.pullNewlyAllocatedContainers(), -+ application.getHeadroom()); - } -- -- return new Allocation( -- application.pullNewlyAllocatedContainers(), -- application.getHeadroom()); - } - - private SchedulerApp getApplication(" -7631ac6f526edf472d1383f7b82171c7ac29f0fb,Delta Spike,"DELTASPIKE-378 add getPropertyAwarePropertyValue - -feel free to propose a better name if you find one :) -",a,https://github.com/apache/deltaspike,"diff --git a/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/api/config/ConfigResolver.java b/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/api/config/ConfigResolver.java -index 05cbc59df..6b3ada263 100644 ---- a/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/api/config/ConfigResolver.java -+++ b/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/api/config/ConfigResolver.java -@@ -162,18 +162,110 @@ public static String getPropertyValue(String key) - *

Attention: This method must only be used after all ConfigSources - * have been registered, and it must not be used to determine the ProjectStage itself.

- * @param key -+ * @return the configured value or if non found the defaultValue -+ * -+ */ -+ public static String getProjectStageAwarePropertyValue(String key) ++} +diff --git a/common/src/com/khorn/terraincontrol/customobjects/UseBiome.java b/common/src/com/khorn/terraincontrol/customobjects/UseBiome.java +new file mode 100644 +index 000000000..ca6864099 +--- /dev/null ++++ b/common/src/com/khorn/terraincontrol/customobjects/UseBiome.java +@@ -0,0 +1,124 @@ ++package com.khorn.terraincontrol.customobjects; ++ ++import java.util.ArrayList; ++import java.util.Map; ++import java.util.Random; ++ ++import com.khorn.terraincontrol.LocalBiome; ++import com.khorn.terraincontrol.LocalWorld; ++ ++/** ++ * UseBiome is a keyword that spawns the objects in the BiomeConfig/BiomeObjects ++ * setting. ++ * ++ */ ++public class UseBiome implements CustomObject ++{ ++ public ArrayList getPossibleObjectsAt(LocalWorld world, int x, int z) ++ { ++ return world.getSettings().biomeConfigs[world.getBiome(x, z).getId()].biomeObjects; ++ } ++ ++ @Override ++ public String getName() ++ { ++ return ""UseBiome""; ++ } ++ ++ @Override ++ public boolean canSpawnAsTree() ++ { ++ return true; ++ } ++ ++ @Override ++ public boolean canSpawnAsObject() + { -+ ProjectStage ps = getProjectStage(); ++ return true; ++ } + -+ String value = getPropertyValue(key + '.' + ps); -+ if (value == null) ++ @Override ++ public boolean spawn(LocalWorld world, Random random, int x, int y, int z) ++ { ++ for (CustomObject object : getPossibleObjectsAt(world, x, z)) + { -+ value = getPropertyValue(key); ++ if (object.spawn(world, random, x, y, z)) ++ { ++ return true; ++ } + } ++ return false; ++ } + -+ return value; ++ @Override ++ public boolean spawnAsTree(LocalWorld world, Random random, int x, int y, int z) ++ { ++ for (CustomObject object : getPossibleObjectsAt(world, x, z)) ++ { ++ if (object.spawnAsTree(world, random, x, y, z)) ++ { ++ return true; ++ } ++ } ++ return false; + } -+ /** -+ * {@link #getProjectStageAwarePropertyValue(String)} which returns the defaultValue -+ * if the property is null or empty. -+ * @param key - * @param defaultValue - * @return the configured value or if non found the defaultValue - * - */ - public static String getProjectStageAwarePropertyValue(String key, String defaultValue) - { -- ProjectStage ps = getProjectStage(); -+ String value = getProjectStageAwarePropertyValue(key); + -+ if (value == null || value.length() == 0) ++ @Override ++ public boolean spawn(LocalWorld world, Random random, int x, int z) ++ { ++ for (CustomObject object : getPossibleObjectsAt(world, x, z)) + { -+ value = defaultValue; ++ if (object.spawn(world, random, x, z)) ++ { ++ return true; ++ } + } ++ return false; ++ } + -+ return value; ++ @Override ++ public boolean spawnAsTree(LocalWorld world, Random random, int x, int z) ++ { ++ for (CustomObject object : getPossibleObjectsAt(world, x, z)) ++ { ++ if (object.spawnAsTree(world, random, x, z)) ++ { ++ return true; ++ } ++ } ++ return false; + } + -+ /** -+ *

Search for the configured value in all {@link ConfigSource}s and take the -+ * current {@link org.apache.deltaspike.core.api.projectstage.ProjectStage} -+ * and the value configured for the given property into account.

-+ * -+ *

The first step is to resolve the value of the given property. This will -+ * take the current ProjectStage into account. E.g. given the property is 'dbvendor' -+ * and the ProjectStage is 'UnitTest', the first lookup is -+ *

  • 'dbvendor.UnitTest'
. -+ * If this value is not found then we will do a 2nd lookup for -+ *
  • 'dbvendor'

-+ * -+ *

If a value was found for the given property (e.g. dbvendor = 'mysql' -+ * then we will use this value to lookup in the following order until we -+ * found a non-null value. If there was no value found for the property -+ * we will only do the key+ProjectStage and key lookup. -+ * In the following sample 'dataSource' is used as key parameter: -+ * -+ *

    -+ *
  • 'datasource.mysql.UnitTest'
  • -+ *
  • 'datasource.mysql'
  • -+ *
  • 'datasource.UnitTest'
  • -+ *
  • 'datasource'
  • -+ *
-+ *

-+ * -+ * -+ *

Attention: This method must only be used after all ConfigSources -+ * have been registered, and it must not be used to determine the ProjectStage itself.

-+ * @param key -+ * @param property the property to look up first -+ * @return the configured value or if non found the defaultValue -+ * -+ */ -+ public static String getPropertyAwarePropertyValue(String key, String property) ++ @Override ++ public void process(LocalWorld world, Random random, int chunkX, int chunkZ) ++ { ++ for (CustomObject object : getPossibleObjectsAt(world, chunkX * 16 + 8, chunkZ * 16 + 8)) ++ { ++ object.process(world, random, chunkX, chunkZ); ++ } ++ } ++ ++ @Override ++ public void processAsTree(LocalWorld world, Random random, int chunkX, int chunkZ) ++ { ++ for (CustomObject object : getPossibleObjectsAt(world, chunkX * 16 + 8, chunkZ * 16 + 8)) ++ { ++ object.processAsTree(world, random, chunkX, chunkZ); ++ } ++ } ++ ++ @Override ++ public CustomObject applySettings(Map settings) ++ { ++ // Not supported ++ return this; ++ } ++ ++ @Override ++ public boolean hasPreferenceToSpawnIn(LocalBiome biome) ++ { ++ // Never, ever spawn this with UseWorld. ++ return false; ++ } ++ ++} +diff --git a/common/src/com/khorn/terraincontrol/customobjects/UseWorld.java b/common/src/com/khorn/terraincontrol/customobjects/UseWorld.java +new file mode 100644 +index 000000000..cfd50f842 +--- /dev/null ++++ b/common/src/com/khorn/terraincontrol/customobjects/UseWorld.java +@@ -0,0 +1,118 @@ ++package com.khorn.terraincontrol.customobjects; ++ ++import java.util.Map; ++import java.util.Random; ++ ++import com.khorn.terraincontrol.LocalBiome; ++import com.khorn.terraincontrol.LocalWorld; ++ ++/** ++ * UseWorld is a keyword that spawns the objects in the WorldObjects folder. ++ * ++ */ ++public class UseWorld implements CustomObject ++{ ++ ++ @Override ++ public String getName() ++ { ++ return ""UseWorld""; ++ } ++ ++ @Override ++ public boolean canSpawnAsTree() + { -+ String propertyValue = getProjectStageAwarePropertyValue(property); ++ return true; ++ } + -+ String value = null; ++ @Override ++ public boolean canSpawnAsObject() ++ { ++ return true; ++ } + -+ if (propertyValue != null && propertyValue.length() > 0) ++ @Override ++ public boolean spawn(LocalWorld world, Random random, int x, int y, int z) ++ { ++ for(CustomObject object: world.getSettings().customObjects.values()) + { -+ value = getProjectStageAwarePropertyValue(key + '.' + propertyValue); ++ if(object.hasPreferenceToSpawnIn(world.getBiome(x, z)) && object.spawn(world, random, x, y, z)) ++ { ++ return true; ++ } + } - -- String value = getPropertyValue(key + '.' + ps, defaultValue); - if (value == null) - { -- value = getPropertyValue(key, defaultValue); -+ value = getProjectStageAwarePropertyValue(key); ++ return false; ++ } ++ ++ @Override ++ public boolean spawnAsTree(LocalWorld world, Random random, int x, int y, int z) ++ { ++ for(CustomObject object: world.getSettings().customObjects.values()) ++ { ++ if(object.spawnAsTree(world, random, x, y, z)) ++ { ++ return true; ++ } + } ++ return false; ++ } + -+ return value; ++ @Override ++ public boolean spawn(LocalWorld world, Random random, int x, int z) ++ { ++ for(CustomObject object: world.getSettings().customObjects.values()) ++ { ++ if(object.spawn(world, random, x, z)) ++ { ++ return true; ++ } ++ } ++ return false; + } + -+ /* -+ *

Attention: This method must only be used after all ConfigSources -+ * have been registered, and it must not be used to determine the ProjectStage itself.

-+ * @param key -+ * @param property the property to look up first -+ * @param defaultValue -+ * @return the configured value or if non found the defaultValue -+ * -+ */ -+ public static String getPropertyAwarePropertyValue(String key, String property, String defaultValue) ++ @Override ++ public boolean spawnAsTree(LocalWorld world, Random random, int x, int z) + { -+ String value = getPropertyAwarePropertyValue(key, property); ++ for(CustomObject object: world.getSettings().customObjects.values()) ++ { ++ if(object.spawnAsTree(world, random, x, z)) ++ { ++ return true; ++ } ++ } ++ return false; ++ } + -+ if (value == null || value.length() == 0) ++ @Override ++ public void process(LocalWorld world, Random random, int chunkX, int chunkZ) ++ { ++ for(CustomObject object: world.getSettings().customObjects.values()) + { -+ value = defaultValue; - } - - return value; -diff --git a/deltaspike/core/api/src/test/java/org/apache/deltaspike/test/api/config/ConfigResolverTest.java b/deltaspike/core/api/src/test/java/org/apache/deltaspike/test/api/config/ConfigResolverTest.java -index 91ebb22dc..d30e7de52 100644 ---- a/deltaspike/core/api/src/test/java/org/apache/deltaspike/test/api/config/ConfigResolverTest.java -+++ b/deltaspike/core/api/src/test/java/org/apache/deltaspike/test/api/config/ConfigResolverTest.java -@@ -28,6 +28,7 @@ - - public class ConfigResolverTest - { -+ private static final String DEFAULT_VALUE = ""defaultValue""; - @Test - public void testOverruledValue() - { -@@ -60,12 +61,43 @@ public void testGetProjectStageAwarePropertyValue() - Assert.assertNull(ConfigResolver.getProjectStageAwarePropertyValue(""notexisting"", null)); - - Assert.assertEquals(""testvalue"", ConfigResolver.getPropertyValue(""testkey"", null)); -+ Assert.assertEquals(""unittestvalue"", ConfigResolver.getProjectStageAwarePropertyValue(""testkey"")); - Assert.assertEquals(""unittestvalue"", ConfigResolver.getProjectStageAwarePropertyValue(""testkey"", null)); - - Assert.assertEquals(""testvalue"", ConfigResolver.getPropertyValue(""testkey2"", null)); -+ Assert.assertEquals(""testvalue"", ConfigResolver.getProjectStageAwarePropertyValue(""testkey2"")); - Assert.assertEquals(""testvalue"", ConfigResolver.getProjectStageAwarePropertyValue(""testkey2"", null)); - - Assert.assertEquals(""testvalue"", ConfigResolver.getPropertyValue(""testkey3"", null)); -- Assert.assertEquals("""", ConfigResolver.getProjectStageAwarePropertyValue(""testkey3"", null)); -+ Assert.assertEquals("""", ConfigResolver.getProjectStageAwarePropertyValue(""testkey3"")); -+ Assert.assertEquals(DEFAULT_VALUE, ConfigResolver.getProjectStageAwarePropertyValue(""testkey3"", DEFAULT_VALUE)); ++ object.process(world, random, chunkX, chunkZ); ++ } + } + -+ @Test -+ public void testGetPropertyAwarePropertyValue() { -+ ProjectStageProducer.setProjectStage(ProjectStage.UnitTest); ++ @Override ++ public void processAsTree(LocalWorld world, Random random, int chunkX, int chunkZ) ++ { ++ for(CustomObject object: world.getSettings().customObjects.values()) ++ { ++ object.processAsTree(world, random, chunkX, chunkZ); ++ } ++ } + -+ Assert.assertNull(ConfigResolver.getPropertyAwarePropertyValue(""notexisting"", null)); ++ @Override ++ public CustomObject applySettings(Map settings) ++ { ++ // Not supported ++ return this; ++ } + -+ Assert.assertEquals(""testvalue"", ConfigResolver.getPropertyValue(""testkey"", null)); -+ Assert.assertEquals(""unittestvalue"", ConfigResolver.getPropertyAwarePropertyValue(""testkey"", ""dbvendor"")); -+ 
Assert.assertEquals(""unittestvalue"", ConfigResolver.getPropertyAwarePropertyValue(""testkey"", ""dbvendor"", null)); ++ @Override ++ public boolean hasPreferenceToSpawnIn(LocalBiome biome) ++ { ++ // Never, ever spawn this in UseWorld. It would cause an infinite loop. ++ return false; ++ } + -+ Assert.assertEquals(""testvalue"", ConfigResolver.getPropertyValue(""testkey2"", null)); -+ Assert.assertEquals(""testvalue"", ConfigResolver.getPropertyAwarePropertyValue(""testkey2"", ""dbvendor"")); -+ Assert.assertEquals(""testvalue"", ConfigResolver.getPropertyAwarePropertyValue(""testkey2"", ""dbvendor"", null)); ++} +diff --git a/common/src/com/khorn/terraincontrol/customobjects/bo2/BO2.java b/common/src/com/khorn/terraincontrol/customobjects/bo2/BO2.java +new file mode 100644 +index 000000000..638fe02bf +--- /dev/null ++++ b/common/src/com/khorn/terraincontrol/customobjects/bo2/BO2.java +@@ -0,0 +1,454 @@ ++package com.khorn.terraincontrol.customobjects.bo2; + -+ Assert.assertEquals(""testvalue"", ConfigResolver.getPropertyValue(""testkey3"", null)); -+ Assert.assertEquals("""", ConfigResolver.getPropertyAwarePropertyValue(""testkey3"", ""dbvendor"")); -+ Assert.assertEquals(DEFAULT_VALUE, ConfigResolver.getPropertyAwarePropertyValue(""testkey3"", ""dbvendor"", DEFAULT_VALUE)); ++import java.io.File; ++import java.io.IOException; ++import java.util.ArrayList; ++import java.util.HashMap; ++import java.util.HashSet; ++import java.util.Map; ++import java.util.Random; + -+ Assert.assertEquals(""TestDataSource"", ConfigResolver.getPropertyAwarePropertyValue(""dataSource"", ""dbvendor"")); -+ Assert.assertEquals(""PostgreDataSource"", ConfigResolver.getPropertyAwarePropertyValue(""dataSource"", ""dbvendor2"")); -+ Assert.assertEquals(""DefaultDataSource"", ConfigResolver.getPropertyAwarePropertyValue(""dataSource"", ""dbvendorX"")); ++import com.khorn.terraincontrol.DefaultMaterial; ++import com.khorn.terraincontrol.LocalBiome; ++import com.khorn.terraincontrol.LocalWorld; ++import com.khorn.terraincontrol.configuration.ConfigFile; ++import com.khorn.terraincontrol.customobjects.BODefaultValues; ++import com.khorn.terraincontrol.customobjects.CustomObject; ++import com.khorn.terraincontrol.customobjects.ObjectCoordinate; + -+ Assert.assertEquals(""TestDataSource"", ConfigResolver.getPropertyAwarePropertyValue(""dataSource"", ""dbvendor"", null)); -+ Assert.assertEquals(""PostgreDataSource"", ConfigResolver.getPropertyAwarePropertyValue(""dataSource"", ""dbvendor2"", null)); -+ Assert.assertEquals(""DefaultDataSource"", ConfigResolver.getPropertyAwarePropertyValue(""dataSource"", ""dbvendorX"", null)); -+ Assert.assertEquals(DEFAULT_VALUE, ConfigResolver.getPropertyAwarePropertyValue(""dataSourceX"", ""dbvendorX"", DEFAULT_VALUE)); - } - } -diff --git a/deltaspike/core/api/src/test/java/org/apache/deltaspike/test/api/config/TestConfigSource.java b/deltaspike/core/api/src/test/java/org/apache/deltaspike/test/api/config/TestConfigSource.java -index e9e066fba..d0f2c938b 100644 ---- a/deltaspike/core/api/src/test/java/org/apache/deltaspike/test/api/config/TestConfigSource.java -+++ b/deltaspike/core/api/src/test/java/org/apache/deltaspike/test/api/config/TestConfigSource.java -@@ -48,6 +48,20 @@ public TestConfigSource() - // a value which got ProjectStage overloaded to an empty value - props.put(""testkey3"", ""testvalue""); - props.put(""testkey3.UnitTest"", """"); ++/** ++ * The good old BO2. 
++ * ++ */ ++public class BO2 extends ConfigFile implements CustomObject ++{ ++ public ObjectCoordinate[][] Data = new ObjectCoordinate[4][]; + -+ // now for the PropertyAware tests -+ props.put(""dbvendor.UnitTest"", ""mysql""); -+ props.put(""dbvendor"", ""postgresql""); ++ public BO2[] GroupObjects = null; + -+ props.put(""dataSource.mysql.Production"", ""java:/comp/env/MyDs""); -+ props.put(""dataSource.mysql.UnitTest"", ""TestDataSource""); -+ props.put(""dataSource.postgresql"", ""PostgreDataSource""); -+ props.put(""dataSource"", ""DefaultDataSource""); ++ public String Name; + -+ // another one -+ props.put(""dbvendor2.Production"", ""mysql""); -+ props.put(""dbvendor2"", ""postgresql""); ++ public HashSet SpawnInBiome; + - } - - @Override" -3cf09ec9a51263838270cb5f62d5b34cb58f26bb,Vala,"clutter-1.0: add missing type_arguments for several new methods - -Partially fixes bug 609875. -",c,https://github.com/GNOME/vala/,⚠️ Could not parse repo info -1cda4eacbf8e44d03ecdfb6f16ab523306677cbc,restlet-framework-java, - The Request-isConfidential() method has- been refactored to be supported by Message and Response as well.- The method Request-setConfidential() has been removed (back to- Restlet 1.0 state). Added Protocol-isConfidential() method to- support the new implementation which rely on Request-getProtocol().- Reported by Kevin Conaway.--,p,https://github.com/restlet/restlet-framework-java,"diff --git a/build/tmpl/text/changes.txt b/build/tmpl/text/changes.txt -index 3bb3740fe2..51208741e0 100644 ---- a/build/tmpl/text/changes.txt -+++ b/build/tmpl/text/changes.txt -@@ -44,6 +44,12 @@ Changes log - ""style"" attributes are required. - - Added the ability the give a title to WadlApplication or - WadlResource instances. Suggested by Jérôme Bernard. -+ - The Request#isConfidential() method has been refactored to -+ be supported by Message and Response as well. The method -+ Request#setConfidential() has been removed (back to Restlet -+ 1.0 state). Added Protocol#isConfidential() method to support -+ the new implementation which rely on Request#getProtocol(). -+ Reported by Kevin Conaway. - - Misc - - Updated JAX-RS API to version 1.0. The implementation of the - runtime environment is not fully finished yet. 
We are -diff --git a/modules/com.noelios.restlet/src/com/noelios/restlet/component/ChildClientDispatcher.java b/modules/com.noelios.restlet/src/com/noelios/restlet/component/ChildClientDispatcher.java -index 4239efb2cf..6d9f19d948 100644 ---- a/modules/com.noelios.restlet/src/com/noelios/restlet/component/ChildClientDispatcher.java -+++ b/modules/com.noelios.restlet/src/com/noelios/restlet/component/ChildClientDispatcher.java -@@ -72,9 +72,6 @@ public void doHandle(Request request, Response response) { - final Protocol protocol = request.getProtocol(); - - if (protocol.equals(Protocol.RIAP)) { -- // Consider that the request is confidential -- request.setConfidential(true); -- - // Let's dispatch it - final LocalReference cr = new LocalReference(request - .getResourceRef()); -diff --git a/modules/com.noelios.restlet/src/com/noelios/restlet/component/ComponentClientDispatcher.java b/modules/com.noelios.restlet/src/com/noelios/restlet/component/ComponentClientDispatcher.java -index fc66f6e836..d81f983b14 100644 ---- a/modules/com.noelios.restlet/src/com/noelios/restlet/component/ComponentClientDispatcher.java -+++ b/modules/com.noelios.restlet/src/com/noelios/restlet/component/ComponentClientDispatcher.java -@@ -64,9 +64,6 @@ protected void doHandle(Request request, Response response) { - final Protocol protocol = request.getProtocol(); - - if (protocol.equals(Protocol.RIAP)) { -- // Consider that the request is confidential -- request.setConfidential(true); -- - // Let's dispatch it - final LocalReference cr = new LocalReference(request - .getResourceRef()); -diff --git a/modules/com.noelios.restlet/src/com/noelios/restlet/http/HttpRequest.java b/modules/com.noelios.restlet/src/com/noelios/restlet/http/HttpRequest.java -index ab6f7fa405..120613d7f6 100644 ---- a/modules/com.noelios.restlet/src/com/noelios/restlet/http/HttpRequest.java -+++ b/modules/com.noelios.restlet/src/com/noelios/restlet/http/HttpRequest.java -@@ -103,14 +103,6 @@ public HttpRequest(Context context, HttpServerCall httpCall) { - // Set the properties - setMethod(Method.valueOf(httpCall.getMethod())); - -- if (getHttpCall().isConfidential()) { -- setConfidential(true); -- } else { -- // We don't want to autocreate the security data just for this -- // information, because that will by the default value of this -- // property if read by someone. -- } -- - // Set the host reference - final StringBuilder sb = new StringBuilder(); - sb.append(httpCall.getProtocol().getSchemeName()).append(""://""); -diff --git a/modules/org.restlet/src/org/restlet/data/Message.java b/modules/org.restlet/src/org/restlet/data/Message.java -index d5ba7847fc..72fdfcd15e 100644 ---- a/modules/org.restlet/src/org/restlet/data/Message.java -+++ b/modules/org.restlet/src/org/restlet/data/Message.java -@@ -78,49 +78,50 @@ public Message(Representation entity) { - this.saxRepresentation = null; - } - --/** -- * Returns the modifiable map of attributes that can be used by developers -- * to save information relative to the message. Creates a new instance if no -- * one has been set. This is an easier alternative to the creation of a -- * wrapper instance around the whole message.
-- *
-- * -- * In addition, this map is a shared space between the developer and the -- * connectors. In this case, it is used to exchange information that is not -- * uniform across all protocols and couldn't therefore be directly included -- * in the API. For this purpose, all attribute names starting with -- * ""org.restlet"" are reserved. Currently the following attributes are used: -- * -- * -- * -- * -- * -- * -- * -- * -- * -- * -- * -- * -- * -- * -- * -- * -- *
Attribute name | Class name | Description
org.restlet.http.headers | org.restlet.data.Form | Server HTTP connectors must provide all request headers and client -- * HTTP connectors must provide all response headers, exactly as they were -- * received. In addition, developers can also use this attribute to specify -- * non-standard headers that should be added to the request or to -- * the response.
org.restlet.https.clientCertificates | List | For requests received via a secure connector, indicates the ordered -- * list of client certificates, if they are available and accessible.

-- * Most of the standard HTTP headers are directly supported via the Restlet -- * API. Thus, adding such HTTP headers is forbidden because it could -- * conflict with the connector's internal behavior, limit portability or -- * prevent future optimizations. The other standard HTTP headers (that are -- * not supported) can be added as attributes via the -- * ""org.restlet.http.headers"" key.
-- * -- * @return The modifiable attributes map. -- */ -+ /** -+ * Returns the modifiable map of attributes that can be used by developers -+ * to save information relative to the message. Creates a new instance if no -+ * one has been set. This is an easier alternative to the creation of a -+ * wrapper instance around the whole message.
-+ *
-+ * -+ * In addition, this map is a shared space between the developer and the -+ * connectors. In this case, it is used to exchange information that is not -+ * uniform across all protocols and couldn't therefore be directly included -+ * in the API. For this purpose, all attribute names starting with -+ * ""org.restlet"" are reserved. Currently the following attributes are used: -+ * -+ * -+ * -+ * -+ * -+ * -+ * -+ * -+ * -+ * -+ * -+ * -+ * -+ * -+ * -+ * -+ *
Attribute name | Class name | Description
org.restlet.http.headers | org.restlet.data.Form | Server HTTP connectors must provide all request headers and client -+ * HTTP connectors must provide all response headers, exactly as they were -+ * received. In addition, developers can also use this attribute to specify -+ * non-standard headers that should be added to the request or to the -+ * response.
org.restlet.https.clientCertificates | List | For requests received via a secure connector, indicates the ordered -+ * list of client certificates, if they are available and accessible.
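A minimal sketch of supplying a non-standard header through the ""org.restlet.http.headers"" attribute described in this table; the wrapper class, its method and the ""X-Custom-Header"" name are illustrative assumptions:

import org.restlet.data.Form;
import org.restlet.data.Response;

public class CustomHeaderSketch
{
    public static void addCustomHeader(Response response)
    {
        // Non-standard headers travel through the shared attributes map under this reserved key.
        Form headers = (Form) response.getAttributes().get(""org.restlet.http.headers"");
        if (headers == null)
        {
            headers = new Form();
            response.getAttributes().put(""org.restlet.http.headers"", headers);
        }
        headers.add(""X-Custom-Header"", ""some value"");
    }
}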
-+ *
-+ * Most of the standard HTTP headers are directly supported via the Restlet -+ * API. Thus, adding such HTTP headers is forbidden because it could -+ * conflict with the connector's internal behavior, limit portability or -+ * prevent future optimizations. The other standard HTTP headers (that are -+ * not supported) can be added as attributes via the -+ * ""org.restlet.http.headers"" key.
-+ * -+ * @return The modifiable attributes map. -+ */ - public Map getAttributes() { - if (this.attributes == null) { - this.attributes = new TreeMap(); -@@ -234,6 +235,14 @@ public SaxRepresentation getEntityAsSax() { - return this.saxRepresentation; - } - -+ /** -+ * Indicates if the message was or will be exchanged confidentially, for -+ * example via a SSL-secured connection. -+ * -+ * @return True if the message is confidential. -+ */ -+ public abstract boolean isConfidential(); ++ public String Version; ++ public HashSet SpawnOnBlockType; + - /** - * Indicates if a content is available and can be sent. Several conditions - * must be met: the content must exists and have some available data. -diff --git a/modules/org.restlet/src/org/restlet/data/Protocol.java b/modules/org.restlet/src/org/restlet/data/Protocol.java -index f21d806ff5..65669bbd3e 100644 ---- a/modules/org.restlet/src/org/restlet/data/Protocol.java -+++ b/modules/org.restlet/src/org/restlet/data/Protocol.java -@@ -59,7 +59,7 @@ public final class Protocol extends Metadata { - * @see org.restlet.data.LocalReference - */ - public static final Protocol CLAP = new Protocol(""clap"", ""CLAP"", -- ""Class Loader Access Protocol"", UNKNOWN_PORT); -+ ""Class Loader Access Protocol"", UNKNOWN_PORT, true); - - /** - * FILE is a standard scheme to access to representations stored in the file -@@ -72,7 +72,7 @@ public final class Protocol extends Metadata { - * @see org.restlet.data.LocalReference - */ - public static final Protocol FILE = new Protocol(""file"", ""FILE"", -- ""Local File System Protocol"", UNKNOWN_PORT); -+ ""Local File System Protocol"", UNKNOWN_PORT, true); - - /** FTP protocol. */ - public static final Protocol FTP = new Protocol(""ftp"", ""FTP"", -@@ -84,7 +84,7 @@ public final class Protocol extends Metadata { - - /** HTTPS protocol (via SSL socket). */ - public static final Protocol HTTPS = new Protocol(""https"", ""HTTPS"", -- ""HyperText Transport Protocol (Secure)"", 443); -+ ""HyperText Transport Protocol (Secure)"", 443, true); - - /** - * JAR (Java ARchive) is a common scheme to access to representations inside -@@ -94,7 +94,7 @@ public final class Protocol extends Metadata { - * @see org.restlet.data.LocalReference - */ - public static final Protocol JAR = new Protocol(""jar"", ""JAR"", -- ""Java ARchive"", UNKNOWN_PORT); -+ ""Java ARchive"", UNKNOWN_PORT, true); - - /** JDBC protocol. */ - public static final Protocol JDBC = new Protocol(""jdbc"", ""JDBC"", -@@ -106,7 +106,7 @@ public final class Protocol extends Metadata { - - /** POPS protocol (via SSL/TLS socket).. */ - public static final Protocol POPS = new Protocol(""pops"", ""POPS"", -- ""Post Office Protocol (Secure)"", 995); -+ ""Post Office Protocol (Secure)"", 995, true); - - /** - * RIAP (Restlet Internal Access Protocol) is a custom scheme to access -@@ -120,7 +120,7 @@ public final class Protocol extends Metadata { - * @see org.restlet.data.LocalReference - */ - public static final Protocol RIAP = new Protocol(""riap"", ""RIAP"", -- ""Restlet Internal Access Protocol"", UNKNOWN_PORT); -+ ""Restlet Internal Access Protocol"", UNKNOWN_PORT, true); - - /** SMTP protocol. 
*/ - public static final Protocol SMTP = new Protocol(""smtp"", ""SMTP"", -@@ -135,15 +135,16 @@ public final class Protocol extends Metadata { - @Deprecated - public static final Protocol SMTP_STARTTLS = new Protocol(""smtp"", - ""SMTP_STARTTLS"", -- ""Simple Mail Transfer Protocol (starting a TLS encryption)"", 25); -+ ""Simple Mail Transfer Protocol (starting a TLS encryption)"", 25, -+ true); - - /** SMTPS protocol (via SSL/TLS socket). */ - public static final Protocol SMTPS = new Protocol(""smtps"", ""SMTPS"", -- ""Simple Mail Transfer Protocol (Secure)"", 465); -+ ""Simple Mail Transfer Protocol (Secure)"", 465, true); - - /** Local Web Archive access protocol. */ - public static final Protocol WAR = new Protocol(""war"", ""WAR"", -- ""Web Archive Access Protocol"", UNKNOWN_PORT); -+ ""Web Archive Access Protocol"", UNKNOWN_PORT, true); - - /** - * Creates the protocol associated to a URI scheme name. If an existing -@@ -195,6 +196,9 @@ public static Protocol valueOf(final String name) { - return result; - } - -+ /** The confidentiality. */ -+ private volatile boolean confidential; ++ public HashSet CollisionBlockType; + - /** The default port if known or -1. */ - private volatile int defaultPort; - -@@ -226,9 +230,30 @@ public Protocol(final String schemeName) { - */ - public Protocol(final String schemeName, final String name, - final String description, int defaultPort) { -+ this(schemeName, name, description, defaultPort, false); ++ public boolean SpawnWater; ++ public boolean SpawnLava; ++ public boolean SpawnAboveGround; ++ public boolean SpawnUnderGround; ++ ++ public boolean SpawnSunlight; ++ public boolean SpawnDarkness; ++ ++ public boolean UnderFill; ++ public boolean RandomRotation; ++ public boolean Dig; ++ public boolean Tree; ++ public boolean Branch; ++ public boolean DiggingBranch; ++ public boolean NeedsFoundation; ++ public int Rarity; ++ public double CollisionPercentage; ++ public int SpawnElevationMin; ++ public int SpawnElevationMax; ++ ++ public int GroupFrequencyMin; ++ public int GroupFrequencyMax; ++ public int GroupSeparationMin; ++ public int GroupSeparationMax; ++ public String GroupId; ++ ++ public int BranchLimit; ++ ++ public BO2(File file, String name) ++ { ++ ReadSettingsFile(file); ++ this.Name = name; ++ ++ ReadConfigSettings(); ++ CorrectSettings(); + } + -+ /** -+ * Constructor. -+ * -+ * @param schemeName -+ * The scheme name. -+ * @param name -+ * The unique name. -+ * @param description -+ * The description. -+ * @param defaultPort -+ * The default port. -+ * @param confidential -+ * The confidentiality. -+ */ -+ public Protocol(final String schemeName, final String name, -+ final String description, int defaultPort, -+ final boolean confidential) { - super(name, description); - this.schemeName = schemeName; - this.defaultPort = defaultPort; -+ this.confidential = confidential; - } - - /** {@inheritDoc} */ -@@ -261,4 +286,15 @@ public String getSchemeName() { - public int hashCode() { - return (getName() == null) ? 0 : getName().toLowerCase().hashCode(); - } ++ public BO2(Map settings, String name) ++ { ++ SettingsCache = settings; ++ this.Name = name; + -+ /** -+ * Indicates if the protocol guarantees the confidentially of the messages -+ * exchanged, for example via a SSL-secured connection. -+ * -+ * @return True if the protocol is confidential. 
-+ */ -+ public boolean isConfidential() { -+ return this.confidential; ++ ReadConfigSettings(); ++ CorrectSettings(); + } + - } -diff --git a/modules/org.restlet/src/org/restlet/data/Request.java b/modules/org.restlet/src/org/restlet/data/Request.java -index 9c81cbd5d5..300e05e0ab 100644 ---- a/modules/org.restlet/src/org/restlet/data/Request.java -+++ b/modules/org.restlet/src/org/restlet/data/Request.java -@@ -109,9 +109,6 @@ public static Request getCurrent() { - /** The condition data. */ - private volatile Conditions conditions; - -- /** Indicates if the call came over a confidential channel. */ -- private volatile boolean confidential; -- - /** The cookies provided by the client. */ - private volatile Series cookies; - -@@ -140,7 +137,6 @@ public static Request getCurrent() { - * Constructor. - */ - public Request() { -- this.confidential = false; - } - - /** -@@ -387,13 +383,12 @@ public Reference getRootRef() { - } - - /** -- * Indicates if the call came over a confidential channel such as an -- * SSL-secured connection. -- * -- * @return True if the call came over a confidential channel. -+ * Implemented based on the {@link Protocol#isConfidential()} method for the -+ * request's protocol returned by {@link #getProtocol()}; - */ + @Override - public boolean isConfidential() { -- return this.confidential; -+ return (getProtocol() == null) ? false : getProtocol().isConfidential(); - } - - /** -@@ -444,17 +439,6 @@ public void setConditions(Conditions conditions) { - this.conditions = conditions; - } - -- /** -- * Indicates if the call came over a confidential channel such as an -- * SSL-secured connection. -- * -- * @param confidential -- * True if the call came over a confidential channel. -- */ -- public void setConfidential(boolean confidential) { -- this.confidential = confidential; -- } -- - /** - * Sets the cookies provided by the client. - * -diff --git a/modules/org.restlet/src/org/restlet/data/Response.java b/modules/org.restlet/src/org/restlet/data/Response.java -index cd34d0b5a1..b88c96c3ef 100644 ---- a/modules/org.restlet/src/org/restlet/data/Response.java -+++ b/modules/org.restlet/src/org/restlet/data/Response.java -@@ -307,6 +307,11 @@ public Status getStatus() { - return this.status; - } - ++ public String getName() ++ { ++ return this.Name; ++ } ++ + @Override -+ public boolean isConfidential() { -+ return getRequest().isConfidential(); ++ public boolean canSpawnAsTree() ++ { ++ return Tree; + } + - /** - * Permanently redirects the client to a target URI. The client is expected - * to reuse the same method for the new request. -diff --git a/modules/org.restlet/src/org/restlet/util/WrapperRequest.java b/modules/org.restlet/src/org/restlet/util/WrapperRequest.java -index 039e608531..a8c52196d0 100644 ---- a/modules/org.restlet/src/org/restlet/util/WrapperRequest.java -+++ b/modules/org.restlet/src/org/restlet/util/WrapperRequest.java -@@ -315,18 +315,6 @@ public void setChallengeResponse(ChallengeResponse response) { - getWrappedRequest().setChallengeResponse(response); - } - -- /** -- * Indicates if the call came over a confidential channel such as an -- * SSL-secured connection. -- * -- * @param confidential -- * True if the call came over a confidential channel. -- */ -- @Override -- public void setConfidential(boolean confidential) { -- getWrappedRequest().setConfidential(confidential); -- } -- - /** - * Sets the entity from a higher-level object. This object is converted to a - * representation using the Application's converter service. 
If you want to -diff --git a/modules/org.restlet/src/org/restlet/util/WrapperResponse.java b/modules/org.restlet/src/org/restlet/util/WrapperResponse.java -index f75306c2c1..1fc4fdf01d 100644 ---- a/modules/org.restlet/src/org/restlet/util/WrapperResponse.java -+++ b/modules/org.restlet/src/org/restlet/util/WrapperResponse.java -@@ -301,6 +301,17 @@ protected Response getWrappedResponse() { - return this.wrappedResponse; - } - -+ /** -+ * Indicates if the call came over a confidential channel such as an -+ * SSL-secured connection. -+ * -+ * @return True if the call came over a confidential channel. -+ */ + @Override -+ public boolean isConfidential() { -+ return getWrappedResponse().isConfidential(); ++ public boolean canSpawnAsObject() ++ { ++ return true; + } + - /** - * Indicates if a content is available and can be sent. Several conditions - * must be met: the content must exists and have some available data." -8ee632caa79b92b1af98684f83b01c3447a119ee,hadoop,YARN-2740. Fix NodeLabelsManager to properly handle- node label modifications when distributed node label configuration enabled.- (Naganarasimha G R via wangda)--(cherry picked from commit db1b674b50ddecf2774f4092d677c412722bdcb1)-,c,https://github.com/apache/hadoop,"diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt -index 20de1edb7efbe..ca9247f13eb2d 100644 ---- a/hadoop-yarn-project/CHANGES.txt -+++ b/hadoop-yarn-project/CHANGES.txt -@@ -217,6 +217,9 @@ Release 2.8.0 - UNRELEASED - YARN-3530. ATS throws exception on trying to filter results without otherinfo. - (zhijie shen via xgong) - -+ YARN-2740. Fix NodeLabelsManager to properly handle node label modifications -+ when distributed node label configuration enabled. (Naganarasimha G R via wangda) ++ @Override ++ public boolean spawn(LocalWorld world, Random random, int x, int y, int z) ++ { ++ if (!ObjectCanSpawn(world, x, y, z)) ++ { ++ return false; ++ } + - Release 2.7.1 - UNRELEASED - - INCOMPATIBLE CHANGES -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java -index c8f9648147fb3..4dd01d24bb8d9 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java -@@ -1779,6 +1779,12 @@ private static void addDeprecatedKeys() { - public static final String DEFAULT_NODELABEL_CONFIGURATION_TYPE = - CENTALIZED_NODELABEL_CONFIGURATION_TYPE; - -+ @Private -+ public static boolean isDistributedNodeLabelConfiguration(Configuration conf) { -+ return DISTRIBUTED_NODELABEL_CONFIGURATION_TYPE.equals(conf.get( -+ NODELABEL_CONFIGURATION_TYPE, DEFAULT_NODELABEL_CONFIGURATION_TYPE)); -+ } ++ ObjectCoordinate[] data = Data[0]; ++ if (RandomRotation) ++ data = Data[random.nextInt(4)]; + - public YarnConfiguration() { - super(); - } -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/CommonNodeLabelsManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/CommonNodeLabelsManager.java -index 7493169201ebc..f2ff0f629971c 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/CommonNodeLabelsManager.java -+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/CommonNodeLabelsManager.java -@@ -97,6 +97,8 @@ public class CommonNodeLabelsManager extends AbstractService { - protected NodeLabelsStore store; - private boolean nodeLabelsEnabled = false; - -+ private boolean isDistributedNodeLabelConfiguration = false; ++ int faultCounter = 0; + - /** - * A Host can have multiple Nodes - */ -@@ -213,6 +215,10 @@ protected void serviceInit(Configuration conf) throws Exception { - nodeLabelsEnabled = - conf.getBoolean(YarnConfiguration.NODE_LABELS_ENABLED, - YarnConfiguration.DEFAULT_NODE_LABELS_ENABLED); ++ for (ObjectCoordinate point : data) ++ { ++ if (!world.isLoaded((x + point.x), (y + point.y), (z + point.z))) ++ return false; + -+ isDistributedNodeLabelConfiguration = -+ YarnConfiguration.isDistributedNodeLabelConfiguration(conf); ++ if (!Dig) ++ { ++ if (CollisionBlockType.contains(world.getTypeId((x + point.x), (y + point.y), (z + point.z)))) ++ { ++ faultCounter++; ++ if (faultCounter > (data.length * (CollisionPercentage / 100))) ++ { ++ return false; ++ } ++ } ++ } + - if (nodeLabelsEnabled) { - initNodeLabelStore(conf); - } -@@ -223,7 +229,7 @@ protected void serviceInit(Configuration conf) throws Exception { - protected void initNodeLabelStore(Configuration conf) throws Exception { - this.store = new FileSystemNodeLabelsStore(this); - this.store.init(conf); -- this.store.recover(); -+ this.store.recover(isDistributedNodeLabelConfiguration); - } - - // for UT purpose -@@ -613,7 +619,10 @@ protected void internalUpdateLabelsOnNodes( - } - } - -- if (null != dispatcher) { -+ if (null != dispatcher && !isDistributedNodeLabelConfiguration) { -+ // In case of DistributedNodeLabelConfiguration, no need to save the the -+ // NodeLabels Mapping to the back-end store, as on RM restart/failover -+ // NodeLabels are collected from NM through Register/Heartbeat again - dispatcher.getEventHandler().handle( - new UpdateNodeToLabelsMappingsEvent(newNMToLabels)); - } -@@ -799,8 +808,10 @@ public List getClusterNodeLabels() { - readLock.lock(); - List nodeLabels = new ArrayList<>(); - for (RMNodeLabel label : labelCollections.values()) { -- nodeLabels.add(NodeLabel.newInstance(label.getLabelName(), -- label.getIsExclusive())); -+ if (!label.getLabelName().equals(NO_LABEL)) { -+ nodeLabels.add(NodeLabel.newInstance(label.getLabelName(), -+ label.getIsExclusive())); + } - } - return nodeLabels; - } finally { -@@ -824,7 +835,6 @@ public boolean isExclusiveNodeLabel(String nodeLabel) throws IOException { - readLock.unlock(); - } - } -- - - private void checkAndThrowLabelName(String label) throws IOException { - if (label == null || label.isEmpty() || label.length() > MAX_LABEL_LENGTH) { -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/FileSystemNodeLabelsStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/FileSystemNodeLabelsStore.java -index ea185f2c0a248..f26e2048a02cd 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/FileSystemNodeLabelsStore.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/FileSystemNodeLabelsStore.java -@@ -154,8 +154,12 @@ public void removeClusterNodeLabels(Collection labels) - ensureCloseEditlogFile(); - } - -+ /* (non-Javadoc) -+ * @see 
org.apache.hadoop.yarn.nodelabels.NodeLabelsStore#recover(boolean) -+ */ - @Override -- public void recover() throws YarnException, IOException { -+ public void recover(boolean ignoreNodeToLabelsMappings) throws YarnException, -+ IOException { - /* - * Steps of recover - * 1) Read from last mirror (from mirror or mirror.old) -@@ -222,7 +226,15 @@ public void recover() throws YarnException, IOException { - new ReplaceLabelsOnNodeRequestPBImpl( - ReplaceLabelsOnNodeRequestProto.parseDelimitedFrom(is)) - .getNodeToLabels(); -- mgr.replaceLabelsOnNode(map); -+ if (!ignoreNodeToLabelsMappings) { -+ /* -+ * In case of Distributed NodeLabels setup, -+ * ignoreNodeToLabelsMappings will be set to true and recover will -+ * be invoked. As RM will collect the node labels from NM through -+ * registration/HB -+ */ -+ mgr.replaceLabelsOnNode(map); ++ ++ for (ObjectCoordinate point : data) ++ { ++ ++ if (world.getTypeId(x + point.x, y + point.y, z + point.z) == 0) ++ { ++ world.setBlock((x + point.x), y + point.y, z + point.z, point.BlockId, point.BlockData, true, false, true); ++ } else if (Dig) ++ { ++ world.setBlock((x + point.x), y + point.y, z + point.z, point.BlockId, point.BlockData, true, false, true); + } - break; - } - } -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/NodeLabelsStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/NodeLabelsStore.java -index 47b7370dff843..46b94fd0d5c9a 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/NodeLabelsStore.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/NodeLabelsStore.java -@@ -56,9 +56,18 @@ public abstract void removeClusterNodeLabels(Collection labels) - throws IOException; - - /** -- * Recover labels and node to labels mappings from store -+ * Recover labels and node to labels mappings from store, but if -+ * ignoreNodeToLabelsMappings is true then node to labels mappings should not -+ * be recovered. 
In case of Distributed NodeLabels setup -+ * ignoreNodeToLabelsMappings will be set to true and recover will be invoked -+ * as RM will collect the node labels from NM through registration/HB -+ * -+ * @param ignoreNodeToLabelsMappings -+ * @throws IOException -+ * @throws YarnException - */ -- public abstract void recover() throws IOException, YarnException; -+ public abstract void recover(boolean ignoreNodeToLabelsMappings) -+ throws IOException, YarnException; - - public void init(Configuration conf) throws Exception {} - -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/DummyCommonNodeLabelsManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/DummyCommonNodeLabelsManager.java -index 48d6dc877154b..fce663a1c952c 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/DummyCommonNodeLabelsManager.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/DummyCommonNodeLabelsManager.java -@@ -39,7 +39,8 @@ public void initNodeLabelStore(Configuration conf) { - this.store = new NodeLabelsStore(this) { - - @Override -- public void recover() throws IOException { -+ public void recover(boolean ignoreNodeToLabelsMappings) -+ throws IOException { - } - - @Override -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestCommonNodeLabelsManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestCommonNodeLabelsManager.java -index beb2cf8585851..09838b43ada1d 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestCommonNodeLabelsManager.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestCommonNodeLabelsManager.java -@@ -554,4 +554,29 @@ private void verifyNodeLabelAdded(Set expectedAddedLabelNames, - Assert.assertTrue(expectedAddedLabelNames.contains(label.getName())); - } - } + -+ @Test(timeout = 5000) -+ public void testReplaceLabelsOnNodeInDistributedMode() throws Exception { -+ //create new DummyCommonNodeLabelsManager than the one got from @before -+ mgr.stop(); -+ mgr = new DummyCommonNodeLabelsManager(); -+ Configuration conf = new YarnConfiguration(); -+ conf.setBoolean(YarnConfiguration.NODE_LABELS_ENABLED, true); -+ conf.set(YarnConfiguration.NODELABEL_CONFIGURATION_TYPE, -+ YarnConfiguration.DISTRIBUTED_NODELABEL_CONFIGURATION_TYPE); ++ } ++ return true; ++ } + -+ mgr.init(conf); -+ mgr.start(); ++ @Override ++ public boolean spawnAsTree(LocalWorld world, Random random, int x, int y, int z) ++ { ++ if (!Tree) ++ { ++ // Can only spawn as a tree if this is a tree. 
++ return false; ++ } ++ return spawn(world, random, x, y, z); ++ } + -+ mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet(""p1"", ""p2"", ""p3"")); -+ mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId(""n1""), toSet(""p1""))); -+ Set labelsByNode = mgr.getLabelsByNode(toNodeId(""n1"")); ++ @Override ++ public boolean spawn(LocalWorld world, Random random, int x, int z) ++ { ++ int y; ++ if (SpawnAboveGround) ++ y = world.getSolidHeight(x, z); ++ else if (SpawnUnderGround) ++ { ++ int solidHeight = world.getSolidHeight(x, z); ++ if (solidHeight < 1 || solidHeight <= SpawnElevationMin) ++ return false; ++ if (solidHeight > SpawnElevationMax) ++ solidHeight = SpawnElevationMax; ++ y = random.nextInt(solidHeight - SpawnElevationMin) + SpawnElevationMin; ++ } else ++ y = world.getHighestBlockYAt(x, z); + -+ Assert.assertNull( -+ ""Labels are not expected to be written to the NodeLabelStore"", -+ mgr.lastNodeToLabels); -+ Assert.assertNotNull(""Updated labels should be available from the Mgr"", -+ labelsByNode); -+ Assert.assertTrue(labelsByNode.contains(""p1"")); -+ } - } -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestFileSystemNodeLabelsStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestFileSystemNodeLabelsStore.java -index f070c205f5a1c..fb60cd6a6427c 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestFileSystemNodeLabelsStore.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestFileSystemNodeLabelsStore.java -@@ -144,6 +144,40 @@ public void testRecoverWithMirror() throws Exception { - mgr.stop(); - } - -+ @SuppressWarnings({ ""unchecked"", ""rawtypes"" }) -+ @Test(timeout = 10000) -+ public void testRecoverWithDistributedNodeLabels() throws Exception { -+ mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet(""p1"", ""p2"", ""p3"")); -+ mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet(""p4"")); -+ mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet(""p5"", ""p6"")); -+ mgr.replaceLabelsOnNode((Map) ImmutableMap.of(toNodeId(""n1""), toSet(""p1""), -+ toNodeId(""n2""), toSet(""p2""))); -+ mgr.replaceLabelsOnNode((Map) ImmutableMap.of(toNodeId(""n3""), toSet(""p3""), -+ toNodeId(""n4""), toSet(""p4""), toNodeId(""n5""), toSet(""p5""), -+ toNodeId(""n6""), toSet(""p6""), toNodeId(""n7""), toSet(""p6""))); ++ if (y < 0) ++ return false; + -+ mgr.removeFromClusterNodeLabels(toSet(""p1"")); -+ mgr.removeFromClusterNodeLabels(Arrays.asList(""p3"", ""p5"")); -+ mgr.stop(); ++ if (!ObjectCanSpawn(world, x, y, z)) ++ return false; + -+ mgr = new MockNodeLabelManager(); -+ Configuration cf = new Configuration(conf); -+ cf.set(YarnConfiguration.NODELABEL_CONFIGURATION_TYPE, -+ YarnConfiguration.DISTRIBUTED_NODELABEL_CONFIGURATION_TYPE); -+ mgr.init(cf); ++ boolean objectSpawned = this.spawn(world, random, x, y, z); + -+ // check variables -+ Assert.assertEquals(3, mgr.getClusterNodeLabels().size()); -+ Assert.assertTrue(mgr.getClusterNodeLabelNames().containsAll( -+ Arrays.asList(""p2"", ""p4"", ""p6""))); ++ if (objectSpawned) ++ GenerateCustomObjectFromGroup(world, random, x, y, z); + -+ Assert.assertTrue(""During recovery in distributed node-labels setup, "" -+ + ""node to labels mapping should not be recovered "", mgr -+ .getNodeLabels().size() == 0); ++ return objectSpawned; ++ } + -+ mgr.stop(); -+ } ++ @Override ++ public 
boolean spawnAsTree(LocalWorld world, Random random, int x, int z) ++ { ++ return spawn(world, random, x, z); ++ } + - @SuppressWarnings({ ""unchecked"", ""rawtypes"" }) - @Test(timeout = 10000) - public void testEditlogRecover() throws Exception { -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java -index c921326fbdce3..0ad90c0ed4c6b 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java -@@ -112,6 +112,9 @@ public class AdminService extends CompositeService implements - private final RecordFactory recordFactory = - RecordFactoryProvider.getRecordFactory(null); - -+ @VisibleForTesting -+ boolean isDistributedNodeLabelConfiguration = false; ++ @Override ++ public void process(LocalWorld world, Random random, int chunkX, int chunkZ) ++ { ++ int randomRoll = random.nextInt(100); ++ int ObjectRarity = Rarity; + - public AdminService(ResourceManager rm, RMContext rmContext) { - super(AdminService.class.getName()); - this.rm = rm; -@@ -141,6 +144,10 @@ public void serviceInit(Configuration conf) throws Exception { - YarnConfiguration.DEFAULT_YARN_ADMIN_ACL)), UserGroupInformation - .getCurrentUser()); - rmId = conf.get(YarnConfiguration.RM_HA_ID); ++ while (randomRoll < ObjectRarity) ++ { ++ ObjectRarity -= 100; + -+ isDistributedNodeLabelConfiguration = -+ YarnConfiguration.isDistributedNodeLabelConfiguration(conf); ++ int x = chunkX * 16 + random.nextInt(16); ++ int z = chunkZ * 16 + random.nextInt(16); + - super.serviceInit(conf); - } - -@@ -637,32 +644,35 @@ public AddToClusterNodeLabelsResponse addToClusterNodeLabels(AddToClusterNodeLab - @Override - public RemoveFromClusterNodeLabelsResponse removeFromClusterNodeLabels( - RemoveFromClusterNodeLabelsRequest request) throws YarnException, IOException { -- String argName = ""removeFromClusterNodeLabels""; -+ String operation = ""removeFromClusterNodeLabels""; - final String msg = ""remove labels.""; -- UserGroupInformation user = checkAcls(argName); - -- checkRMStatus(user.getShortUserName(), argName, msg); -+ UserGroupInformation user = checkAcls(operation); ++ spawn(world, random, x, z); ++ } ++ } + -+ checkRMStatus(user.getShortUserName(), operation, msg); - - RemoveFromClusterNodeLabelsResponse response = - recordFactory.newRecordInstance(RemoveFromClusterNodeLabelsResponse.class); - try { - rmContext.getNodeLabelManager().removeFromClusterNodeLabels(request.getNodeLabels()); - RMAuditLogger -- .logSuccess(user.getShortUserName(), argName, ""AdminService""); -+ .logSuccess(user.getShortUserName(), operation, ""AdminService""); - return response; - } catch (IOException ioe) { -- throw logAndWrapException(ioe, user.getShortUserName(), argName, msg); -+ throw logAndWrapException(ioe, user.getShortUserName(), operation, msg); - } - } - - @Override - public ReplaceLabelsOnNodeResponse replaceLabelsOnNode( - ReplaceLabelsOnNodeRequest request) throws YarnException, IOException { -- String argName = ""replaceLabelsOnNode""; -+ String operation = ""replaceLabelsOnNode""; 
- final String msg = ""set node to labels.""; -- UserGroupInformation user = checkAcls(argName); ++ @Override ++ public void processAsTree(LocalWorld world, Random random, int chunkX, int chunkZ) ++ { ++ if (!Tree) ++ { ++ return; ++ } ++ ++ process(world, random, chunkX, chunkZ); ++ } ++ ++ @Override ++ public CustomObject applySettings(Map extraSettings) ++ { ++ Map newSettings = new HashMap(); ++ newSettings.putAll(SettingsCache); ++ newSettings.putAll(extraSettings); ++ return new BO2(newSettings, getName()); ++ } ++ ++ @Override ++ protected void WriteConfigSettings() throws IOException ++ { ++ // It doesn't write. ++ } ++ ++ @Override ++ protected void ReadConfigSettings() ++ { ++ this.Version = ReadModSettings(BODefaultValues.version.name(), BODefaultValues.version.stringValue()); ++ ++ this.SpawnOnBlockType = this.ReadBlockList(ReadModSettings(BODefaultValues.spawnOnBlockType.name(), BODefaultValues.spawnOnBlockType.StringArrayListValue()), BODefaultValues.spawnOnBlockType.name()); ++ this.CollisionBlockType = this.ReadBlockList(ReadModSettings(BODefaultValues.collisionBlockType.name(), BODefaultValues.collisionBlockType.StringArrayListValue()), BODefaultValues.collisionBlockType.name()); ++ ++ this.SpawnInBiome = new HashSet(ReadModSettings(BODefaultValues.spawnInBiome.name(), BODefaultValues.spawnInBiome.StringArrayListValue())); ++ ++ this.SpawnSunlight = ReadModSettings(BODefaultValues.spawnSunlight.name(), BODefaultValues.spawnSunlight.booleanValue()); ++ this.SpawnDarkness = ReadModSettings(BODefaultValues.spawnDarkness.name(), BODefaultValues.spawnDarkness.booleanValue()); ++ this.SpawnWater = ReadModSettings(BODefaultValues.spawnWater.name(), BODefaultValues.spawnWater.booleanValue()); ++ this.SpawnLava = ReadModSettings(BODefaultValues.spawnLava.name(), BODefaultValues.spawnLava.booleanValue()); ++ this.SpawnAboveGround = ReadModSettings(BODefaultValues.spawnAboveGround.name(), BODefaultValues.spawnAboveGround.booleanValue()); ++ this.SpawnUnderGround = ReadModSettings(BODefaultValues.spawnUnderGround.name(), BODefaultValues.spawnUnderGround.booleanValue()); ++ ++ this.UnderFill = ReadModSettings(BODefaultValues.underFill.name(), BODefaultValues.underFill.booleanValue()); ++ ++ this.RandomRotation = ReadModSettings(BODefaultValues.randomRotation.name(), BODefaultValues.randomRotation.booleanValue()); ++ this.Dig = ReadModSettings(BODefaultValues.dig.name(), BODefaultValues.dig.booleanValue()); ++ this.Tree = ReadModSettings(BODefaultValues.tree.name(), BODefaultValues.tree.booleanValue()); ++ this.Branch = ReadModSettings(BODefaultValues.branch.name(), BODefaultValues.branch.booleanValue()); ++ this.DiggingBranch = ReadModSettings(BODefaultValues.diggingBranch.name(), BODefaultValues.diggingBranch.booleanValue()); ++ this.NeedsFoundation = ReadModSettings(BODefaultValues.needsFoundation.name(), BODefaultValues.needsFoundation.booleanValue()); ++ this.Rarity = ReadModSettings(BODefaultValues.rarity.name(), BODefaultValues.rarity.intValue()); ++ this.CollisionPercentage = ReadModSettings(BODefaultValues.collisionPercentage.name(), BODefaultValues.collisionPercentage.intValue()); ++ this.SpawnElevationMin = ReadModSettings(BODefaultValues.spawnElevationMin.name(), BODefaultValues.spawnElevationMin.intValue()); ++ this.SpawnElevationMax = ReadModSettings(BODefaultValues.spawnElevationMax.name(), BODefaultValues.spawnElevationMax.intValue()); ++ ++ this.GroupFrequencyMin = ReadModSettings(BODefaultValues.groupFrequencyMin.name(), BODefaultValues.groupFrequencyMin.intValue()); 
++ this.GroupFrequencyMax = ReadModSettings(BODefaultValues.groupFrequencyMax.name(), BODefaultValues.groupFrequencyMax.intValue()); ++ this.GroupSeparationMin = ReadModSettings(BODefaultValues.groupSeperationMin.name(), BODefaultValues.groupSeperationMin.intValue()); ++ this.GroupSeparationMax = ReadModSettings(BODefaultValues.groupSeperationMax.name(), BODefaultValues.groupSeperationMax.intValue()); ++ this.GroupId = ReadModSettings(BODefaultValues.groupId.name(), BODefaultValues.groupId.stringValue()); ++ ++ this.BranchLimit = ReadModSettings(BODefaultValues.branchLimit.name(), BODefaultValues.branchLimit.intValue()); ++ ++ this.ReadCoordinates(); ++ } ++ ++ @Override ++ protected void CorrectSettings() ++ { ++ // Stub method ++ } ++ ++ @Override ++ protected void RenameOldSettings() ++ { ++ // Stub method ++ } ++ ++ private void ReadCoordinates() ++ { ++ ArrayList coordinates = new ArrayList(); ++ ++ for (String key : SettingsCache.keySet()) ++ { ++ ObjectCoordinate buffer = ObjectCoordinate.getCoordinateFromString(key, SettingsCache.get(key)); ++ if (buffer != null) ++ coordinates.add(buffer); ++ } ++ ++ Data[0] = new ObjectCoordinate[coordinates.size()]; ++ Data[1] = new ObjectCoordinate[coordinates.size()]; ++ Data[2] = new ObjectCoordinate[coordinates.size()]; ++ Data[3] = new ObjectCoordinate[coordinates.size()]; ++ ++ for (int i = 0; i < coordinates.size(); i++) ++ { ++ ObjectCoordinate coordinate = coordinates.get(i); ++ ++ Data[0][i] = coordinate; ++ coordinate = coordinate.Rotate(); ++ Data[1][i] = coordinate; ++ coordinate = coordinate.Rotate(); ++ Data[2][i] = coordinate; ++ coordinate = coordinate.Rotate(); ++ Data[3][i] = coordinate; ++ } ++ ++ } ++ ++ private HashSet ReadBlockList(ArrayList blocks, String settingName) ++ { ++ HashSet output = new HashSet(); ++ ++ boolean nonIntegerValues = false; ++ boolean all = false; ++ boolean solid = false; ++ ++ for (String block : blocks) ++ { ++ ++ if (block.equals(BODefaultValues.BO_ALL_KEY.stringValue())) ++ { ++ all = true; ++ continue; ++ } ++ if (block.equals(BODefaultValues.BO_SolidKey.stringValue())) ++ { ++ solid = true; ++ continue; ++ } ++ try ++ { ++ int blockID = Integer.decode(block); ++ if (blockID != 0) ++ output.add(blockID); ++ } catch (NumberFormatException e) ++ { ++ nonIntegerValues = true; ++ } ++ } ++ ++ if (all || solid) ++ for (DefaultMaterial material : DefaultMaterial.values()) ++ { ++ if (material.id == 0) ++ continue; ++ if (solid && !material.isSolid()) ++ continue; ++ output.add(material.id); ++ ++ } ++ if (nonIntegerValues) ++ System.out.println(""TerrainControl: Custom object "" + this.Name + "" have wrong value "" + settingName); ++ ++ return output; ++ ++ } ++ ++ public boolean ObjectCanSpawn(LocalWorld world, int x, int y, int z) ++ { ++ if ((world.getTypeId(x, y - 5, z) == 0) && (NeedsFoundation)) ++ return false; ++ ++ boolean output = true; ++ int checkBlock = world.getTypeId(x, y + 2, z); ++ if (!SpawnWater) ++ output = !((checkBlock == DefaultMaterial.WATER.id) || (checkBlock == DefaultMaterial.STATIONARY_WATER.id)); ++ if (!SpawnLava) ++ output = !((checkBlock == DefaultMaterial.LAVA.id) || (checkBlock == DefaultMaterial.STATIONARY_LAVA.id)); ++ ++ checkBlock = world.getLightLevel(x, y + 2, z); ++ if (!SpawnSunlight) ++ output = !(checkBlock > 8); ++ if (!SpawnDarkness) ++ output = !(checkBlock < 9); ++ ++ if ((y < SpawnElevationMin) || (y > SpawnElevationMax)) ++ output = false; ++ ++ if (!SpawnOnBlockType.contains(world.getTypeId(x, y - 1, z))) ++ output = false; ++ ++ return output; ++ } 
++ ++ public void GenerateCustomObjectFromGroup(LocalWorld world, Random random, int x, int y, int z) ++ { ++ if (GroupObjects == null) ++ return; ++ ++ int attempts = 3; ++ if ((GroupFrequencyMax - GroupFrequencyMin) > 0) ++ attempts = GroupFrequencyMin + random.nextInt(GroupFrequencyMax - GroupFrequencyMin); ++ ++ while (attempts > 0) ++ { ++ attempts--; ++ ++ int objIndex = random.nextInt(GroupObjects.length); ++ BO2 ObjectFromGroup = GroupObjects[objIndex]; ++ ++ if (Branch) ++ continue; ++ ++ x = x + random.nextInt(GroupSeparationMax - GroupSeparationMin) + GroupSeparationMin; ++ z = z + random.nextInt(GroupSeparationMax - GroupSeparationMin) + GroupSeparationMin; ++ int _y; ++ ++ if (SpawnAboveGround) ++ _y = world.getSolidHeight(x, z); ++ else if (SpawnUnderGround) ++ { ++ int solidHeight = world.getSolidHeight(x, z); ++ if (solidHeight < 1 || solidHeight <= SpawnElevationMin) ++ continue; ++ if (solidHeight > SpawnElevationMax) ++ solidHeight = SpawnElevationMax; ++ _y = random.nextInt(solidHeight - SpawnElevationMin) + SpawnElevationMin; ++ } else ++ _y = world.getHighestBlockYAt(x, z); ++ ++ if (y < 0) ++ continue; ++ ++ if ((y - _y) > 10 || (_y - y) > 10) ++ continue; ++ ++ ObjectFromGroup.spawn(world, random, x, _y, z); ++ } ++ ++ } ++ ++ @Override ++ public boolean hasPreferenceToSpawnIn(LocalBiome biome) ++ { ++ return SpawnInBiome.contains(biome.getName()) || SpawnInBiome.contains(""All""); ++ } ++ ++} +diff --git a/common/src/com/khorn/terraincontrol/customobjects/bo2/BO2Loader.java b/common/src/com/khorn/terraincontrol/customobjects/bo2/BO2Loader.java +new file mode 100644 +index 000000000..e7ee8bb76 +--- /dev/null ++++ b/common/src/com/khorn/terraincontrol/customobjects/bo2/BO2Loader.java +@@ -0,0 +1,14 @@ ++package com.khorn.terraincontrol.customobjects.bo2; ++ ++import java.io.File; ++ ++import com.khorn.terraincontrol.customobjects.CustomObject; ++import com.khorn.terraincontrol.customobjects.CustomObjectLoader; ++ ++public class BO2Loader implements CustomObjectLoader ++{ ++ public CustomObject loadFromFile(String objectName, File file) ++ { ++ return new BO2(file, objectName); ++ } ++} +diff --git a/common/src/com/khorn/terraincontrol/exception/InvalidResourceException.java b/common/src/com/khorn/terraincontrol/exception/InvalidResourceException.java +new file mode 100644 +index 000000000..f53a907ae +--- /dev/null ++++ b/common/src/com/khorn/terraincontrol/exception/InvalidResourceException.java +@@ -0,0 +1,11 @@ ++package com.khorn.terraincontrol.exception; ++ ++public class InvalidResourceException extends Exception ++{ ++ ++ public InvalidResourceException(String string) ++ { ++ super(string); ++ } ++ ++} +diff --git a/common/src/com/khorn/terraincontrol/generator/ObjectSpawner.java b/common/src/com/khorn/terraincontrol/generator/ObjectSpawner.java +index a0f81f51b..d0dde206a 100644 +--- a/common/src/com/khorn/terraincontrol/generator/ObjectSpawner.java ++++ b/common/src/com/khorn/terraincontrol/generator/ObjectSpawner.java +@@ -1,14 +1,14 @@ + package com.khorn.terraincontrol.generator; -- checkRMStatus(user.getShortUserName(), argName, msg); -+ checkAndThrowIfDistributedNodeLabelConfEnabled(operation); -+ UserGroupInformation user = checkAcls(operation); ++import java.util.Random; + -+ checkRMStatus(user.getShortUserName(), operation, msg); + import com.khorn.terraincontrol.DefaultMaterial; + import com.khorn.terraincontrol.LocalWorld; + import com.khorn.terraincontrol.configuration.BiomeConfig; + import com.khorn.terraincontrol.configuration.Resource; + import 
com.khorn.terraincontrol.configuration.TCDefaultValues; + import com.khorn.terraincontrol.configuration.WorldConfig; +-import com.khorn.terraincontrol.generator.resourcegens.ResourceType; +- +-import java.util.Random; ++import com.khorn.terraincontrol.generator.resourcegens.SmallLakeGen; - ReplaceLabelsOnNodeResponse response = - recordFactory.newRecordInstance(ReplaceLabelsOnNodeResponse.class); -@@ -670,30 +680,41 @@ public ReplaceLabelsOnNodeResponse replaceLabelsOnNode( - rmContext.getNodeLabelManager().replaceLabelsOnNode( - request.getNodeToLabels()); - RMAuditLogger -- .logSuccess(user.getShortUserName(), argName, ""AdminService""); -+ .logSuccess(user.getShortUserName(), operation, ""AdminService""); - return response; - } catch (IOException ioe) { -- throw logAndWrapException(ioe, user.getShortUserName(), argName, msg); -+ throw logAndWrapException(ioe, user.getShortUserName(), operation, msg); - } - } + public class ObjectSpawner + { +@@ -42,10 +42,10 @@ public void populate(int chunkX, int chunkZ) + for (int i = 0; i < localBiomeConfig.ResourceCount; i++) + { + Resource res = localBiomeConfig.ResourceSequence[i]; +- if (res.Type == ResourceType.SmallLake && Village) ++ if (res instanceof SmallLakeGen && Village) + continue; +- world.setChunksCreations(res.Type.CreateNewChunks); +- res.Type.Generator.Process(world, rand, res, x, z); ++ world.setChunksCreations(false); ++ res.process(world, rand, chunkX, chunkZ); + } -- private void checkRMStatus(String user, String argName, String msg) -+ private void checkRMStatus(String user, String operation, String msg) - throws StandbyException { - if (!isRMActive()) { -- RMAuditLogger.logFailure(user, argName, """", -+ RMAuditLogger.logFailure(user, operation, """", - ""AdminService"", ""ResourceManager is not active. 
Can not "" + msg); - throwStandbyException(); - } - } + // Snow and ice +@@ -67,10 +67,11 @@ public void populate(int chunkX, int chunkZ) + world.setBlock(blockToFreezeX, blockToFreezeY - 1, blockToFreezeZ, biomeConfig.iceBlock, 0); + } else + { +- // Snow has to be placed on an empty space on a solid block in the world ++ // Snow has to be placed on an empty space on a ++ // solid block in the world + if (world.getMaterial(blockToFreezeX, blockToFreezeY, blockToFreezeZ) == DefaultMaterial.AIR) + { +- if (world.getMaterial(blockToFreezeX, blockToFreezeY - 1, blockToFreezeZ).isSolid()) ++ if (world.getMaterial(blockToFreezeX, blockToFreezeY - 1, blockToFreezeZ).isSolid()) + { + world.setBlock(blockToFreezeX, blockToFreezeY, blockToFreezeZ, DefaultMaterial.SNOW.id, 0); + } +diff --git a/common/src/com/khorn/terraincontrol/generator/resourcegens/AboveWaterGen.java b/common/src/com/khorn/terraincontrol/generator/resourcegens/AboveWaterGen.java +index 63f205514..b4060afca 100644 +--- a/common/src/com/khorn/terraincontrol/generator/resourcegens/AboveWaterGen.java ++++ b/common/src/com/khorn/terraincontrol/generator/resourcegens/AboveWaterGen.java +@@ -1,15 +1,33 @@ + package com.khorn.terraincontrol.generator.resourcegens; + + import com.khorn.terraincontrol.LocalWorld; +-import com.khorn.terraincontrol.configuration.BiomeConfig; + import com.khorn.terraincontrol.configuration.Resource; ++import com.khorn.terraincontrol.exception.InvalidResourceException; - private YarnException logAndWrapException(Exception exception, String user, -- String argName, String msg) throws YarnException { -+ String operation, String msg) throws YarnException { - LOG.warn(""Exception "" + msg, exception); -- RMAuditLogger.logFailure(user, argName, """", -+ RMAuditLogger.logFailure(user, operation, """", - ""AdminService"", ""Exception "" + msg); - return RPCUtil.getRemoteException(exception); - } ++import java.util.List; + import java.util.Random; -+ private void checkAndThrowIfDistributedNodeLabelConfEnabled(String operation) -+ throws YarnException { -+ if (isDistributedNodeLabelConfiguration) { -+ String msg = -+ String.format(""Error when invoke method=%s because of "" -+ + ""distributed node label configuration enabled."", operation); -+ LOG.error(msg); -+ throw RPCUtil.getRemoteException(new IOException(msg)); +-public class AboveWaterGen extends ResourceGenBase ++public class AboveWaterGen extends Resource + { ++ private int blockId; ++ private int blockData; ++ + @Override +- protected void SpawnResource(LocalWorld world, Random rand, Resource res, int x, int z) ++ public void load(List args) throws InvalidResourceException ++ { ++ if (args.size() < 3) ++ { ++ throw new InvalidResourceException(""Too few arguments supplied""); ++ } ++ ++ blockId = getBlockId(args.get(0)); ++ blockData = getBlockData(args.get(0)); ++ frequency = getInt(args.get(1), 1, 100); ++ rarity = getInt(args.get(2), 1, 100); + } -+ } + - @Override - public CheckForDecommissioningNodesResponse checkForDecommissioningNodes( - CheckForDecommissioningNodesRequest checkForDecommissioningNodesRequest) -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java -index 5e2dc7e4f2543..16b6a890ac923 100644 ---- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java -@@ -104,7 +104,7 @@ public class ResourceTrackerService extends AbstractService implements - private int minAllocMb; - private int minAllocVcores; ++ @Override ++ public void spawn(LocalWorld world, Random rand, int x, int z) + { + int y = world.getLiquidHeight(x, z); + if (y == -1) +@@ -19,26 +37,23 @@ protected void SpawnResource(LocalWorld world, Random rand, Resource res, int x, + for (int i = 0; i < 10; i++) + { + int j = x + rand.nextInt(8) - rand.nextInt(8); +- //int k = y + rand.nextInt(4) - rand.nextInt(4); + int m = z + rand.nextInt(8) - rand.nextInt(8); + if (!world.isEmpty(j, y, m) || !world.getMaterial(j, y - 1, m).isLiquid()) + continue; +- world.setBlock(j, y, m, res.BlockId, 0, false, false, false); ++ world.setBlock(j, y, m, blockId, blockData, false, false, false); + } + } -- private boolean isDistributesNodeLabelsConf; -+ private boolean isDistributedNodeLabelsConf; + @Override +- protected boolean ReadString(Resource res, String[] Props, BiomeConfig biomeConfig) throws NumberFormatException ++ public ResourceType getType() + { +- res.BlockId = CheckBlock(Props[0]); +- res.Frequency = CheckValue(Props[1], 1, 100); +- res.Rarity = CheckValue(Props[2], 0, 100); +- return true; ++ return ResourceType.biomeConfigResource; + } - static { - resync.setNodeAction(NodeAction.RESYNC); -@@ -155,13 +155,8 @@ protected void serviceInit(Configuration conf) throws Exception { - YarnConfiguration.RM_NODEMANAGER_MINIMUM_VERSION, - YarnConfiguration.DEFAULT_RM_NODEMANAGER_MINIMUM_VERSION); + @Override +- protected String WriteString(Resource res, String blockSources) ++ public String makeString() + { +- return res.BlockIdToName(res.BlockId) + "","" + res.Frequency + "","" + res.Rarity; ++ return ""AboveWaterGen("" + makeMaterial(blockId) + "","" + frequency + "","" + rarity + "")""; + } ++ + } +diff --git a/common/src/com/khorn/terraincontrol/generator/resourcegens/CactusGen.java b/common/src/com/khorn/terraincontrol/generator/resourcegens/CactusGen.java +index 9e530c630..f0f1f2cfa 100644 +--- a/common/src/com/khorn/terraincontrol/generator/resourcegens/CactusGen.java ++++ b/common/src/com/khorn/terraincontrol/generator/resourcegens/CactusGen.java +@@ -1,17 +1,26 @@ + package com.khorn.terraincontrol.generator.resourcegens; -- String nodeLabelConfigurationType = -- conf.get(YarnConfiguration.NODELABEL_CONFIGURATION_TYPE, -- YarnConfiguration.DEFAULT_NODELABEL_CONFIGURATION_TYPE); +-import com.khorn.terraincontrol.configuration.BiomeConfig; +-import com.khorn.terraincontrol.configuration.Resource; +-import com.khorn.terraincontrol.LocalWorld; - -- isDistributesNodeLabelsConf = -- YarnConfiguration.DISTRIBUTED_NODELABEL_CONFIGURATION_TYPE -- .equals(nodeLabelConfigurationType); -+ isDistributedNodeLabelsConf = -+ YarnConfiguration.isDistributedNodeLabelConfiguration(conf); - - super.serviceInit(conf); - } -@@ -352,7 +347,7 @@ public RegisterNodeManagerResponse registerNodeManager( ++import java.util.ArrayList; ++import java.util.List; + import java.util.Random; - // Update node's labels to RM's NodeLabelManager. 
- Set nodeLabels = request.getNodeLabels(); -- if (isDistributesNodeLabelsConf && nodeLabels != null) { -+ if (isDistributedNodeLabelsConf && nodeLabels != null) { - try { - updateNodeLabelsFromNMReport(nodeLabels, nodeId); - response.setAreNodeLabelsAcceptedByRM(true); -@@ -470,7 +465,7 @@ public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) - this.rmContext.getDispatcher().getEventHandler().handle(nodeStatusEvent); +-public class CactusGen extends ResourceGenBase ++import com.khorn.terraincontrol.LocalWorld; ++import com.khorn.terraincontrol.TerrainControl; ++import com.khorn.terraincontrol.configuration.Resource; ++import com.khorn.terraincontrol.exception.InvalidResourceException; ++ ++public class CactusGen extends Resource + { ++ private int blockId; ++ private int blockData; ++ private int minAltitude; ++ private int maxAltitude; ++ private List sourceBlocks; ++ + @Override +- protected void SpawnResource(LocalWorld world, Random rand, Resource res, int x, int z) ++ public void spawn(LocalWorld world, Random rand, int x, int z) + { +- int y = rand.nextInt(res.MaxAltitude - res.MinAltitude) + res.MinAltitude; ++ int y = rand.nextInt(maxAltitude - minAltitude) + minAltitude; - // 5. Update node's labels to RM's NodeLabelManager. -- if (isDistributesNodeLabelsConf && request.getNodeLabels() != null) { -+ if (isDistributedNodeLabelsConf && request.getNodeLabels() != null) { - try { - updateNodeLabelsFromNMReport(request.getNodeLabels(), nodeId); - nodeHeartBeatResponse.setAreNodeLabelsAcceptedByRM(true); -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java -index 6cd6d56281f66..9aea62d1c8408 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java -@@ -149,6 +149,7 @@ - import org.apache.hadoop.yarn.webapp.NotFoundException; - import org.apache.hadoop.yarn.webapp.util.WebAppUtils; + for (int i = 0; i < 10; i++) + { +@@ -24,9 +33,9 @@ protected void SpawnResource(LocalWorld world, Random rand, Resource res, int x, + for (int i1 = 0; i1 < n; i1++) + { + int id = world.getTypeId(j, k + i1 - 1, m); +- if (res.CheckSourceId(id) || id == res.BlockId) ++ if (sourceBlocks.contains(id)) + { +- world.setBlock(j, k + i1, m, res.BlockId, 0, false, false, false); ++ world.setBlock(j, k + i1, m, blockId, blockData, false, false, false); + } + } + } +@@ -34,39 +43,35 @@ protected void SpawnResource(LocalWorld world, Random rand, Resource res, int x, + } -+import com.google.common.annotations.VisibleForTesting; - import com.google.inject.Inject; - import com.google.inject.Singleton; + @Override +- protected boolean ReadString(Resource res, String[] Props, BiomeConfig biomeConfig) throws NumberFormatException ++ public ResourceType getType() + { ++ return ResourceType.biomeConfigResource; ++ } -@@ -165,6 +166,9 @@ public class RMWebServices { - private final Configuration conf; - private @Context HttpServletResponse response; +- if (Props[0].contains(""."")) +- { +- String[] block 
= Props[0].split(""\\.""); +- res.BlockId = CheckBlock(block[0]); +- res.BlockData = CheckValue(block[1], 0, 16); +- } else +- { +- res.BlockId = CheckBlock(Props[0]); +- } +- +- res.Frequency = CheckValue(Props[1], 1, 100); +- res.Rarity = CheckValue(Props[2], 0, 100); +- res.MinAltitude = CheckValue(Props[3], 0, biomeConfig.worldConfig.WorldHeight); +- res.MaxAltitude = CheckValue(Props[4], 0, biomeConfig.worldConfig.WorldHeight, res.MinAltitude); +- +- res.SourceBlockId = new int[Props.length - 5]; +- for (int i = 5; i < Props.length; i++) +- res.SourceBlockId[i - 5] = CheckBlock(Props[i]); +- +- return true; ++ @Override ++ public String makeString() ++ { ++ return ""Cactus("" + makeMaterial(blockId, blockData) + "","" + frequency + "","" + rarity + "","" + minAltitude + "","" + maxAltitude + makeMaterial(sourceBlocks); + } -+ @VisibleForTesting -+ boolean isDistributedNodeLabelConfiguration = false; + @Override +- protected String WriteString(Resource res, String blockSources) ++ public void load(List args) throws InvalidResourceException + { +- String blockId = res.BlockIdToName(res.BlockId); +- if (res.BlockData > 0) ++ if (args.size() < 6) ++ { ++ throw new InvalidResourceException(""Too few arguments supplied""); ++ } + - public final static String DELEGATION_TOKEN_HEADER = - ""Hadoop-YARN-RM-Delegation-Token""; - -@@ -172,6 +176,19 @@ public class RMWebServices { - public RMWebServices(final ResourceManager rm, Configuration conf) { - this.rm = rm; - this.conf = conf; -+ isDistributedNodeLabelConfiguration = -+ YarnConfiguration.isDistributedNodeLabelConfiguration(conf); -+ } ++ blockId = getBlockId(args.get(0)); ++ blockData = getBlockData(args.get(0)); ++ frequency = getInt(args.get(1), 1, 100); ++ rarity = getInt(args.get(2), 1, 100); ++ minAltitude = getInt(args.get(3), TerrainControl.worldDepth, TerrainControl.worldHeight); ++ maxAltitude = getInt(args.get(4), minAltitude + 1, TerrainControl.worldHeight); ++ sourceBlocks = new ArrayList(); ++ for (int i = 5; i < args.size(); i++) + { +- blockId += ""."" + res.BlockData; ++ sourceBlocks.add(getBlockId(args.get(i))); + } +- return blockId + "","" + res.Frequency + "","" + res.Rarity + "","" + res.MinAltitude + "","" + res.MaxAltitude + blockSources; + } + } +\ No newline at end of file +diff --git a/common/src/com/khorn/terraincontrol/generator/resourcegens/CustomObjectGen.java b/common/src/com/khorn/terraincontrol/generator/resourcegens/CustomObjectGen.java +new file mode 100644 +index 000000000..a4a943e57 +--- /dev/null ++++ b/common/src/com/khorn/terraincontrol/generator/resourcegens/CustomObjectGen.java +@@ -0,0 +1,67 @@ ++package com.khorn.terraincontrol.generator.resourcegens; + -+ private void checkAndThrowIfDistributedNodeLabelConfEnabled(String operation) -+ throws IOException { -+ if (isDistributedNodeLabelConfiguration) { -+ String msg = -+ String.format(""Error when invoke method=%s because of "" -+ + ""distributed node label configuration enabled."", operation); -+ LOG.error(msg); -+ throw new IOException(msg); -+ } - } - - RMWebServices(ResourceManager rm, Configuration conf, -@@ -816,38 +833,64 @@ public LabelsToNodesInfo getLabelsToNodes( - @POST - @Path(""/replace-node-to-labels"") - @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }) -- public Response replaceLabelsOnNodes( -- final NodeToLabelsInfo newNodeToLabels, -- @Context HttpServletRequest hsr) -- throws IOException { -+ public Response replaceLabelsOnNodes(final NodeToLabelsInfo newNodeToLabels, -+ @Context HttpServletRequest hsr) 
throws IOException { -+ Map> nodeIdToLabels = -+ new HashMap>(); ++import java.util.ArrayList; ++import java.util.List; ++import java.util.Random; + -+ for (Map.Entry nitle : newNodeToLabels -+ .getNodeToLabels().entrySet()) { -+ nodeIdToLabels.put( -+ ConverterUtils.toNodeIdWithDefaultPort(nitle.getKey()), -+ new HashSet(nitle.getValue().getNodeLabels())); ++import com.khorn.terraincontrol.LocalWorld; ++import com.khorn.terraincontrol.TerrainControl; ++import com.khorn.terraincontrol.configuration.Resource; ++import com.khorn.terraincontrol.customobjects.CustomObject; ++import com.khorn.terraincontrol.exception.InvalidResourceException; ++import com.khorn.terraincontrol.util.Txt; ++ ++public class CustomObjectGen extends Resource ++{ ++ private List objects; ++ private List objectNames; ++ ++ @Override ++ public void load(List args) throws InvalidResourceException ++ { ++ if (args.size() == 0) ++ { ++ // Backwards compability ++ args.add(""UseWorld""); ++ } ++ objects = new ArrayList(); ++ for (String arg : args) ++ { ++ CustomObject object = TerrainControl.getCustomObjectManager().getObjectFromString(arg, worldConfig); ++ if (object == null || !object.canSpawnAsObject()) ++ { ++ throw new InvalidResourceException(""No custom object found with the name "" + arg); ++ } ++ objects.add(object); ++ objectNames.add(arg); ++ } + } + -+ return replaceLabelsOnNode(nodeIdToLabels, hsr, ""/replace-node-to-labels""); -+ } ++ @Override ++ public void spawn(LocalWorld world, Random random, int x, int z) ++ { ++ // Left blank, as process(..) already handles this. ++ } + -+ @POST -+ @Path(""/nodes/{nodeId}/replace-labels"") -+ @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }) -+ public Response replaceLabelsOnNode(NodeLabelsInfo newNodeLabelsInfo, -+ @Context HttpServletRequest hsr, @PathParam(""nodeId"") String nodeId) -+ throws Exception { -+ NodeId nid = ConverterUtils.toNodeIdWithDefaultPort(nodeId); -+ Map> newLabelsForNode = -+ new HashMap>(); -+ newLabelsForNode.put(nid, -+ new HashSet(newNodeLabelsInfo.getNodeLabels())); ++ @Override ++ public void process(LocalWorld world, Random random, int chunkX, int chunkZ) ++ { ++ for (CustomObject object : objects) ++ { ++ object.process(world, random, chunkX, chunkZ); ++ } ++ } + -+ return replaceLabelsOnNode(newLabelsForNode, hsr, ""/nodes/nodeid/replace-labels""); -+ } ++ @Override ++ public ResourceType getType() ++ { ++ return ResourceType.biomeConfigResource; ++ } + -+ private Response replaceLabelsOnNode( -+ Map> newLabelsForNode, HttpServletRequest hsr, -+ String operation) throws IOException { - init(); -- ++ @Override ++ public String makeString() ++ { ++ return ""CustomObject("" + Txt.implode(objectNames, "","") + "")""; ++ } + -+ checkAndThrowIfDistributedNodeLabelConfEnabled(""replaceLabelsOnNode""); ++} +diff --git a/common/src/com/khorn/terraincontrol/generator/resourcegens/DungeonGen.java b/common/src/com/khorn/terraincontrol/generator/resourcegens/DungeonGen.java +index 630d447df..3d7b16d6a 100644 +--- a/common/src/com/khorn/terraincontrol/generator/resourcegens/DungeonGen.java ++++ b/common/src/com/khorn/terraincontrol/generator/resourcegens/DungeonGen.java +@@ -1,34 +1,47 @@ + package com.khorn.terraincontrol.generator.resourcegens; + +-import com.khorn.terraincontrol.configuration.BiomeConfig; + import com.khorn.terraincontrol.configuration.Resource; ++import com.khorn.terraincontrol.exception.InvalidResourceException; + import com.khorn.terraincontrol.LocalWorld; ++import com.khorn.terraincontrol.TerrainControl; + 
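The CustomObjectGen listing above and the DungeonGen, GrassGen, LiquidGen, OreGen and PlantGen rewrites that follow all share one shape: configuration parsing moves into load(List<String> args), which throws InvalidResourceException("Too few arguments supplied") when the list is short and funnels every numeric field through getInt with explicit bounds. A minimal standalone sketch of that shape follows; the class and helper names here are hypothetical, and the clamping behaviour of getInt is an assumption, since the real getInt/getBlockId helpers are not shown in this diff.

// Hedged sketch: SketchResource and InvalidResourceSketchException are illustrative
// stand-ins, not the project's Resource base class or exception.
import java.util.List;

class InvalidResourceSketchException extends Exception
{
    InvalidResourceSketchException(String message) { super(message); }
}

abstract class SketchResource
{
    protected int frequency;
    protected int rarity;

    // Assumption: out-of-range values are clamped into [min, max];
    // the real helper may instead reject them.
    protected static int getInt(String value, int min, int max) throws InvalidResourceSketchException
    {
        try
        {
            return Math.max(min, Math.min(max, Integer.decode(value)));
        } catch (NumberFormatException e)
        {
            throw new InvalidResourceSketchException("Not a number: " + value);
        }
    }

    public abstract void load(List<String> args) throws InvalidResourceSketchException;
}

// A dungeon-like resource taking Frequency,Rarity,MinAltitude,MaxAltitude.
class DungeonSketch extends SketchResource
{
    private int minAltitude;
    private int maxAltitude;

    @Override
    public void load(List<String> args) throws InvalidResourceSketchException
    {
        if (args.size() < 4)
            throw new InvalidResourceSketchException("Too few arguments supplied");
        frequency = getInt(args.get(0), 1, 100);
        rarity = getInt(args.get(1), 1, 100);
        minAltitude = getInt(args.get(2), 0, 128);
        maxAltitude = getInt(args.get(3), minAltitude + 1, 128);
    }
}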
++import java.util.List; + import java.util.Random; + +-public class DungeonGen extends ResourceGenBase ++public class DungeonGen extends Resource + { ++ private int minAltitude; ++ private int maxAltitude; + - UserGroupInformation callerUGI = getCallerUserGroupInformation(hsr, true); - if (callerUGI == null) { -- String msg = ""Unable to obtain user name, user not authenticated for"" -- + "" post to .../replace-node-to-labels""; -+ String msg = -+ ""Unable to obtain user name, user not authenticated for"" -+ + "" post to ..."" + operation; - throw new AuthorizationException(msg); + @Override +- protected void SpawnResource(LocalWorld world, Random rand, Resource res, int x, int z) ++ public void load(List args) throws InvalidResourceException + { +- int _y = rand.nextInt(res.MaxAltitude - res.MinAltitude) + res.MinAltitude; +- world.PlaceDungeons(rand, x, _y, z); ++ if (args.size() < 4) ++ { ++ throw new InvalidResourceException(""Too few arguments supplied""); ++ } ++ frequency = getInt(args.get(0), 1, 100); ++ rarity = getInt(args.get(1), 1, 100); ++ minAltitude = getInt(args.get(2), TerrainControl.worldDepth, TerrainControl.worldHeight); ++ maxAltitude = getInt(args.get(3), minAltitude + 1, TerrainControl.worldHeight); } -+ - if (!rm.getRMContext().getNodeLabelManager().checkAccess(callerUGI)) { -- String msg = ""User "" + callerUGI.getShortUserName() + "" not authorized"" -- + "" for post to .../replace-node-to-labels ""; -+ String msg = -+ ""User "" + callerUGI.getShortUserName() + "" not authorized"" -+ + "" for post to ..."" + operation; - throw new AuthorizationException(msg); + + @Override +- protected boolean ReadString(Resource res, String[] Props, BiomeConfig biomeConfig) throws NumberFormatException ++ public void spawn(LocalWorld world, Random random, int x, int z) + { +- res.Frequency = CheckValue(Props[0], 1, 100); +- res.Rarity = CheckValue(Props[1], 0, 100); +- res.MinAltitude = CheckValue(Props[2], 0, biomeConfig.worldConfig.WorldHeight); +- res.MaxAltitude = CheckValue(Props[3], 0, biomeConfig.worldConfig.WorldHeight, res.MinAltitude); ++ int y = random.nextInt(maxAltitude - minAltitude) + minAltitude; ++ world.PlaceDungeons(random, x, y, z); ++ } + +- return true; ++ @Override ++ public ResourceType getType() ++ { ++ return ResourceType.biomeConfigResource; } -- -- Map> nodeIdToLabels = -- new HashMap>(); -- for (Map.Entry nitle : -- newNodeToLabels.getNodeToLabels().entrySet()) { -- nodeIdToLabels.put(ConverterUtils.toNodeIdWithDefaultPort(nitle.getKey()), -- new HashSet(nitle.getValue().getNodeLabels())); -- } -- -- rm.getRMContext().getNodeLabelManager().replaceLabelsOnNode(nodeIdToLabels); -+ rm.getRMContext().getNodeLabelManager() -+ .replaceLabelsOnNode(newLabelsForNode); + @Override +- protected String WriteString(Resource res, String blockSources) ++ public String makeString() + { +- return res.Frequency + "","" + res.Rarity + "","" + res.MinAltitude + "","" + res.MaxAltitude; ++ return ""Dungeon("" + frequency + "","" + rarity + "","" + minAltitude + "","" + maxAltitude + "")""; + } + } +\ No newline at end of file +diff --git a/common/src/com/khorn/terraincontrol/generator/resourcegens/GrassGen.java b/common/src/com/khorn/terraincontrol/generator/resourcegens/GrassGen.java +index 0bed67e32..79c895dcc 100644 +--- a/common/src/com/khorn/terraincontrol/generator/resourcegens/GrassGen.java ++++ b/common/src/com/khorn/terraincontrol/generator/resourcegens/GrassGen.java +@@ -1,60 +1,74 @@ + package com.khorn.terraincontrol.generator.resourcegens; + +-import 
com.khorn.terraincontrol.configuration.BiomeConfig; + import com.khorn.terraincontrol.configuration.Resource; ++import com.khorn.terraincontrol.exception.InvalidResourceException; + import com.khorn.terraincontrol.DefaultMaterial; + import com.khorn.terraincontrol.LocalWorld; - return Response.status(Status.OK).build(); - } -- ++import java.util.ArrayList; ++import java.util.List; + import java.util.Random; + +-public class GrassGen extends ResourceGenBase ++public class GrassGen extends Resource + { ++ private int blockId; ++ private int blockData; ++ private List sourceBlocks; + - @GET - @Path(""/get-node-labels"") - @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }) -@@ -897,7 +940,7 @@ public Response removeFromCluserNodeLabels(final NodeLabelsInfo oldNodeLabels, - @Context HttpServletRequest hsr) - throws Exception { - init(); -- ++ @Override ++ public void load(List args) throws InvalidResourceException ++ { ++ if (args.size() < 5) ++ { ++ throw new InvalidResourceException(""Too few arguments supplied""); ++ } ++ blockId = getBlockId(args.get(0)); ++ blockData = getInt(args.get(1), 0, 16); ++ frequency = getInt(args.get(2), 1, 500); ++ rarity = getInt(args.get(3), 1, 100); ++ sourceBlocks = new ArrayList(); ++ for (int i = 4; i < args.size(); i++) ++ { ++ sourceBlocks.add(getBlockId(args.get(i))); ++ } ++ } + - UserGroupInformation callerUGI = getCallerUserGroupInformation(hsr, true); - if (callerUGI == null) { - String msg = ""Unable to obtain user name, user not authenticated for"" -@@ -931,40 +974,6 @@ public NodeLabelsInfo getLabelsOnNode(@Context HttpServletRequest hsr, - rm.getRMContext().getNodeLabelManager().getLabelsOnNode(nid)); + @Override +- public void Process(LocalWorld world, Random rand, Resource res, int _x, int _z) ++ public void spawn(LocalWorld world, Random random, int x, int z) + { ++ // Handled by process(). 
++ } - } -- -- @POST -- @Path(""/nodes/{nodeId}/replace-labels"") -- @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }) -- public Response replaceLabelsOnNode(NodeLabelsInfo newNodeLabelsInfo, -- @Context HttpServletRequest hsr, @PathParam(""nodeId"") String nodeId) -- throws Exception { -- init(); -- -- UserGroupInformation callerUGI = getCallerUserGroupInformation(hsr, true); -- if (callerUGI == null) { -- String msg = ""Unable to obtain user name, user not authenticated for"" -- + "" post to .../nodes/nodeid/replace-labels""; -- throw new AuthorizationException(msg); -- } -- -- if (!rm.getRMContext().getNodeLabelManager().checkAccess(callerUGI)) { -- String msg = ""User "" + callerUGI.getShortUserName() + "" not authorized"" -- + "" for post to .../nodes/nodeid/replace-labels""; -- throw new AuthorizationException(msg); +- for (int t = 0; t < res.Frequency; t++) ++ @Override ++ public void process(LocalWorld world, Random random, int chunkX, int chunkZ) ++ { ++ for (int t = 0; t < frequency; t++) + { +- if (rand.nextInt(100) >= res.Rarity) ++ if (random.nextInt(100) >= rarity) + continue; +- int x = _x + rand.nextInt(16) + 8; +- int y = world.getHeight(); +- int z = _z + rand.nextInt(16) + 8; ++ int x = chunkX * 16 + random.nextInt(16) + 8; ++ int z = chunkZ * 16 + random.nextInt(16) + 8; ++ int y = world.getHighestBlockYAt(x, z); + + int i; + while ((((i = world.getTypeId(x, y, z)) == 0) || (i == DefaultMaterial.LEAVES.id)) && (y > 0)) + y--; + +- if ((!world.isEmpty(x, y + 1, z)) || (!res.CheckSourceId(world.getTypeId(x, y, z)))) ++ if ((!world.isEmpty(x, y + 1, z)) || (!sourceBlocks.contains(world.getTypeId(x, y, z)))) + continue; +- world.setBlock(x, y + 1, z, res.BlockId, res.BlockData, false, false, false); ++ world.setBlock(x, y + 1, z, blockId, blockData, false, false, false); + } + } + + @Override +- protected void SpawnResource(LocalWorld world, Random rand, Resource res, int x, int z) ++ public ResourceType getType() + { +- - } -- -- NodeId nid = ConverterUtils.toNodeIdWithDefaultPort(nodeId); -- -- Map> newLabelsForNode = new HashMap>(); -- -- newLabelsForNode.put(nid, new HashSet(newNodeLabelsInfo.getNodeLabels())); -- -- rm.getRMContext().getNodeLabelManager().replaceLabelsOnNode(newLabelsForNode); -- -- return Response.status(Status.OK).build(); - -- } +- @Override +- protected boolean ReadString(Resource res, String[] Props, BiomeConfig biomeConfig) throws NumberFormatException +- { +- res.BlockId = CheckBlock(Props[0]); +- res.BlockData = CheckValue(Props[1], 0, 16); +- res.Frequency = CheckValue(Props[2], 1, 500); +- res.Rarity = CheckValue(Props[3], 0, 100); +- +- res.SourceBlockId = new int[Props.length - 4]; +- for (int i = 4; i < Props.length; i++) +- res.SourceBlockId[i - 4] = CheckBlock(Props[i]); +- +- return true; ++ return ResourceType.biomeConfigResource; + } - protected Response killApp(RMApp app, UserGroupInformation callerUGI, - HttpServletRequest hsr) throws IOException, InterruptedException { -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java -index da04c9ec32b11..fe0b8a8d1ff35 100644 ---- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java -@@ -18,6 +18,7 @@ + @Override +- protected String WriteString(Resource res, String blockSources) ++ public String makeString() + { +- return res.BlockIdToName(res.BlockId) + "","" + res.BlockData + "","" + res.Frequency + "","" + res.Rarity + blockSources; ++ return ""Grass("" + makeMaterial(blockId) + "","" + blockData + "","" + frequency + "","" + rarity + makeMaterial(sourceBlocks) + "")""; + } + } +\ No newline at end of file +diff --git a/common/src/com/khorn/terraincontrol/generator/resourcegens/LiquidGen.java b/common/src/com/khorn/terraincontrol/generator/resourcegens/LiquidGen.java +index 00a6cb9f7..1688c7584 100644 +--- a/common/src/com/khorn/terraincontrol/generator/resourcegens/LiquidGen.java ++++ b/common/src/com/khorn/terraincontrol/generator/resourcegens/LiquidGen.java +@@ -1,85 +1,97 @@ + package com.khorn.terraincontrol.generator.resourcegens; - package org.apache.hadoop.yarn.server.resourcemanager; + import com.khorn.terraincontrol.LocalWorld; ++import com.khorn.terraincontrol.TerrainControl; -+import static org.junit.Assert.assertEquals; - import static org.junit.Assert.fail; +-import com.khorn.terraincontrol.configuration.BiomeConfig; + import com.khorn.terraincontrol.configuration.Resource; ++import com.khorn.terraincontrol.exception.InvalidResourceException; - import java.io.DataOutputStream; -@@ -44,6 +45,7 @@ - import org.apache.hadoop.security.authorize.ProxyUsers; - import org.apache.hadoop.security.authorize.ServiceAuthorizationManager; - import org.apache.hadoop.yarn.api.records.DecommissionType; -+import org.apache.hadoop.yarn.api.records.NodeId; - import org.apache.hadoop.yarn.conf.HAUtil; - import org.apache.hadoop.yarn.conf.YarnConfiguration; - import org.apache.hadoop.yarn.exceptions.YarnException; -@@ -53,6 +55,9 @@ - import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshServiceAclsRequest; - import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshSuperUserGroupsConfigurationRequest; - import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshUserToGroupsMappingsRequest; -+import org.apache.hadoop.yarn.server.api.protocolrecords.RemoveFromClusterNodeLabelsRequest; -+import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeRequest; -+import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager; - import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler; - import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfiguration; - import org.junit.After; -@@ -60,6 +65,8 @@ - import org.junit.Before; - import org.junit.Test; ++import java.util.ArrayList; ++import java.util.List; + import java.util.Random; -+import com.google.common.collect.ImmutableMap; -+import com.google.common.collect.ImmutableSet; +-public class LiquidGen extends ResourceGenBase ++public class LiquidGen extends Resource + { ++ private int blockId; ++ private int blockData; ++ private List sourceBlocks; ++ private int minAltitude; ++ private int maxAltitude; ++ + @Override +- protected void SpawnResource(LocalWorld world, Random rand, Resource res, int x, int z) ++ public void spawn(LocalWorld world, Random rand, int x, 
int z) + { +- int y = rand.nextInt(res.MaxAltitude - res.MinAltitude) + res.MinAltitude; ++ int y = rand.nextInt(maxAltitude - minAltitude) + minAltitude; - public class TestRMAdminService { +- if (res.CheckSourceId(world.getTypeId(x, y + 1, z))) ++ if (sourceBlocks.contains(world.getTypeId(x, y + 1, z))) + return; +- if (res.CheckSourceId(world.getTypeId(x, y - 1, z))) ++ if (sourceBlocks.contains(world.getTypeId(x, y - 1, z))) + return; -@@ -754,6 +761,67 @@ public void testRMInitialsWithFileSystemBasedConfigurationProvider() - } - } +- if ((world.getTypeId(x, y, z) != 0) && (res.CheckSourceId(world.getTypeId(x, y, z)))) ++ if ((world.getTypeId(x, y, z) != 0) && (sourceBlocks.contains(world.getTypeId(x, y, z)))) + return; -+ @Test -+ public void testModifyLabelsOnNodesWithDistributedConfigurationDisabled() -+ throws IOException, YarnException { -+ // create RM and set it's ACTIVE -+ MockRM rm = new MockRM(); -+ ((RMContextImpl) rm.getRMContext()) -+ .setHAServiceState(HAServiceState.ACTIVE); -+ RMNodeLabelsManager labelMgr = rm.rmContext.getNodeLabelManager(); -+ -+ // by default, distributed configuration for node label is disabled, this -+ // should pass -+ labelMgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of(""x"", ""y"")); -+ rm.adminService.replaceLabelsOnNode(ReplaceLabelsOnNodeRequest -+ .newInstance(ImmutableMap.of(NodeId.newInstance(""host"", 0), -+ (Set) ImmutableSet.of(""x"")))); -+ rm.close(); -+ } -+ -+ @Test(expected = YarnException.class) -+ public void testModifyLabelsOnNodesWithDistributedConfigurationEnabled() -+ throws IOException, YarnException { -+ // create RM and set it's ACTIVE, and set distributed node label -+ // configuration to true -+ MockRM rm = new MockRM(); -+ rm.adminService.isDistributedNodeLabelConfiguration = true; -+ -+ ((RMContextImpl) rm.getRMContext()) -+ .setHAServiceState(HAServiceState.ACTIVE); -+ RMNodeLabelsManager labelMgr = rm.rmContext.getNodeLabelManager(); -+ -+ // by default, distributed configuration for node label is disabled, this -+ // should pass -+ labelMgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of(""x"", ""y"")); -+ rm.adminService.replaceLabelsOnNode(ReplaceLabelsOnNodeRequest -+ .newInstance(ImmutableMap.of(NodeId.newInstance(""host"", 0), -+ (Set) ImmutableSet.of(""x"")))); -+ rm.close(); -+ } -+ -+ @Test -+ public void testRemoveClusterNodeLabelsWithDistributedConfigurationEnabled() -+ throws IOException, YarnException { -+ // create RM and set it's ACTIVE -+ MockRM rm = new MockRM(); -+ ((RMContextImpl) rm.getRMContext()) -+ .setHAServiceState(HAServiceState.ACTIVE); -+ RMNodeLabelsManager labelMgr = rm.rmContext.getNodeLabelManager(); -+ rm.adminService.isDistributedNodeLabelConfiguration = true; -+ -+ // by default, distributed configuration for node label is disabled, this -+ // should pass -+ labelMgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of(""x"", ""y"")); -+ rm.adminService -+ .removeFromClusterNodeLabels(RemoveFromClusterNodeLabelsRequest -+ .newInstance((Set) ImmutableSet.of(""x""))); -+ -+ Set clusterNodeLabels = labelMgr.getClusterNodeLabelNames(); -+ assertEquals(1,clusterNodeLabels.size()); -+ rm.close(); -+ } -+ - private String writeConfigurationXML(Configuration conf, String confXMLName) - throws IOException { - DataOutputStream output = null; -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/nodelabels/NullRMNodeLabelsManager.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/nodelabels/NullRMNodeLabelsManager.java -index 9548029d08769..2e21d261f615f 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/nodelabels/NullRMNodeLabelsManager.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/nodelabels/NullRMNodeLabelsManager.java -@@ -40,7 +40,8 @@ public void initNodeLabelStore(Configuration conf) { - this.store = new NodeLabelsStore(this) { +- + int i = 0; + int j = 0; - @Override -- public void recover() throws IOException { -+ public void recover(boolean ignoreNodeToLabelsMappings) -+ throws IOException { - // do nothing - } + int tempBlock = world.getTypeId(x - 1, y, z); -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServices.java -index 298246ca301e2..e4614f8c9ec7e 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServices.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServices.java -@@ -51,6 +51,7 @@ - import org.apache.hadoop.yarn.server.resourcemanager.MockRM; - import org.apache.hadoop.yarn.server.resourcemanager.RMContextImpl; - import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager; -+import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager; - import org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics; - import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler; - import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler; -@@ -623,6 +624,7 @@ public void testAppsRace() throws Exception { - null, null, null, null, null); - when(mockRM.getRMContext()).thenReturn(rmContext); - when(mockRM.getClientRMService()).thenReturn(mockClientSvc); -+ rmContext.setNodeLabelManager(mock(RMNodeLabelsManager.class)); +- i = (res.CheckSourceId(tempBlock)) ? i + 1 : i; ++ i = (sourceBlocks.contains(tempBlock)) ? i + 1 : i; + j = (tempBlock == 0) ? 
j + 1 : j; - RMWebServices webSvc = new RMWebServices(mockRM, new Configuration(), - mock(HttpServletResponse.class)); -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesNodeLabels.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesNodeLabels.java -index 40c54a30a6a8d..2d5518dc03cf8 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesNodeLabels.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesNodeLabels.java -@@ -19,10 +19,10 @@ - package org.apache.hadoop.yarn.server.resourcemanager.webapp; + tempBlock = world.getTypeId(x + 1, y, z); - import static org.junit.Assert.assertEquals; -+import static org.junit.Assert.assertFalse; - import static org.junit.Assert.assertTrue; +- i = (res.CheckSourceId(tempBlock)) ? i + 1 : i; ++ i = (sourceBlocks.contains(tempBlock)) ? i + 1 : i; + j = (tempBlock == 0) ? j + 1 : j; - import java.io.IOException; --import java.io.StringReader; - import java.io.StringWriter; + tempBlock = world.getTypeId(x, y, z - 1); - import javax.ws.rs.core.MediaType; -@@ -51,7 +51,6 @@ - import com.sun.jersey.api.client.WebResource; - import com.sun.jersey.api.json.JSONJAXBContext; - import com.sun.jersey.api.json.JSONMarshaller; --import com.sun.jersey.api.json.JSONUnmarshaller; - import com.sun.jersey.core.util.MultivaluedMapImpl; - import com.sun.jersey.guice.spi.container.servlet.GuiceContainer; - import com.sun.jersey.test.framework.WebAppDescriptor; -@@ -66,13 +65,13 @@ public class TestRMWebServicesNodeLabels extends JerseyTestBase { +- i = (res.CheckSourceId(tempBlock)) ? i + 1 : i; ++ i = (sourceBlocks.contains(tempBlock)) ? i + 1 : i; + j = (tempBlock == 0) ? j + 1 : j; - private String userName; - private String notUserName; -+ private RMWebServices rmWebService; + tempBlock = world.getTypeId(x, y, z + 1); + +- i = (res.CheckSourceId(tempBlock)) ? i + 1 : i; ++ i = (sourceBlocks.contains(tempBlock)) ? i + 1 : i; + j = (tempBlock == 0) ? 
j + 1 : j; + +- + if ((i == 3) && (j == 1)) + { +- world.setBlock(x, y, z, res.BlockId, 0, true, true, true); +- //this.world.f = true; +- //Block.byId[res.BlockId].a(this.world, x, y, z, this.rand); +- //this.world.f = false; ++ world.setBlock(x, y, z, blockId, 0, true, true, true); ++ // this.world.f = true; ++ // Block.byId[res.BlockId].a(this.world, x, y, z, this.rand); ++ // this.world.f = false; + } + } - private Injector injector = Guice.createInjector(new ServletModule() { -+ @Override - protected void configureServlets() { - bind(JAXBContextResolver.class); -- bind(RMWebServices.class); -- bind(GenericExceptionHandler.class); - try { - userName = UserGroupInformation.getCurrentUser().getShortUserName(); - } catch (IOException ioe) { -@@ -83,6 +82,9 @@ protected void configureServlets() { - conf = new YarnConfiguration(); - conf.set(YarnConfiguration.YARN_ADMIN_ACL, userName); - rm = new MockRM(conf); -+ rmWebService = new RMWebServices(rm,conf); -+ bind(RMWebServices.class).toInstance(rmWebService); -+ bind(GenericExceptionHandler.class); - bind(ResourceManager.class).toInstance(rm); - filter(""/*"").through( - TestRMWebServicesAppsModification.TestRMCustomAuthFilter.class); -@@ -113,7 +115,6 @@ public void testNodeLabels() throws JSONException, Exception { - ClientResponse response; - JSONObject json; - JSONArray jarr; -- String responseString; +- protected boolean ReadString(Resource res, String[] Props, BiomeConfig biomeConfig) throws NumberFormatException ++ public void load(List args) throws InvalidResourceException + { +- res.BlockId = CheckBlock(Props[0]); +- res.Frequency = CheckValue(Props[1], 1, 5000); +- res.Rarity = CheckValue(Props[2], 0, 100); +- res.MinAltitude = CheckValue(Props[3], 0, biomeConfig.worldConfig.WorldHeight); +- res.MaxAltitude = CheckValue(Props[4], 0, biomeConfig.worldConfig.WorldHeight, res.MinAltitude); +- +- res.SourceBlockId = new int[Props.length - 5]; +- for (int i = 5; i < Props.length; i++) +- res.SourceBlockId[i - 5] = CheckBlock(Props[i]); ++ if (args.size() < 6) ++ { ++ throw new InvalidResourceException(""Too few arguments supplied""); ++ } ++ blockId = getBlockId(args.get(0)); ++ blockData = getBlockData(args.get(0)); ++ frequency = getInt(args.get(1), 1, 5000); ++ rarity = getInt(args.get(2), 1, 100); ++ minAltitude = getInt(args.get(3), TerrainControl.worldDepth, TerrainControl.worldHeight); ++ maxAltitude = getInt(args.get(4), minAltitude + 1, TerrainControl.worldHeight); ++ sourceBlocks = new ArrayList(); ++ for (int i = 5; i < args.size(); i++) ++ { ++ sourceBlocks.add(getBlockId(args.get(i))); ++ } ++ } - // Add a label - response = -@@ -386,6 +387,93 @@ public void testNodeLabels() throws JSONException, Exception { - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); - String res = response.getEntity(String.class); - assertTrue(res.equals(""null"")); -+ -+ // Following test cases are to test replace when distributed node label -+ // configuration is on -+ // Reset for testing : add cluster labels -+ response = -+ r.path(""ws"") -+ .path(""v1"") -+ .path(""cluster"") -+ .path(""add-node-labels"") -+ .queryParam(""user.name"", userName) -+ .accept(MediaType.APPLICATION_JSON) -+ .entity(""{\""nodeLabels\"":[\""x\"",\""y\""]}"", -+ MediaType.APPLICATION_JSON).post(ClientResponse.class); -+ // Reset for testing : Add labels to a node -+ response = -+ r.path(""ws"").path(""v1"").path(""cluster"").path(""nodes"").path(""nid:0"") -+ .path(""replace-labels"").queryParam(""user.name"", userName) -+ 
.accept(MediaType.APPLICATION_JSON) -+ .entity(""{\""nodeLabels\"": [\""y\""]}"", MediaType.APPLICATION_JSON) -+ .post(ClientResponse.class); -+ LOG.info(""posted node nodelabel""); -+ -+ //setting rmWebService for Distributed NodeLabel Configuration -+ rmWebService.isDistributedNodeLabelConfiguration = true; -+ -+ // Case1 : Replace labels using node-to-labels -+ ntli = new NodeToLabelsInfo(); -+ nli = new NodeLabelsInfo(); -+ nli.getNodeLabels().add(""x""); -+ ntli.getNodeToLabels().put(""nid:0"", nli); -+ response = -+ r.path(""ws"") -+ .path(""v1"") -+ .path(""cluster"") -+ .path(""replace-node-to-labels"") -+ .queryParam(""user.name"", userName) -+ .accept(MediaType.APPLICATION_JSON) -+ .entity(toJson(ntli, NodeToLabelsInfo.class), -+ MediaType.APPLICATION_JSON).post(ClientResponse.class); -+ -+ // Verify, using node-to-labels that previous operation has failed -+ response = -+ r.path(""ws"").path(""v1"").path(""cluster"").path(""get-node-to-labels"") -+ .queryParam(""user.name"", userName) -+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class); -+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); -+ ntli = response.getEntity(NodeToLabelsInfo.class); -+ nli = ntli.getNodeToLabels().get(""nid:0""); -+ assertEquals(1, nli.getNodeLabels().size()); -+ assertFalse(nli.getNodeLabels().contains(""x"")); -+ -+ // Case2 : failure to Replace labels using replace-labels -+ response = -+ r.path(""ws"").path(""v1"").path(""cluster"").path(""nodes"").path(""nid:0"") -+ .path(""replace-labels"").queryParam(""user.name"", userName) -+ .accept(MediaType.APPLICATION_JSON) -+ .entity(""{\""nodeLabels\"": [\""x\""]}"", MediaType.APPLICATION_JSON) -+ .post(ClientResponse.class); -+ LOG.info(""posted node nodelabel""); -+ -+ // Verify, using node-to-labels that previous operation has failed -+ response = -+ r.path(""ws"").path(""v1"").path(""cluster"").path(""get-node-to-labels"") -+ .queryParam(""user.name"", userName) -+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class); -+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); -+ ntli = response.getEntity(NodeToLabelsInfo.class); -+ nli = ntli.getNodeToLabels().get(""nid:0""); -+ assertEquals(1, nli.getNodeLabels().size()); -+ assertFalse(nli.getNodeLabels().contains(""x"")); +- return true; ++ @Override ++ public ResourceType getType() ++ { ++ return ResourceType.biomeConfigResource; + } + + @Override +- protected String WriteString(Resource res, String blockSources) ++ public String makeString() + { +- String blockId = res.BlockIdToName(res.BlockId); +- if (res.BlockData > 0) +- { +- blockId += ""."" + res.BlockData; +- } +- return blockId + "","" + res.Frequency + "","" + res.Rarity + "","" + res.MinAltitude + "","" + res.MaxAltitude + blockSources; ++ return ""Liquid("" + makeMaterial(blockId, blockData) + "","" + frequency + "","" + rarity + "","" + minAltitude + "","" + maxAltitude + makeMaterial(sourceBlocks) + "")""; + } + } +\ No newline at end of file +diff --git a/common/src/com/khorn/terraincontrol/generator/resourcegens/OreGen.java b/common/src/com/khorn/terraincontrol/generator/resourcegens/OreGen.java +index 5511da455..222d0fcec 100644 +--- a/common/src/com/khorn/terraincontrol/generator/resourcegens/OreGen.java ++++ b/common/src/com/khorn/terraincontrol/generator/resourcegens/OreGen.java +@@ -1,38 +1,48 @@ + package com.khorn.terraincontrol.generator.resourcegens; + +-import com.khorn.terraincontrol.configuration.BiomeConfig; +-import com.khorn.terraincontrol.configuration.Resource; 
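The rewritten OreGen body just below walks a randomly oriented segment, centres a swelling-then-shrinking ellipsoid on each step, and replaces only block ids listed in sourceBlocks. The following is a rough, self-contained sketch of that idea under stated assumptions: a plain 3D id array stands in for LocalWorld and the radius profile is a simplified sine bump, so it is not the generator's exact math.

// Hedged sketch: 'world' is a plain id array rather than LocalWorld, and the
// radius formula is simplified compared to the OreGen expression below.
import java.util.Random;
import java.util.Set;

final class VeinSketch
{
    static void placeVein(int[][][] world, Random rand, int x, int y, int z,
                          int oreId, int maxSize, Set<Integer> sourceBlocks)
    {
        double angle = rand.nextDouble() * Math.PI;
        // Two endpoints of the vein segment, offset horizontally by the vein length.
        double x1 = x + Math.sin(angle) * maxSize / 8.0, x2 = x - Math.sin(angle) * maxSize / 8.0;
        double z1 = z + Math.cos(angle) * maxSize / 8.0, z2 = z - Math.cos(angle) * maxSize / 8.0;
        double y1 = y + rand.nextInt(3) - 2, y2 = y + rand.nextInt(3) - 2;

        for (int i = 0; i <= maxSize; i++)
        {
            double t = (double) i / maxSize;
            double cx = x1 + (x2 - x1) * t;
            double cy = y1 + (y2 - y1) * t;
            double cz = z1 + (z2 - z1) * t;
            // Radius swells towards the middle of the segment and shrinks at the ends.
            double radius = (Math.sin(Math.PI * t) + 1.0) * rand.nextDouble() * maxSize / 16.0 + 1.0;

            for (int bx = (int) (cx - radius); bx <= cx + radius; bx++)
            {
                for (int by = (int) (cy - radius); by <= cy + radius; by++)
                {
                    for (int bz = (int) (cz - radius); bz <= cz + radius; bz++)
                    {
                        double dx = bx + 0.5 - cx, dy = by + 0.5 - cy, dz = bz + 0.5 - cz;
                        boolean inside = dx * dx + dy * dy + dz * dz < radius * radius;
                        boolean inBounds = bx >= 0 && by >= 0 && bz >= 0
                                && bx < world.length && by < world[0].length && bz < world[0][0].length;
                        if (inside && inBounds && sourceBlocks.contains(world[bx][by][bz]))
                            world[bx][by][bz] = oreId;   // replace only whitelisted blocks
                    }
                }
            }
        }
    }
}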
++import java.util.ArrayList; ++import java.util.List; ++import java.util.Random; + -+ // Case3 : Remove cluster label should be successfull -+ response = -+ r.path(""ws"").path(""v1"").path(""cluster"") -+ .path(""remove-node-labels"") -+ .queryParam(""user.name"", userName) -+ .accept(MediaType.APPLICATION_JSON) -+ .entity(""{\""nodeLabels\"":\""x\""}"", MediaType.APPLICATION_JSON) -+ .post(ClientResponse.class); -+ // Verify -+ response = -+ r.path(""ws"").path(""v1"").path(""cluster"") -+ .path(""get-node-labels"").queryParam(""user.name"", userName) -+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class); -+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); -+ json = response.getEntity(JSONObject.class); -+ assertEquals(""y"", json.getString(""nodeLabels"")); - } + import com.khorn.terraincontrol.LocalWorld; ++import com.khorn.terraincontrol.TerrainControl; ++import com.khorn.terraincontrol.configuration.Resource; ++import com.khorn.terraincontrol.exception.InvalidResourceException; + import com.khorn.terraincontrol.util.MathHelper; - @SuppressWarnings(""rawtypes"") -@@ -396,13 +484,4 @@ private String toJson(Object nsli, Class klass) throws Exception { - jm.marshallToJSON(nsli, sw); - return sw.toString(); - } -- -- @SuppressWarnings({ ""rawtypes"", ""unchecked"" }) -- private Object fromJson(String json, Class klass) throws Exception { -- StringReader sr = new StringReader(json); -- JSONJAXBContext ctx = new JSONJAXBContext(klass); -- JSONUnmarshaller jm = ctx.createJSONUnmarshaller(); -- return jm.unmarshalFromJSON(sr, klass); -- } +-import java.util.Random; - - }" -62f5e2a99d4f5c8bebf2b7ad581cae83ac437d0b,orientdb,Minor optimization in RidBag--,p,https://github.com/JetBrains/intellij-community,⚠️ HTTP 404: Not Found -e98541030c5e0aadfdb194dbb55254f404219600,orientdb,Huge refactoring on GraphDB: - changed class- names in vertex and edge - Optimized memory consumption by removing nested- records - Optimized speed in ORecord.equals() and hashCode(): now avoid field- checks (experimental)--,p,https://github.com/JetBrains/intellij-community,⚠️ HTTP 404: Not Found -6d11a551dcf9bb2ed2bd10530b1fbaf6f6380804,ReactiveX-RxJava,Added create with initial capacity,a,https://github.com/ReactiveX/RxJava,"diff --git a/rxjava-core/src/main/java/rx/subjects/SubjectSubscriptionManager.java b/rxjava-core/src/main/java/rx/subjects/SubjectSubscriptionManager.java -index 4ea3e6e385..ff7033c0c5 100644 ---- a/rxjava-core/src/main/java/rx/subjects/SubjectSubscriptionManager.java -+++ b/rxjava-core/src/main/java/rx/subjects/SubjectSubscriptionManager.java -@@ -17,8 +17,6 @@ +-public class OreGen extends ResourceGenBase ++public class OreGen extends Resource + { ++ private int blockId; ++ private int blockData; ++ private int minAltitude; ++ private int maxAltitude; ++ private int maxSize; ++ private List sourceBlocks; ++ + @Override +- protected void SpawnResource(LocalWorld world, Random rand, Resource res, int x, int z) ++ public void spawn(LocalWorld world, Random rand, int x, int z) + { +- int y = rand.nextInt(res.MaxAltitude - res.MinAltitude) + res.MinAltitude; ++ int y = rand.nextInt(maxAltitude - minAltitude) + minAltitude; - import java.util.Arrays; - import java.util.Collection; --import java.util.Collections; --import java.util.List; - import java.util.concurrent.CountDownLatch; - import java.util.concurrent.atomic.AtomicReference; + float f = rand.nextFloat() * 3.141593F; -@@ -126,7 +124,7 @@ protected void terminate(Action1>> onTermi - */ - try { - // had to circumvent 
type check, we know what the array contains -- onTerminate.call((Collection)newState.observersList); -+ onTerminate.call((Collection)Arrays.asList(newState.observers)); - } finally { - // mark that termination is completed - newState.terminationLatch.countDown(); -@@ -141,25 +139,22 @@ public SubjectObserver[] rawSnapshot() { - return state.get().observers; - } +- double d1 = x + 8 + MathHelper.sin(f) * res.MaxSize / 8.0F; +- double d2 = x + 8 - MathHelper.sin(f) * res.MaxSize / 8.0F; +- double d3 = z + 8 + MathHelper.cos(f) * res.MaxSize / 8.0F; +- double d4 = z + 8 - MathHelper.cos(f) * res.MaxSize / 8.0F; ++ double d1 = x + 8 + MathHelper.sin(f) * maxSize / 8.0F; ++ double d2 = x + 8 - MathHelper.sin(f) * maxSize / 8.0F; ++ double d3 = z + 8 + MathHelper.cos(f) * maxSize / 8.0F; ++ double d4 = z + 8 - MathHelper.cos(f) * maxSize / 8.0F; -+ @SuppressWarnings(""rawtypes"") - protected static class State { - final boolean terminated; - final CountDownLatch terminationLatch; - final Subscription[] subscriptions; -- final SubjectObserver[] observers; -+ final SubjectObserver[] observers; - // to avoid lots of empty arrays - final Subscription[] EMPTY_S = new Subscription[0]; -- @SuppressWarnings(""rawtypes"") - // to avoid lots of empty arrays - final SubjectObserver[] EMPTY_O = new SubjectObserver[0]; -- @SuppressWarnings(""rawtypes"") -- final List> observersList; - private State(boolean isTerminated, CountDownLatch terminationLatch, - Subscription[] subscriptions, SubjectObserver[] observers) { - this.terminationLatch = terminationLatch; - this.terminated = isTerminated; - this.subscriptions = subscriptions; - this.observers = observers; -- this.observersList = Arrays.asList(this.observers); - } + double d5 = y + rand.nextInt(3) - 2; + double d6 = y + rand.nextInt(3) - 2; - State() { -@@ -167,7 +162,6 @@ private State(boolean isTerminated, CountDownLatch terminationLatch, - this.terminationLatch = null; - this.subscriptions = EMPTY_S; - this.observers = EMPTY_O; -- observersList = Collections.emptyList(); +- for (int i = 0; i <= res.MaxSize; i++) ++ for (int i = 0; i <= maxSize; i++) + { +- double d7 = d1 + (d2 - d1) * i / res.MaxSize; +- double d8 = d5 + (d6 - d5) * i / res.MaxSize; +- double d9 = d3 + (d4 - d3) * i / res.MaxSize; ++ double d7 = d1 + (d2 - d1) * i / maxSize; ++ double d8 = d5 + (d6 - d5) * i / maxSize; ++ double d9 = d3 + (d4 - d3) * i / maxSize; + +- double d10 = rand.nextDouble() * res.MaxSize / 16.0D; +- double d11 = (MathHelper.sin(i * 3.141593F / res.MaxSize) + 1.0F) * d10 + 1.0D; +- double d12 = (MathHelper.sin(i * 3.141593F / res.MaxSize) + 1.0F) * d10 + 1.0D; ++ double d10 = rand.nextDouble() * maxSize / 16.0D; ++ double d11 = (MathHelper.sin(i * 3.141593F / maxSize) + 1.0F) * d10 + 1.0D; ++ double d12 = (MathHelper.sin(i * 3.141593F / maxSize) + 1.0F) * d10 + 1.0D; + + int j = MathHelper.floor(d7 - d11 / 2.0D); + int k = MathHelper.floor(d8 - d12 / 2.0D); +@@ -55,15 +65,9 @@ protected void SpawnResource(LocalWorld world, Random rand, Resource res, int x, + for (int i5 = m; i5 <= i2; i5++) + { + double d15 = (i5 + 0.5D - d9) / (d11 / 2.0D); +- if ((d13 * d13 + d14 * d14 + d15 * d15 < 1.0D) && res.CheckSourceId(world.getTypeId(i3, i4, i5))) ++ if ((d13 * d13 + d14 * d14 + d15 * d15 < 1.0D) && sourceBlocks.contains(world.getTypeId(i3, i4, i5))) + { +- if (res.BlockData > 0) +- { +- world.setBlock(i3, i4, i5, res.BlockId, res.BlockData, false, false, false); +- } else +- { +- world.setBlock(i3, i4, i5, res.BlockId, 0, false, false, false); +- } ++ world.setBlock(i3, 
i4, i5, blockId, blockData, false, false, false); + } + } + } +@@ -74,40 +78,35 @@ protected void SpawnResource(LocalWorld world, Random rand, Resource res, int x, + } + + @Override +- protected boolean ReadString(Resource res, String[] Props, BiomeConfig biomeConfig) throws NumberFormatException ++ public void load(List args) throws InvalidResourceException + { +- if (Props[0].contains(""."")) ++ if (args.size() < 7) + { +- String[] block = Props[0].split(""\\.""); +- res.BlockId = CheckBlock(block[0]); +- res.BlockData = CheckValue(block[1], 0, 16); +- } else ++ throw new InvalidResourceException(""Too few arguments supplied""); ++ } ++ blockId = getBlockId(args.get(0)); ++ blockData = getBlockData(args.get(0)); ++ maxSize = getInt(args.get(1), 1, 128); ++ frequency = getInt(args.get(2), 1, 100); ++ rarity = getInt(args.get(3), 1, 100); ++ minAltitude = getInt(args.get(4), TerrainControl.worldDepth, TerrainControl.worldHeight); ++ maxAltitude = getInt(args.get(5), minAltitude + 1, TerrainControl.worldHeight); ++ sourceBlocks = new ArrayList(); ++ for (int i = 6; i < args.size(); i++) + { +- res.BlockId = CheckBlock(Props[0]); ++ sourceBlocks.add(getBlockId(args.get(i))); } ++ } - public State terminate() {" -7e1c2d454f76d7abc5c4c9af1b93aeae08231ba9,eclipse$bpmn2-modeler,"Working towards BRMS 5.3 profile - minimal functionality to support -BRMS 5.3 release.",p,,⚠️ Could not parse repo info -b0caefee80a42d6737b3a255316487b668fab104,Delta Spike,"DELTASPIKE-278 add category to Message API -",a,https://github.com/apache/deltaspike,"diff --git a/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/api/message/Message.java b/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/api/message/Message.java -index 04785736a..0cf3cb64e 100644 ---- a/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/api/message/Message.java -+++ b/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/api/message/Message.java -@@ -63,4 +63,30 @@ public interface Message extends Serializable - */ - String toString(MessageContext messageContext); +- res.MaxSize = CheckValue(Props[1], 1, 128); +- res.Frequency = CheckValue(Props[2], 1, 100); +- res.Rarity = CheckValue(Props[3], 0, 100); +- res.MinAltitude = CheckValue(Props[4], 0, biomeConfig.worldConfig.WorldHeight); +- res.MaxAltitude = CheckValue(Props[5], 0, biomeConfig.worldConfig.WorldHeight, res.MinAltitude); +- +- res.SourceBlockId = new int[Props.length - 6]; +- for (int i = 6; i < Props.length; i++) +- res.SourceBlockId[i - 6] = CheckBlock(Props[i]); +- +- return true; +- ++ @Override ++ public ResourceType getType() ++ { ++ return ResourceType.biomeConfigResource; + } -+ /** -+ * Renders the Message to a String, using the {@link MessageContext} -+ * which created the Message. -+ * While resolving the message we will -+ * first search for a messageTemplate with the given category by -+ * just adding a dot '.' and the category String to the -+ * {@link #getTemplate()}. -+ * If no such a template exists we will fallback to the version -+ * without the category String -+ */ -+ String toString(String category); -+ -+ /** -+ * Renders the Message to a String, using an -+ * arbitrary {@link MessageContext}. -+ * While resolving the message we will -+ * first search for a messageTemplate with the given category by -+ * just adding a dot '.' and the category String to the -+ * {@link #getTemplate()}. 
-+ * If no such a template exists we will fallback to the version -+ * without the category String -+ */ -+ String toString(MessageContext messageContext, String category); -+ -+ -+ - } -diff --git a/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/message/DefaultMessage.java b/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/message/DefaultMessage.java -index 10bb4d1ec..cae0f48ee 100644 ---- a/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/message/DefaultMessage.java -+++ b/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/message/DefaultMessage.java -@@ -84,8 +84,15 @@ public Serializable[] getArguments() + @Override +- protected String WriteString(Resource res, String blockSources) ++ public String makeString() + { +- String blockId = res.BlockIdToName(res.BlockId); +- if (res.BlockData > 0) +- { +- blockId += ""."" + res.BlockData; +- } +- return blockId + "","" + res.MaxSize + "","" + res.Frequency + "","" + res.Rarity + "","" + res.MinAltitude + "","" + res.MaxAltitude + blockSources; ++ return ""Ore("" + makeMaterial(blockId, blockData) + "","" + maxSize + "","" + frequency + "","" + rarity + "","" + minAltitude + "","" + maxAltitude + makeMaterial(sourceBlocks) + "")""; } + } +diff --git a/common/src/com/khorn/terraincontrol/generator/resourcegens/PlantGen.java b/common/src/com/khorn/terraincontrol/generator/resourcegens/PlantGen.java +index 9d5af4050..2aae19816 100644 +--- a/common/src/com/khorn/terraincontrol/generator/resourcegens/PlantGen.java ++++ b/common/src/com/khorn/terraincontrol/generator/resourcegens/PlantGen.java +@@ -1,69 +1,68 @@ + package com.khorn.terraincontrol.generator.resourcegens; + +-import com.khorn.terraincontrol.configuration.BiomeConfig; + import com.khorn.terraincontrol.configuration.Resource; ++import com.khorn.terraincontrol.exception.InvalidResourceException; + import com.khorn.terraincontrol.LocalWorld; ++import com.khorn.terraincontrol.TerrainControl; ++import java.util.ArrayList; ++import java.util.List; + import java.util.Random; +-public class PlantGen extends ResourceGenBase ++public class PlantGen extends Resource + { ++ private int blockId; ++ private int blockData; ++ private int minAltitude; ++ private int maxAltitude; ++ private List sourceBlocks; + @Override - public String toString() -+ { -+ return toString((String) null); -+ } -+ -+ @Override -+ public String toString(String category) +- protected void SpawnResource(LocalWorld world, Random rand, Resource res, int x, int z) ++ public void spawn(LocalWorld world, Random rand, int x, int z) { +- int y = rand.nextInt(res.MaxAltitude - res.MinAltitude) + res.MinAltitude; ++ int y = rand.nextInt(maxAltitude - minAltitude) + minAltitude; - // the string construction happens in 3 phases -@@ -141,11 +148,17 @@ private String markAsUnresolved(String template) + for (int i = 0; i < 64; i++) + { + int j = x + rand.nextInt(8) - rand.nextInt(8); + int k = y + rand.nextInt(4) - rand.nextInt(4); + int m = z + rand.nextInt(8) - rand.nextInt(8); +- if ((!world.isEmpty(j, k, m)) || (!res.CheckSourceId(world.getTypeId(j, k - 1, m)))) ++ if ((!world.isEmpty(j, k, m)) || (!sourceBlocks.contains(world.getTypeId(j, k - 1, m)))) + continue; + +- if (res.BlockData > 0) +- { +- world.setBlock(j, k, m, res.BlockId, res.BlockData, false, false, false); +- } else +- { +- world.setBlock(j, k, m, res.BlockId, 0, false, false, false); +- } ++ world.setBlock(j, k, m, blockId, blockData, false, false, false); + } + } @Override - public String 
toString(MessageContext messageContext) -+ { -+ return toString(messageContext, null); +- protected boolean ReadString(Resource res, String[] Props, BiomeConfig biomeConfig) throws NumberFormatException ++ public void load(List args) throws InvalidResourceException + { +- if (Props[0].contains(""."")) ++ if (args.size() < 6) + { +- String[] block = Props[0].split(""\\.""); +- res.BlockId = CheckBlock(block[0]); +- res.BlockData = CheckValue(block[1], 0, 16); +- } else ++ throw new InvalidResourceException(""Too few arguments supplied""); ++ } ++ blockId = getBlockId(args.get(0)); ++ blockData = getBlockData(args.get(0)); ++ frequency = getInt(args.get(1), 1, 100); ++ rarity = getInt(args.get(2), 1, 100); ++ minAltitude = getInt(args.get(3), TerrainControl.worldDepth, TerrainControl.worldHeight); ++ maxAltitude = getInt(args.get(4), minAltitude + 1, TerrainControl.worldHeight); ++ sourceBlocks = new ArrayList(); ++ for (int i = 5; i < args.size(); i++) + { +- res.BlockId = CheckBlock(Props[0]); ++ sourceBlocks.add(getBlockId(args.get(i))); + } + } -+ + +- res.Frequency = CheckValue(Props[1], 1, 100); +- res.Rarity = CheckValue(Props[2], 0, 100); +- res.MinAltitude = CheckValue(Props[3], 0, biomeConfig.worldConfig.WorldHeight); +- res.MaxAltitude = CheckValue(Props[4], 0, biomeConfig.worldConfig.WorldHeight, res.MinAltitude); +- +- res.SourceBlockId = new int[Props.length - 5]; +- for (int i = 5; i < Props.length; i++) +- res.SourceBlockId[i - 5] = CheckBlock(Props[i]); +- +- return true; + @Override -+ public String toString(MessageContext messageContext, String category) ++ public ResourceType getType() ++ { ++ return ResourceType.biomeConfigResource; + } + + @Override +- protected String WriteString(Resource res, String blockSources) ++ public String makeString() { - return messageContext.message() - .template(getTemplate()) - .argument(getArguments()) -- .toString(); -+ .toString(category); - }" -dc9e9cb4cc87f132a32a00e6589d807350f0b8e0,elasticsearch,Aggregations: change to default shard_size in- terms aggregation--The default shard size in the terms aggregation now uses BucketUtils.suggestShardSideQueueSize() to set the shard size if the user does not specify it as a parameter.--Closes -6857-,p,https://github.com/elastic/elasticsearch,"diff --git a/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsParser.java b/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsParser.java -index c4b57064e80eb..c38f136dd9b29 100644 ---- a/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsParser.java -+++ b/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsParser.java -@@ -21,6 +21,7 @@ - import org.elasticsearch.common.xcontent.XContentParser; - import org.elasticsearch.search.aggregations.Aggregator; - import org.elasticsearch.search.aggregations.AggregatorFactory; -+import org.elasticsearch.search.aggregations.bucket.BucketUtils; - import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; - import org.elasticsearch.search.aggregations.support.ValuesSourceParser; - import org.elasticsearch.search.internal.SearchContext; -@@ -32,7 +33,6 @@ - */ - public class TermsParser implements Aggregator.Parser { +- String blockId = res.BlockIdToName(res.BlockId); +- if (res.BlockData > 0) +- { +- blockId += ""."" + res.BlockData; +- } +- return blockId + "","" + res.Frequency + "","" + res.Rarity + "","" + res.MinAltitude + "","" + res.MaxAltitude + blockSources; ++ return ""Plant("" + makeMaterial(blockId, 
blockData) + "","" + frequency + "","" + rarity + "","" + minAltitude + "","" + maxAltitude + makeMaterial(sourceBlocks) + "")""; + } + } +\ No newline at end of file +diff --git a/common/src/com/khorn/terraincontrol/generator/resourcegens/ReedGen.java b/common/src/com/khorn/terraincontrol/generator/resourcegens/ReedGen.java +index 89cf0f368..e10d6f272 100644 +--- a/common/src/com/khorn/terraincontrol/generator/resourcegens/ReedGen.java ++++ b/common/src/com/khorn/terraincontrol/generator/resourcegens/ReedGen.java +@@ -1,65 +1,73 @@ + package com.khorn.terraincontrol.generator.resourcegens; + + import com.khorn.terraincontrol.LocalWorld; +-import com.khorn.terraincontrol.configuration.BiomeConfig; ++import com.khorn.terraincontrol.TerrainControl; + import com.khorn.terraincontrol.configuration.Resource; ++import com.khorn.terraincontrol.exception.InvalidResourceException; -- ++import java.util.ArrayList; ++import java.util.List; + import java.util.Random; + +-public class ReedGen extends ResourceGenBase ++public class ReedGen extends Resource + { + ++ private int blockId; ++ private int blockData; ++ private int minAltitude; ++ private int maxAltitude; ++ private List sourceBlocks; ++ @Override - public String type() { - return StringTerms.TYPE.name(); -@@ -41,19 +41,22 @@ public String type() { - @Override - public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException { - TermsParametersParser aggParser = new TermsParametersParser(); -- ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, StringTerms.TYPE, context) -- .scriptable(true) -- .formattable(true) -- .requiresSortedValues(true) -- .requiresUniqueValues(true) -- .build(); -+ ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, StringTerms.TYPE, context).scriptable(true).formattable(true) -+ .requiresSortedValues(true).requiresUniqueValues(true).build(); - IncludeExclude.Parser incExcParser = new IncludeExclude.Parser(aggregationName, StringTerms.TYPE, context); - aggParser.parse(aggregationName, parser, context, vsParser, incExcParser); - -+ InternalOrder order = resolveOrder(aggParser.getOrderKey(), aggParser.isOrderAsc()); - TermsAggregator.BucketCountThresholds bucketCountThresholds = aggParser.getBucketCountThresholds(); -+ if (!(order == InternalOrder.TERM_ASC || order == InternalOrder.TERM_DESC) -+ && bucketCountThresholds.getShardSize() == aggParser.getDefaultBucketCountThresholds().getShardSize()) { -+ // The user has not made a shardSize selection. 
Use default heuristic to avoid any wrong-ranking caused by distributed counting -+ bucketCountThresholds.setShardSize(BucketUtils.suggestShardSideQueueSize(bucketCountThresholds.getRequiredSize(), -+ context.numberOfShards())); -+ } - bucketCountThresholds.ensureValidity(); -- InternalOrder order = resolveOrder(aggParser.getOrderKey(), aggParser.isOrderAsc()); -- return new TermsAggregatorFactory(aggregationName, vsParser.config(), order, bucketCountThresholds, aggParser.getIncludeExclude(), aggParser.getExecutionHint(), aggParser.getCollectionMode()); -+ return new TermsAggregatorFactory(aggregationName, vsParser.config(), order, bucketCountThresholds, aggParser.getIncludeExclude(), -+ aggParser.getExecutionHint(), aggParser.getCollectionMode()); +- protected void SpawnResource(LocalWorld world, Random rand, Resource res, int x, int z) ++ public void spawn(LocalWorld world, Random rand, int x, int z) + { + int y = world.getHighestBlockYAt(x, z); +- if (y > res.MaxAltitude || y < res.MinAltitude || (!world.getMaterial(x - 1, y - 1, z).isLiquid() && !world.getMaterial(x + 1, y - 1, z).isLiquid() && !world.getMaterial(x, y - 1, z - 1).isLiquid() && !world.getMaterial(x, y - 1, z + 1).isLiquid())) ++ if (y > maxAltitude ++ || y < minAltitude ++ || (!world.getMaterial(x - 1, y - 1, z).isLiquid() && !world.getMaterial(x + 1, y - 1, z).isLiquid() && !world.getMaterial(x, y - 1, z - 1).isLiquid() && !world.getMaterial(x, y - 1, ++ z + 1).isLiquid())) + { + return; + } +- if (!res.CheckSourceId(world.getTypeId(x, y - 1, z))) ++ if (!sourceBlocks.contains(world.getTypeId(x, y - 1, z))) + { + return; + } +- ++ + int n = 1 + rand.nextInt(2); + for (int i1 = 0; i1 < n; i1++) +- world.setBlock(x, y + i1, z, res.BlockId, res.BlockData, false, false, false); ++ world.setBlock(x, y + i1, z, blockId, blockData, false, false, false); } - static InternalOrder resolveOrder(String key, boolean asc) { -diff --git a/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTermsTests.java b/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTermsTests.java -index 7251617f374ee..4bdaecc646d0c 100644 ---- a/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTermsTests.java -+++ b/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTermsTests.java -@@ -45,6 +45,31 @@ public void noShardSize_string() throws Exception { - .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.count(false))) - .execute().actionGet(); - -+ Terms terms = response.getAggregations().get(""keys""); -+ Collection buckets = terms.getBuckets(); -+ assertThat(buckets.size(), equalTo(3)); -+ Map expected = ImmutableMap.builder() -+ .put(""1"", 8l) -+ .put(""3"", 8l) -+ .put(""2"", 5l) -+ .build(); -+ for (Terms.Bucket bucket : buckets) { -+ assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsText().string()))); -+ } -+ } -+ -+ @Test -+ public void shardSizeEqualsSize_string() throws Exception { -+ createIdx(""type=string,index=not_analyzed""); -+ -+ indexData(); -+ -+ SearchResponse response = client().prepareSearch(""idx"").setTypes(""type"") -+ .setQuery(matchAllQuery()) -+ .addAggregation(terms(""keys"").field(""key"").size(3).shardSize(3) -+ .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.count(false))) -+ .execute().actionGet(); -+ - Terms terms = response.getAggregations().get(""keys""); - Collection buckets = terms.getBuckets(); - assertThat(buckets.size(), equalTo(3)); -@@ -109,6 +134,31 @@ public void 
withShardSize_string_singleShard() throws Exception { - assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKey()))); + @Override +- protected boolean ReadString(Resource res, String[] Props, BiomeConfig biomeConfig) throws NumberFormatException ++ public void load(List args) throws InvalidResourceException + { +- if (Props[0].contains(""."")) ++ if (args.size() < 6) + { +- String[] block = Props[0].split(""\\.""); +- res.BlockId = CheckBlock(block[0]); +- res.BlockData = CheckValue(block[1], 0, 16); +- } else ++ throw new InvalidResourceException(""Too few arguments supplied""); ++ } ++ blockId = getBlockId(args.get(0)); ++ blockData = getBlockData(args.get(0)); ++ frequency = getInt(args.get(1), 1, 100); ++ rarity = getInt(args.get(2), 1, 100); ++ minAltitude = getInt(args.get(3), TerrainControl.worldDepth, TerrainControl.worldHeight); ++ maxAltitude = getInt(args.get(4), minAltitude + 1, TerrainControl.worldHeight); ++ sourceBlocks = new ArrayList(); ++ for (int i = 5; i < args.size(); i++) + { +- res.BlockId = CheckBlock(Props[0]); ++ sourceBlocks.add(getBlockId(args.get(i))); } - } -+ -+ @Test -+ public void noShardSizeTermOrder_string() throws Exception { -+ createIdx(""type=string,index=not_analyzed""); -+ -+ indexData(); -+ -+ SearchResponse response = client().prepareSearch(""idx"").setTypes(""type"") -+ .setQuery(matchAllQuery()) -+ .addAggregation(terms(""keys"").field(""key"").size(3) -+ .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.term(true))) -+ .execute().actionGet(); -+ -+ Terms terms = response.getAggregations().get(""keys""); -+ Collection buckets = terms.getBuckets(); -+ assertThat(buckets.size(), equalTo(3)); -+ Map expected = ImmutableMap.builder() -+ .put(""1"", 8l) -+ .put(""2"", 5l) -+ .put(""3"", 8l) -+ .build(); -+ for (Terms.Bucket bucket : buckets) { -+ assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsText().string()))); -+ } + } - @Test - public void noShardSize_long() throws Exception { -@@ -123,6 +173,32 @@ public void noShardSize_long() throws Exception { - .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.count(false))) - .execute().actionGet(); +- res.Frequency = CheckValue(Props[1], 1, 100); +- res.Rarity = CheckValue(Props[2], 0, 100); +- res.MinAltitude = CheckValue(Props[3], 0, biomeConfig.worldConfig.WorldHeight); +- res.MaxAltitude = CheckValue(Props[4], 0, biomeConfig.worldConfig.WorldHeight, res.MinAltitude); +- +- res.SourceBlockId = new int[Props.length - 5]; +- for (int i = 5; i < Props.length; i++) +- res.SourceBlockId[i - 5] = CheckBlock(Props[i]); +- +- return true; ++ @Override ++ public ResourceType getType() ++ { ++ return ResourceType.biomeConfigResource; + } -+ Terms terms = response.getAggregations().get(""keys""); -+ Collection buckets = terms.getBuckets(); -+ assertThat(buckets.size(), equalTo(3)); -+ Map expected = ImmutableMap.builder() -+ .put(1, 8l) -+ .put(3, 8l) -+ .put(2, 5l) -+ .build(); -+ for (Terms.Bucket bucket : buckets) { -+ assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue()))); -+ } -+ } -+ -+ @Test -+ public void shardSizeEqualsSize_long() throws Exception { -+ -+ createIdx(""type=long""); -+ -+ indexData(); -+ -+ SearchResponse response = client().prepareSearch(""idx"").setTypes(""type"") -+ .setQuery(matchAllQuery()) -+ .addAggregation(terms(""keys"").field(""key"").size(3).shardSize(3) -+ .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.count(false))) -+ 
.execute().actionGet(); -+ - Terms terms = response.getAggregations().get(""keys""); - Collection buckets = terms.getBuckets(); - assertThat(buckets.size(), equalTo(3)); -@@ -188,6 +264,32 @@ public void withShardSize_long_singleShard() throws Exception { - } + @Override +- protected String WriteString(Resource res, String blockSources) ++ public String makeString() + { +- String blockId = res.BlockIdToName(res.BlockId); +- if (res.BlockData > 0) +- { +- blockId += ""."" + res.BlockData; +- } +- return blockId + "","" + res.Frequency + "","" + res.Rarity + "","" + res.MinAltitude + "","" + res.MaxAltitude + blockSources; ++ return ""Reed("" + makeMaterial(blockId, blockData) + "","" + frequency + "","" + rarity + "","" + minAltitude + "","" + maxAltitude + makeMaterial(sourceBlocks) + "")""; } + } +\ No newline at end of file +diff --git a/common/src/com/khorn/terraincontrol/generator/resourcegens/ResourceGenBase.java b/common/src/com/khorn/terraincontrol/generator/resourcegens/ResourceGenBase.java +deleted file mode 100644 +index 3b0e7dcc0..000000000 +--- a/common/src/com/khorn/terraincontrol/generator/resourcegens/ResourceGenBase.java ++++ /dev/null +@@ -1,92 +0,0 @@ +-package com.khorn.terraincontrol.generator.resourcegens; +- +-import com.khorn.terraincontrol.DefaultMaterial; +-import com.khorn.terraincontrol.configuration.BiomeConfig; +-import com.khorn.terraincontrol.configuration.Resource; +-import com.khorn.terraincontrol.LocalWorld; +- +-import java.util.Random; +- +-public abstract class ResourceGenBase +-{ +- public void Process(LocalWorld world, Random rand, Resource res, int _x, int _z) +- { +- for (int t = 0; t < res.Frequency; t++) +- { +- if (rand.nextInt(100) > res.Rarity) +- continue; +- int x = _x + rand.nextInt(16) + 8; +- int z = _z + rand.nextInt(16) + 8; +- SpawnResource(world, rand, res, x, z); +- } +- +- } +- +- protected abstract void SpawnResource(LocalWorld world, Random rand, Resource res, int x, int z); +- +- public boolean ReadFromString(Resource res, String[] line, BiomeConfig biomeConfig) +- { +- if (line.length < res.Type.MinProperties) +- return false; +- try +- { +- return this.ReadString(res, line, biomeConfig); +- +- } catch (NumberFormatException e) +- { +- return false; +- } +- } +- +- // ToDo make it more logical. 
+- public String WriteToString(Resource res) +- { +- String sources = """"; +- for (int id : res.SourceBlockId) +- sources += "","" + res.BlockIdToName(id); +- +- return res.Type.name() + ""("" + this.WriteString(res, sources) + "")""; +- } +- +- public String WriteSettingOnly(Resource res) +- { +- String sources = """"; +- for (int id : res.SourceBlockId) +- sources += "","" + res.BlockIdToName(id); +- return this.WriteString(res, sources); +- } +- +- protected abstract String WriteString(Resource res, String blockSources); +- +- protected abstract boolean ReadString(Resource res, String[] Props, BiomeConfig biomeConfig) throws NumberFormatException; +- +- protected int CheckValue(String str, int min, int max) throws NumberFormatException +- { +- int value = Integer.valueOf(str); +- if (value > max) +- return max; +- else if (value < min) +- return min; +- else +- return value; +- } +- +- protected int CheckValue(String str, int min, int max, int minValue) throws NumberFormatException +- { +- int value = CheckValue(str, min, max); +- +- if (value < minValue) +- return minValue + 1; +- else +- return value; +- } +- +- protected int CheckBlock(String block) throws NumberFormatException +- { +- DefaultMaterial mat = DefaultMaterial.getMaterial(block); +- if (mat != null) +- return mat.id; +- +- return CheckValue(block, 0, 256); +- } +-} +\ No newline at end of file +diff --git a/common/src/com/khorn/terraincontrol/generator/resourcegens/ResourceType.java b/common/src/com/khorn/terraincontrol/generator/resourcegens/ResourceType.java +index 73097162a..1489912f6 100644 +--- a/common/src/com/khorn/terraincontrol/generator/resourcegens/ResourceType.java ++++ b/common/src/com/khorn/terraincontrol/generator/resourcegens/ResourceType.java +@@ -1,49 +1,6 @@ + package com.khorn.terraincontrol.generator.resourcegens; -+ @Test -+ public void noShardSizeTermOrder_long() throws Exception { +-import com.khorn.terraincontrol.customobjects.CustomObjectGen; +- + public enum ResourceType + { +- Ore(OreGen.class, false, 7), +- UnderWaterOre(UnderWaterOreGen.class, 5), +- Plant(PlantGen.class, 6), +- Liquid(LiquidGen.class, 6), +- Grass(GrassGen.class, 5), +- Reed(ReedGen.class, 6), +- Cactus(CactusGen.class, 6), +- Dungeon(DungeonGen.class, 4), +- Tree(TreeGen.class, 3), +- Sapling(TreeGen.class, 3), +- CustomObject(CustomObjectGen.class, 0), +- UnderGroundLake(UndergroundLakeGen.class, 6), +- AboveWaterRes(AboveWaterGen.class, 3), +- Vines(VinesGen.class, 4), +- SmallLake(SmallLakeGen.class,5); +- +- public ResourceGenBase Generator; +- public final boolean CreateNewChunks; +- public final int MinProperties; +- +- +- private ResourceType(Class c, int props) +- { +- this(c, true, props); +- } +- +- private ResourceType(Class c, boolean createNewChunks, int props) +- { +- this.CreateNewChunks = createNewChunks; +- this.MinProperties = props; +- try +- { +- Generator = c.newInstance(); +- +- } catch (InstantiationException e) +- { +- e.printStackTrace(); +- } catch (IllegalAccessException e) +- { +- e.printStackTrace(); +- } +- } ++ biomeConfigResource, saplingResource + } +diff --git a/common/src/com/khorn/terraincontrol/generator/resourcegens/ResourcesManager.java b/common/src/com/khorn/terraincontrol/generator/resourcegens/ResourcesManager.java +new file mode 100644 +index 000000000..349a07313 +--- /dev/null ++++ b/common/src/com/khorn/terraincontrol/generator/resourcegens/ResourcesManager.java +@@ -0,0 +1,88 @@ ++package com.khorn.terraincontrol.generator.resourcegens; + -+ createIdx(""type=long""); ++import 
java.util.List; ++import java.util.Map; + -+ indexData(); ++import com.khorn.terraincontrol.TerrainControl; ++import com.khorn.terraincontrol.configuration.BiomeConfig; ++import com.khorn.terraincontrol.configuration.Resource; ++import com.khorn.terraincontrol.configuration.WorldConfig; ++import com.khorn.terraincontrol.exception.InvalidResourceException; + -+ SearchResponse response = client().prepareSearch(""idx"").setTypes(""type"") -+ .setQuery(matchAllQuery()) -+ .addAggregation(terms(""keys"").field(""key"").size(3) -+ .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.term(true))) -+ .execute().actionGet(); ++public class ResourcesManager ++{ ++ private Map> resourceTypes; + -+ Terms terms = response.getAggregations().get(""keys""); -+ Collection buckets = terms.getBuckets(); -+ assertThat(buckets.size(), equalTo(3)); -+ Map expected = ImmutableMap.builder() -+ .put(1, 8l) -+ .put(2, 5l) -+ .put(3, 8l) -+ .build(); -+ for (Terms.Bucket bucket : buckets) { -+ assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue()))); -+ } ++ public ResourcesManager(Map> resourceTypes) ++ { ++ // Also store in this class ++ this.resourceTypes = resourceTypes; ++ ++ // Add vanilla resources ++ put(""AboveWaterRes"", AboveWaterGen.class); ++ put(""Cactus"", CactusGen.class); ++ put(""CustomObject"", CustomObjectGen.class); ++ put(""Dungeon"", DungeonGen.class); ++ put(""Grass"", GrassGen.class); ++ put(""Liquid"", LiquidGen.class); ++ put(""Ore"", OreGen.class); ++ put(""Plant"", PlantGen.class); ++ put(""Reed"", ReedGen.class); ++ put(""SmallLake"", SmallLakeGen.class); ++ put(""Tree"", TreeGen.class); ++ put(""UndergroundLake"", UndergroundLakeGen.class); ++ put(""UnderWaterOre"", UnderWaterOreGen.class); ++ put(""Vines"", VinesGen.class); ++ } ++ ++ public void put(String name, Class value) ++ { ++ resourceTypes.put(name.toLowerCase(), value); + } + - @Test - public void noShardSize_double() throws Exception { - -@@ -201,6 +303,32 @@ public void noShardSize_double() throws Exception { - .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.count(false))) - .execute().actionGet(); - -+ Terms terms = response.getAggregations().get(""keys""); -+ Collection buckets = terms.getBuckets(); -+ assertThat(buckets.size(), equalTo(3)); -+ Map expected = ImmutableMap.builder() -+ .put(1, 8l) -+ .put(3, 8l) -+ .put(2, 5l) -+ .build(); -+ for (Terms.Bucket bucket : buckets) { -+ assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue()))); ++ /** ++ * Creates a resource with the specified name. ++ * ++ * @param name ++ * Name of the resource, like Ore ++ * @param args ++ * String representation of the args. ++ * @return The resource, or null of no matching resource could be found. 
++ */ ++ public Resource getResource(String name, BiomeConfig biomeConfig, List args) ++ { ++ if (resourceTypes.containsKey(name.toLowerCase())) ++ { ++ Resource resource; ++ try ++ { ++ resource = resourceTypes.get(name.toLowerCase()).newInstance(); ++ } catch (InstantiationException e) ++ { ++ TerrainControl.log(""Reflection error while loading the resources: "" + e.getMessage()); ++ e.printStackTrace(); ++ return null; ++ } catch (IllegalAccessException e) ++ { ++ TerrainControl.log(""Reflection error while loading the resources: "" + e.getMessage()); ++ e.printStackTrace(); ++ return null; ++ } ++ resource.setWorldConfig(biomeConfig.worldConfig); ++ try ++ { ++ resource.load(args); ++ } catch (InvalidResourceException e) ++ { ++ TerrainControl.log(""Invalid resource "" + name + "" in "" + biomeConfig.Name + "": "" + e.getMessage()); ++ return null; ++ } ++ ++ return resource; ++ + } -+ } + -+ @Test -+ public void shardSizeEqualsSize_double() throws Exception { ++ return null; ++ } + -+ createIdx(""type=double""); ++} +diff --git a/common/src/com/khorn/terraincontrol/generator/resourcegens/SaplingGen.java b/common/src/com/khorn/terraincontrol/generator/resourcegens/SaplingGen.java +new file mode 100644 +index 000000000..7531cb845 +--- /dev/null ++++ b/common/src/com/khorn/terraincontrol/generator/resourcegens/SaplingGen.java +@@ -0,0 +1,102 @@ ++package com.khorn.terraincontrol.generator.resourcegens; + -+ indexData(); ++import java.util.ArrayList; ++import java.util.List; ++import java.util.Random; + -+ SearchResponse response = client().prepareSearch(""idx"").setTypes(""type"") -+ .setQuery(matchAllQuery()) -+ .addAggregation(terms(""keys"").field(""key"").size(3).shardSize(3) -+ .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.count(false))) -+ .execute().actionGet(); ++import com.khorn.terraincontrol.LocalWorld; ++import com.khorn.terraincontrol.TerrainControl; ++import com.khorn.terraincontrol.configuration.Resource; ++import com.khorn.terraincontrol.customobjects.CustomObject; ++import com.khorn.terraincontrol.exception.InvalidResourceException; + - Terms terms = response.getAggregations().get(""keys""); - Collection buckets = terms.getBuckets(); - assertThat(buckets.size(), equalTo(3)); -@@ -265,4 +393,30 @@ public void withShardSize_double_singleShard() throws Exception { - assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue()))); - } - } ++public class SaplingGen extends Resource ++{ ++ public List trees; ++ public List treeNames; ++ public List treeChances; ++ public int saplingType; + -+ @Test -+ public void noShardSizeTermOrder_double() throws Exception { ++ @Override ++ public void process(LocalWorld world, Random random, int chunkX, int chunkZ) ++ { ++ // Left blank, as it shouldn't spawn anything. 
++ } + -+ createIdx(""type=double""); ++ @Override ++ public void load(List args) throws InvalidResourceException ++ { ++ assureSize(3, args); + -+ indexData(); ++ if (args.get(0).equalsIgnoreCase(""All"")) ++ { ++ saplingType = -1; ++ } else ++ { ++ saplingType = getInt(args.get(0), -1, 3); ++ } + -+ SearchResponse response = client().prepareSearch(""idx"").setTypes(""type"") -+ .setQuery(matchAllQuery()) -+ .addAggregation(terms(""keys"").field(""key"").size(3) -+ .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.term(true))) -+ .execute().actionGet(); ++ trees = new ArrayList(); ++ treeNames = new ArrayList(); ++ treeChances = new ArrayList(); + -+ Terms terms = response.getAggregations().get(""keys""); -+ Collection buckets = terms.getBuckets(); -+ assertThat(buckets.size(), equalTo(3)); -+ Map expected = ImmutableMap.builder() -+ .put(1, 8l) -+ .put(2, 5l) -+ .put(3, 8l) -+ .build(); -+ for (Terms.Bucket bucket : buckets) { -+ assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue()))); ++ for (int i = 1; i < args.size() - 1; i += 2) ++ { ++ CustomObject object = TerrainControl.getCustomObjectManager().getObjectFromString(args.get(i), worldConfig); ++ if (object == null) ++ { ++ throw new InvalidResourceException(""Custom object "" + args.get(i) + "" not found!""); ++ } ++ if(!object.canSpawnAsTree()) ++ { ++ throw new InvalidResourceException(""Custom object "" + args.get(i) + "" is not a tree!""); ++ } ++ trees.add(object); ++ treeNames.add(args.get(i)); ++ treeChances.add(getInt(args.get(i + 1), 1, 100)); + } + } - }" -d7929a40521731c53f510996bf9918ff3b158e3d,Delta Spike,"DELTASPIKE-378 add ProjectStageAware property handling - -Main entry point for this feature is -ConfigResolver#getProjectStageAwarePropertyValue -",a,https://github.com/apache/deltaspike,"diff --git a/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/api/config/ConfigResolver.java b/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/api/config/ConfigResolver.java -index 43bb6602f..05cbc59df 100644 ---- a/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/api/config/ConfigResolver.java -+++ b/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/api/config/ConfigResolver.java -@@ -31,9 +31,11 @@ - - import javax.enterprise.inject.Typed; - -+import org.apache.deltaspike.core.api.projectstage.ProjectStage; - import org.apache.deltaspike.core.spi.config.ConfigSource; - import org.apache.deltaspike.core.spi.config.ConfigSourceProvider; - import org.apache.deltaspike.core.util.ClassUtils; -+import org.apache.deltaspike.core.util.ProjectStageProducer; - import org.apache.deltaspike.core.util.ServiceUtils; - - /** -@@ -56,6 +58,8 @@ public final class ConfigResolver - private static Map configSources - = new ConcurrentHashMap(); - -+ private static volatile ProjectStage projectStage = null; + - private ConfigResolver() - { - // this is a utility class which doesn't get instantiated. -@@ -146,6 +150,35 @@ public static String getPropertyValue(String key) - return null; - } - -+ /** -+ *

Search for the configured value in all {@link ConfigSource}s and take the -+ * current {@link org.apache.deltaspike.core.api.projectstage.ProjectStage} -+ * into account.

-+ * -+ *

It first will search if there is a configured value of the given key prefixed -+ * with the current ProjectStage (e.g. 'myproject.myconfig.Production') and if this didn't -+ * find anything it will lookup the given key without any prefix.

-+ * -+ *

Attention This method must only be used after all ConfigSources -+ * got registered and it also must not be used to determine the ProjectStage itself.

-+ * @param key -+ * @param defaultValue -+ * @return the configured value or if non found the defaultValue -+ * -+ */ -+ public static String getProjectStageAwarePropertyValue(String key, String defaultValue) ++ @Override ++ public ResourceType getType() + { -+ ProjectStage ps = getProjectStage(); ++ return ResourceType.saplingResource; ++ } + -+ String value = getPropertyValue(key + '.' + ps, defaultValue); -+ if (value == null) ++ @Override ++ public String makeString() ++ { ++ String output = ""Sapling("" + saplingType; ++ if (saplingType == -1) ++ { ++ output = ""Sapling(All""; ++ } ++ for (int i = 0; i < treeNames.size(); i++) + { -+ value = getPropertyValue(key, defaultValue); ++ output += "","" + treeNames.get(i) + "","" + treeChances.get(i); + } ++ return output + "")""; ++ } + -+ return value; ++ @Override ++ public void spawn(LocalWorld world, Random random, int x, int z) ++ { ++ // Left blank, as process() already handles this + } + - /** - * Resolve all values for the given key, from all registered ConfigSources ordered by their - * ordinal value in ascending ways. If more {@link ConfigSource}s have the same ordinal, their -@@ -264,4 +297,17 @@ public int compare(ConfigSource configSource1, ConfigSource configSource2) - return configSources; - } - -+ private static ProjectStage getProjectStage() ++ public boolean growSapling(LocalWorld world, Random random, int x, int y, int z) + { -+ if (projectStage == null) ++ for (int treeNumber = 0; treeNumber < trees.size(); treeNumber++) + { -+ synchronized (ConfigResolver.class) ++ if (random.nextInt(100) < treeChances.get(treeNumber)) + { -+ projectStage = ProjectStageProducer.getInstance().getProjectStage(); ++ if (trees.get(treeNumber).spawnAsTree(world, random, x, z)) ++ { ++ // Success! ++ return true; ++ } + } + } -+ -+ return projectStage; ++ return false; + } ++} +diff --git a/common/src/com/khorn/terraincontrol/generator/resourcegens/SmallLakeGen.java b/common/src/com/khorn/terraincontrol/generator/resourcegens/SmallLakeGen.java +index 40baafeb7..09105f5ce 100644 +--- a/common/src/com/khorn/terraincontrol/generator/resourcegens/SmallLakeGen.java ++++ b/common/src/com/khorn/terraincontrol/generator/resourcegens/SmallLakeGen.java +@@ -1,25 +1,30 @@ + package com.khorn.terraincontrol.generator.resourcegens; + ++import java.util.List; ++import java.util.Random; + - } -diff --git a/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/util/ProjectStageProducer.java b/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/util/ProjectStageProducer.java -index ba4ac6b28..f2e39bf53 100644 ---- a/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/util/ProjectStageProducer.java -+++ b/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/util/ProjectStageProducer.java -@@ -47,6 +47,7 @@ - * } - * - * -+ *

Please note that there can only be one ProjectStage per EAR.

- */ - @ApplicationScoped - public class ProjectStageProducer implements Serializable -diff --git a/deltaspike/core/api/src/test/java/org/apache/deltaspike/test/api/config/ConfigResolverTest.java b/deltaspike/core/api/src/test/java/org/apache/deltaspike/test/api/config/ConfigResolverTest.java -index 809ccc1df..70c00e802 100644 ---- a/deltaspike/core/api/src/test/java/org/apache/deltaspike/test/api/config/ConfigResolverTest.java -+++ b/deltaspike/core/api/src/test/java/org/apache/deltaspike/test/api/config/ConfigResolverTest.java -@@ -19,6 +19,8 @@ - package org.apache.deltaspike.test.api.config; + import com.khorn.terraincontrol.DefaultMaterial; + import com.khorn.terraincontrol.LocalWorld; +-import com.khorn.terraincontrol.configuration.BiomeConfig; ++import com.khorn.terraincontrol.TerrainControl; + import com.khorn.terraincontrol.configuration.Resource; ++import com.khorn.terraincontrol.exception.InvalidResourceException; - import org.apache.deltaspike.core.api.config.ConfigResolver; -+import org.apache.deltaspike.core.api.projectstage.ProjectStage; -+import org.apache.deltaspike.core.util.ProjectStageProducer; - import org.junit.Assert; - import org.junit.Test; +-import java.util.Random; +- +-public class SmallLakeGen extends ResourceGenBase ++public class SmallLakeGen extends Resource + { -@@ -50,4 +52,12 @@ public void testStandaloneConfigSource() - Assert.assertNull(ConfigResolver.getPropertyValue(""notexisting"")); - Assert.assertEquals(""testvalue"", ConfigResolver.getPropertyValue(""testkey"")); - } -+ -+ @Test -+ public void testGetProjectStageAwarePropertyValue() -+ { -+ ProjectStageProducer.setProjectStage(ProjectStage.UnitTest); -+ Assert.assertNull(ConfigResolver.getProjectStageAwarePropertyValue(""notexisting"", null)); -+ Assert.assertEquals(""unittestvalue"", ConfigResolver.getProjectStageAwarePropertyValue(""testkey"", null)); -+ } - } -diff --git a/deltaspike/core/api/src/test/java/org/apache/deltaspike/test/api/config/TestConfigSource.java b/deltaspike/core/api/src/test/java/org/apache/deltaspike/test/api/config/TestConfigSource.java -index 581c837e3..ed9dc8667 100644 ---- a/deltaspike/core/api/src/test/java/org/apache/deltaspike/test/api/config/TestConfigSource.java -+++ b/deltaspike/core/api/src/test/java/org/apache/deltaspike/test/api/config/TestConfigSource.java -@@ -33,6 +33,15 @@ public class TestConfigSource implements ConfigSource + private final boolean[] BooleanBuffer = new boolean[2048]; +- ++ private int blockId; ++ private int blockData; ++ private int minAltitude; ++ private int maxAltitude; - private int ordinal = 700; + @Override +- protected void SpawnResource(LocalWorld world, Random rand, Resource res, int x, int z) ++ public void spawn(LocalWorld world, Random rand, int x, int z) + { + x -= 8; + z -= 8; + +- int y = rand.nextInt(res.MaxAltitude - res.MinAltitude) + res.MinAltitude; ++ int y = rand.nextInt(maxAltitude - minAltitude) + minAltitude; + + // Search any free space + while ((y > 5) && (world.isEmpty(x, y, z))) +@@ -31,7 +36,6 @@ protected void SpawnResource(LocalWorld world, Random rand, Resource res, int x, + // y = floor + y -= 4; + +- + synchronized (BooleanBuffer) + { + boolean[] BooleanBuffer = new boolean[2048]; +@@ -67,14 +71,17 @@ protected void SpawnResource(LocalWorld world, Random rand, Resource res, int x, + { + for (i2 = 0; i2 < 8; i2++) + { +- boolean flag = (!BooleanBuffer[((j * 16 + i1) * 8 + i2)]) && (((j < 15) && (BooleanBuffer[(((j + 1) * 16 + i1) * 8 + i2)])) || ((j > 0) && (BooleanBuffer[(((j - 1) * 16 + i1) * 8 + 
i2)])) || ((i1 < 15) && (BooleanBuffer[((j * 16 + (i1 + 1)) * 8 + i2)])) || ((i1 > 0) && (BooleanBuffer[((j * 16 + (i1 - 1)) * 8 + i2)])) || ((i2 < 7) && (BooleanBuffer[((j * 16 + i1) * 8 + (i2 + 1))])) || ((i2 > 0) && (BooleanBuffer[((j * 16 + i1) * 8 + (i2 - 1))]))); ++ boolean flag = (!BooleanBuffer[((j * 16 + i1) * 8 + i2)]) ++ && (((j < 15) && (BooleanBuffer[(((j + 1) * 16 + i1) * 8 + i2)])) || ((j > 0) && (BooleanBuffer[(((j - 1) * 16 + i1) * 8 + i2)])) ++ || ((i1 < 15) && (BooleanBuffer[((j * 16 + (i1 + 1)) * 8 + i2)])) || ((i1 > 0) && (BooleanBuffer[((j * 16 + (i1 - 1)) * 8 + i2)])) ++ || ((i2 < 7) && (BooleanBuffer[((j * 16 + i1) * 8 + (i2 + 1))])) || ((i2 > 0) && (BooleanBuffer[((j * 16 + i1) * 8 + (i2 - 1))]))); + + if (flag) + { + DefaultMaterial localMaterial = world.getMaterial(x + j, y + i2, z + i1); + if ((i2 >= 4) && (localMaterial.isLiquid())) + return; +- if ((i2 < 4) && (!localMaterial.isSolid()) && (world.getTypeId(x + j, y + i2, z + i1) != res.BlockId)) ++ if ((i2 < 4) && (!localMaterial.isSolid()) && (world.getTypeId(x + j, y + i2, z + i1) != blockId)) + return; + } + } +@@ -90,7 +97,7 @@ protected void SpawnResource(LocalWorld world, Random rand, Resource res, int x, + { + if (BooleanBuffer[((j * 16 + i1) * 8 + i2)]) + { +- world.setBlock(x + j, y + i2, z + i1, res.BlockId, res.BlockData); ++ world.setBlock(x + j, y + i2, z + i1, blockId, blockData); + BooleanBuffer[((j * 16 + i1) * 8 + i2)] = false; + } + } +@@ -104,77 +111,31 @@ protected void SpawnResource(LocalWorld world, Random rand, Resource res, int x, + } + } + } +- /* +- for (j = 0; j < 16; j++) { +- for (i1 = 0; i1 < 16; i1++) { +- for (i2 = 4; i2 < 8; i2++) { +- if ((BooleanBuffer[((j * 16 + i1) * 8 + i2)] == 0) || +- (paramWorld.getTypeId(x + j, y + i2 - 1, z + i1) != Block.DIRT.id) || (paramWorld.a(EnumSkyBlock.SKY, x + j, y + i2, z + i1) <= 0)) continue; +- BiomeBase localBiomeBase = paramWorld.getBiome(x + j, z + i1); +- if (localBiomeBase.A == Block.MYCEL.id) paramWorld.setRawTypeId(x + j, y + i2 - 1, z + i1, Block.MYCEL.id); else { +- paramWorld.setRawTypeId(x + j, y + i2 - 1, z + i1, Block.GRASS.id); +- } +- } +- } +- +- } +- +- if (Block.byId[this.a].material == Material.LAVA) { +- for (j = 0; j < 16; j++) { +- for (i1 = 0; i1 < 16; i1++) { +- for (i2 = 0; i2 < 8; i2++) { +- int i4 = (BooleanBuffer[((j * 16 + i1) * 8 + i2)] == 0) && (((j < 15) && (BooleanBuffer[(((j + 1) * 16 + i1) * 8 + i2)] != 0)) || ((j > 0) && (BooleanBuffer[(((j - 1) * 16 + i1) * 8 + i2)] != 0)) || ((i1 < 15) && (BooleanBuffer[((j * 16 + (i1 + 1)) * 8 + i2)] != 0)) || ((i1 > 0) && (BooleanBuffer[((j * 16 + (i1 - 1)) * 8 + i2)] != 0)) || ((i2 < 7) && (BooleanBuffer[((j * 16 + i1) * 8 + (i2 + 1))] != 0)) || ((i2 > 0) && (BooleanBuffer[((j * 16 + i1) * 8 + (i2 - 1))] != 0))) ? 
1 : 0; +- +- if ((i4 == 0) || +- ((i2 >= 4) && (rand.nextInt(2) == 0)) || (!paramWorld.getMaterial(x + j, y + i2, z + i1).isBuildable())) continue; +- paramWorld.setRawTypeId(x + j, y + i2, z + i1, Block.STONE.id); +- } +- } +- +- } +- +- } +- +- if (Block.byId[this.a].material == Material.WATER) { +- for (j = 0; j < 16; j++) { +- for (i1 = 0; i1 < 16; i1++) { +- i2 = 4; +- if (!paramWorld.s(x + j, y + i2, z + i1)) continue; paramWorld.setRawTypeId(x + j, y + i2, z + i1, Block.ICE.id); +- } +- } +- } */ + + } + } -+ private Map props = new HashMap(); -+ -+ -+ public TestConfigSource() -+ { -+ props.put(""testkey"", ""testvalue""); -+ props.put(""testkey.UnitTest"", ""unittestvalue""); -+ } -+ @Override - public String getConfigName() +- protected boolean ReadString(Resource res, String[] Props, BiomeConfig biomeConfig) throws NumberFormatException ++ public void load(List args) throws InvalidResourceException { -@@ -48,15 +57,13 @@ public int getOrdinal() +- if (Props[0].contains(""."")) +- { +- String[] block = Props[0].split(""\\.""); +- res.BlockId = CheckBlock(block[0]); +- res.BlockData = CheckValue(block[1], 0, 16); +- } else +- { +- res.BlockId = CheckBlock(Props[0]); +- } ++ assureSize(5, args); ++ blockId = getBlockId(args.get(0)); ++ blockData = getBlockData(args.get(0)); ++ frequency = getInt(args.get(1), 1, 100); ++ rarity = getInt(args.get(2), 1, 100); ++ minAltitude = getInt(args.get(3), TerrainControl.worldDepth, TerrainControl.worldHeight); ++ maxAltitude = getInt(args.get(4), minAltitude + 1, TerrainControl.worldHeight); ++ } + +- res.Frequency = CheckValue(Props[1], 1, 100); +- res.Rarity = CheckValue(Props[2], 0, 100); +- res.MinAltitude = CheckValue(Props[3], 0, biomeConfig.worldConfig.WorldHeight); +- res.MaxAltitude = CheckValue(Props[4], 0, biomeConfig.worldConfig.WorldHeight, res.MinAltitude); +- return true; ++ @Override ++ public ResourceType getType() ++ { ++ return ResourceType.biomeConfigResource; + } + @Override - public String getPropertyValue(String key) +- protected String WriteString(Resource res, String blockSources) ++ public String makeString() { -- return ""testkey"".equals(key) ? 
""testvalue"" : null; -+ return props.get(key); +- String blockId = res.BlockIdToName(res.BlockId); +- if (res.BlockData > 0) +- { +- blockId += ""."" + res.BlockData; +- } +- return blockId + "","" + res.Frequency + "","" + res.Rarity + "","" + res.MinAltitude + "","" + res.MaxAltitude; ++ return ""SmallLake("" + makeMaterial(blockId, blockData) + "","" + frequency + "","" + rarity + "","" + minAltitude + "","" + maxAltitude + "")""; } + } +diff --git a/common/src/com/khorn/terraincontrol/generator/resourcegens/TreeGen.java b/common/src/com/khorn/terraincontrol/generator/resourcegens/TreeGen.java +index 842080d89..c9d1ad663 100644 +--- a/common/src/com/khorn/terraincontrol/generator/resourcegens/TreeGen.java ++++ b/common/src/com/khorn/terraincontrol/generator/resourcegens/TreeGen.java +@@ -1,304 +1,90 @@ + package com.khorn.terraincontrol.generator.resourcegens; + +-import com.khorn.terraincontrol.configuration.BiomeConfig; +-import com.khorn.terraincontrol.configuration.Resource; +-import com.khorn.terraincontrol.LocalWorld; +-import com.khorn.terraincontrol.customobjects.BODefaultValues; +-import com.khorn.terraincontrol.customobjects.CustomObjectCompiled; +-import com.khorn.terraincontrol.customobjects.CustomObjectGen; +-import com.khorn.terraincontrol.customobjects.ObjectsStore; +- + import java.util.ArrayList; +-import java.util.HashMap; ++import java.util.List; + import java.util.Random; + +-public class TreeGen extends ResourceGenBase +-{ +- @Override +- protected void SpawnResource(LocalWorld world, Random rand, Resource res, int x, int z) +- { ++import com.khorn.terraincontrol.LocalWorld; ++import com.khorn.terraincontrol.TerrainControl; ++import com.khorn.terraincontrol.configuration.Resource; ++import com.khorn.terraincontrol.customobjects.CustomObject; ++import com.khorn.terraincontrol.exception.InvalidResourceException; + +- } ++public class TreeGen extends Resource ++{ ++ private List trees; ++ private List treeNames; ++ private List treeChances; @Override - public Map getProperties() +- public void Process(LocalWorld world, Random rand, Resource res, int x, int z) ++ public void process(LocalWorld world, Random random, int chunkX, int chunkZ) { -- Map map = new HashMap(); -- map.put(""testkey"", ""testvalue""); -- return map; -+ return props; +- for (int i = 0; i < res.Frequency; i++) ++ for (int i = 0; i < frequency; i++) + { +- +- int _x = x + rand.nextInt(16) + 8; +- int _z = z + rand.nextInt(16) + 8; +- int _y = world.getHighestBlockYAt(_x, _z); +- SpawnTree(world, rand, res, _x, _y, _z); +- } +- } +- +- public boolean SpawnTree(LocalWorld world, Random rand, Resource res, int x, int y, int z) +- { +- boolean treeSpawned = false; +- +- for (int t = 0; t < res.TreeTypes.length && !treeSpawned; t++) +- { +- if (rand.nextInt(100) < res.TreeChances[t]) ++ for (int treeNumber = 0; treeNumber < trees.size(); treeNumber++) + { +- switch (res.TreeTypes[t]) ++ if (random.nextInt(100) < treeChances.get(treeNumber)) + { +- case CustomTree: +- CustomObjectCompiled SelectedObject = res.CUObjects[t]; +- // TODO Branch check ?!!! 
+- +- if (!CustomObjectGen.ObjectCanSpawn(world, x, y, z, SelectedObject)) +- continue; +- +- treeSpawned = CustomObjectGen.GenerateCustomObject(world, rand, x, y, z, SelectedObject); +- +- if (treeSpawned) +- CustomObjectGen.GenerateCustomObjectFromGroup(world, rand, x, y, z, SelectedObject); +- break; +- case CustomTreeWorld: +- treeSpawned = SpawnCustomTreeFromArray(world, rand, x, y, z, res.CUObjectsWorld); +- break; +- case CustomTreeBiome: +- treeSpawned = SpawnCustomTreeFromArray(world, rand, x, y, z, res.CUObjectsBiome); +- break; +- default: +- treeSpawned = world.PlaceTree(res.TreeTypes[t], rand, x, y, z); ++ int x = chunkX * 16 + random.nextInt(16); ++ int z = chunkZ * 16 + random.nextInt(16); ++ if (trees.get(treeNumber).spawnAsTree(world, random, x, z)) ++ { ++ // Success, on to the next tree! + break; ++ } + } + } + } +- return treeSpawned; +- +- } +- +- private boolean SpawnCustomTreeFromArray(LocalWorld world, Random rand, int x, int y, int z, CustomObjectCompiled[] CUObjects) +- { +- +- if (CUObjects.length == 0) +- return false; +- +- boolean objectSpawned = false; +- int spawnAttempts = 0; +- while (!objectSpawned) +- { +- if (spawnAttempts > world.getSettings().objectSpawnRatio) +- return false; +- +- spawnAttempts++; +- +- CustomObjectCompiled SelectedObject = CUObjects[rand.nextInt(CUObjects.length)]; +- if (SelectedObject.Branch) +- continue; +- +- if (rand.nextInt(100) < SelectedObject.Rarity) +- { +- +- if (!CustomObjectGen.ObjectCanSpawn(world, x, y, z, SelectedObject)) +- continue; +- +- objectSpawned = CustomObjectGen.GenerateCustomObject(world, rand, x, y, z, SelectedObject); +- +- if (objectSpawned) +- CustomObjectGen.GenerateCustomObjectFromGroup(world, rand, x, y, z, SelectedObject); +- } +- } +- +- return objectSpawned; } - @Override" -aec021e668ad6786d20feaadf119f5407c2b3191,kotlin,Implemented better rendering for parameters with- default values in decompiler and descriptor renderer.-- -KT-1582 fixed-,p,https://github.com/JetBrains/intellij-community,⚠️ HTTP 404: Not Found -1bd1c6fd062d26e776cabbaa2bab0ad009453af6,Valadoc,"libvaladoc: Allow conditional spaces in headlines -",a,https://github.com/GNOME/vala/,⚠️ Could not parse repo info -6236c422615cfe33795267214077551f3d9ffa6f,camel,CAMEL-1977: Http based components should filter- out camel internal headers.--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@814567 13f79535-47bb-0310-9956-ffa450edef68-,c,https://github.com/apache/camel,"diff --git a/components/camel-http/src/main/java/org/apache/camel/component/http/HttpHeaderFilterStrategy.java b/components/camel-http/src/main/java/org/apache/camel/component/http/HttpHeaderFilterStrategy.java -index a32be75c50c52..37e81e5c902f3 100644 ---- a/components/camel-http/src/main/java/org/apache/camel/component/http/HttpHeaderFilterStrategy.java -+++ b/components/camel-http/src/main/java/org/apache/camel/component/http/HttpHeaderFilterStrategy.java -@@ -45,6 +45,7 @@ protected void initialize() { - setLowerCase(true); - - // filter headers begin with ""Camel"" or ""org.apache.camel"" -- setOutFilterPattern(""(Camel|org\\.apache\\.camel)[\\.|a-z|A-z|0-9]*""); -+ // must ignore case for Http based transports -+ setOutFilterPattern(""(?i)(Camel|org\\.apache\\.camel)[\\.|a-z|A-z|0-9]*""); - } - } -diff --git a/components/camel-jetty/src/test/java/org/apache/camel/component/jetty/HttpFilterCamelHeadersTest.java b/components/camel-jetty/src/test/java/org/apache/camel/component/jetty/HttpFilterCamelHeadersTest.java -new file mode 100644 -index 
0000000000000..40861c7fcc891 ---- /dev/null -+++ b/components/camel-jetty/src/test/java/org/apache/camel/component/jetty/HttpFilterCamelHeadersTest.java -@@ -0,0 +1,80 @@ -+/** -+ * Licensed to the Apache Software Foundation (ASF) under one or more -+ * contributor license agreements. See the NOTICE file distributed with -+ * this work for additional information regarding copyright ownership. -+ * The ASF licenses this file to You under the Apache License, Version 2.0 -+ * (the ""License""); you may not use this file except in compliance with -+ * the License. You may obtain a copy of the License at -+ * -+ * http://www.apache.org/licenses/LICENSE-2.0 -+ * -+ * Unless required by applicable law or agreed to in writing, software -+ * distributed under the License is distributed on an ""AS IS"" BASIS, -+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -+ * See the License for the specific language governing permissions and -+ * limitations under the License. -+ */ -+package org.apache.camel.component.jetty; -+ -+import java.util.Map; -+ -+import org.apache.camel.Exchange; -+import org.apache.camel.Processor; -+import org.apache.camel.builder.RouteBuilder; -+import org.apache.camel.impl.JndiRegistry; -+import org.apache.camel.test.junit4.CamelTestSupport; -+import org.junit.Test; -+ -+/** -+ * @version $Revision$ -+ */ -+public class HttpFilterCamelHeadersTest extends CamelTestSupport { -+ -+ @Test -+ public void testFilterCamelHeaders() throws Exception { -+ Exchange out = template.send(""http://localhost:9090/test/filter"", new Processor() { -+ public void process(Exchange exchange) throws Exception { -+ exchange.getIn().setBody(""Claus""); -+ exchange.getIn().setHeader(""bar"", 123); -+ } -+ }); -+ -+ assertNotNull(out); -+ assertEquals(""Hi Claus"", out.getOut().getBody(String.class)); -+ -+ // there should be no internal Camel headers -+ // except for the response code -+ Map headers = out.getOut().getHeaders(); -+ for (String key : headers.keySet()) { -+ if (!key.equalsIgnoreCase(Exchange.HTTP_RESPONSE_CODE)) { -+ assertTrue(""Should not contain any Camel internal headers"", !key.toLowerCase().startsWith(""camel"")); -+ } else { -+ assertEquals(200, headers.get(Exchange.HTTP_RESPONSE_CODE)); -+ } -+ } -+ } -+ -+ @Override -+ protected JndiRegistry createRegistry() throws Exception { -+ JndiRegistry jndi = super.createRegistry(); -+ jndi.bind(""foo"", new MyFooBean()); -+ return jndi; -+ } -+ -+ @Override -+ protected RouteBuilder createRouteBuilder() throws Exception { -+ return new RouteBuilder() { -+ @Override -+ public void configure() throws Exception { -+ from(""jetty:http://localhost:9090/test/filter"").beanRef(""foo""); -+ } -+ }; -+ } -+ -+ public static class MyFooBean { -+ -+ public String hello(String name) { -+ return ""Hi "" + name; -+ } -+ } -+}" -91517fad3eae6b93ded1cb16a518b1a37ec06e5c,restlet-framework-java,Fixed potential NPE when the product name is null.- Reported by Vincent Ricard.--,c,https://github.com/restlet/restlet-framework-java,"diff --git a/modules/com.noelios.restlet/src/com/noelios/restlet/Engine.java b/modules/com.noelios.restlet/src/com/noelios/restlet/Engine.java -index 99e1c21761..7dbcab4ed7 100644 ---- a/modules/com.noelios.restlet/src/com/noelios/restlet/Engine.java -+++ b/modules/com.noelios.restlet/src/com/noelios/restlet/Engine.java -@@ -582,7 +582,7 @@ public String formatUserAgent(List products) - .hasNext();) { - final Product product = iterator.next(); - if ((product.getName() == null) -- && 
(product.getName().length() == 0)) { -+ || (product.getName().length() == 0)) { - throw new IllegalArgumentException( - ""Product name cannot be null.""); - } -diff --git a/modules/org.restlet.gwt/src/org/restlet/gwt/internal/Engine.java b/modules/org.restlet.gwt/src/org/restlet/gwt/internal/Engine.java -index b52fa867bb..df932134a8 100644 ---- a/modules/org.restlet.gwt/src/org/restlet/gwt/internal/Engine.java -+++ b/modules/org.restlet.gwt/src/org/restlet/gwt/internal/Engine.java -@@ -208,7 +208,7 @@ public String formatUserAgent(List products) - .hasNext();) { - final Product product = iterator.next(); - if ((product.getName() == null) -- && (product.getName().length() == 0)) { -+ || (product.getName().length() == 0)) { - throw new IllegalArgumentException( - ""Product name cannot be null.""); - }" -10d01bb9da84d588f57a824ef9dc048562af2f7a,Mylyn Reviews,"bug 386204: Allow users to include subtasks in changeset view - -Refactored part and introduced a model, which allows including subtasks. - -https://bugs.eclipse.org/bugs/show_bug.cgi?id=386204 - -Change-Id: I94af1aa42624b69f280d8dca54c8020f877a9f45 -",a,https://github.com/eclipse-mylyn/org.eclipse.mylyn.reviews,"diff --git a/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/versions/tasks/ui/ChangesetPart.java b/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/versions/tasks/ui/ChangesetPart.java -index b1787d9b..2ca7ca10 100644 ---- a/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/versions/tasks/ui/ChangesetPart.java -+++ b/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/versions/tasks/ui/ChangesetPart.java -@@ -14,25 +14,37 @@ - import java.util.List; - - import org.eclipse.core.runtime.CoreException; -+import org.eclipse.core.runtime.IStatus; - import org.eclipse.core.runtime.NullProgressMonitor; -+import org.eclipse.core.runtime.Status; - import org.eclipse.jface.action.MenuManager; -+import org.eclipse.jface.action.ToolBarManager; -+import org.eclipse.jface.dialogs.IMessageProvider; - import org.eclipse.jface.viewers.ArrayContentProvider; --import org.eclipse.jface.viewers.ILabelProviderListener; --import org.eclipse.jface.viewers.ITableLabelProvider; - import org.eclipse.jface.viewers.TableViewer; - import org.eclipse.jface.viewers.TableViewerColumn; -+import org.eclipse.mylyn.internal.tasks.ui.editors.Messages; -+import org.eclipse.mylyn.internal.tasks.ui.util.TasksUiInternal; - import org.eclipse.mylyn.tasks.core.ITask; -+import org.eclipse.mylyn.tasks.core.ITaskContainer; - import org.eclipse.mylyn.tasks.ui.editors.AbstractTaskEditorPart; - import org.eclipse.mylyn.versions.core.ChangeSet; - import org.eclipse.mylyn.versions.tasks.core.IChangeSetMapping; - import org.eclipse.mylyn.versions.tasks.core.TaskChangeSet; -+import org.eclipse.mylyn.versions.tasks.ui.internal.IChangesetModel; -+import org.eclipse.mylyn.versions.tasks.ui.internal.IncludeSubTasksAction; -+import org.eclipse.mylyn.versions.tasks.ui.internal.TaskChangesetLabelProvider; -+import org.eclipse.mylyn.versions.tasks.ui.internal.TaskVersionsUiPlugin; - import org.eclipse.swt.SWT; --import org.eclipse.swt.graphics.Image; -+import org.eclipse.swt.custom.BusyIndicator; - import org.eclipse.swt.layout.FillLayout; - import org.eclipse.swt.layout.GridData; - import org.eclipse.swt.layout.GridLayout; - import org.eclipse.swt.widgets.Composite; -+import org.eclipse.swt.widgets.Display; - import org.eclipse.swt.widgets.Menu; -+import org.eclipse.ui.forms.events.HyperlinkAdapter; -+import 
org.eclipse.ui.forms.events.HyperlinkEvent; - import org.eclipse.ui.forms.widgets.FormToolkit; - import org.eclipse.ui.forms.widgets.Section; - -@@ -43,40 +55,8 @@ - */ - @SuppressWarnings(""restriction"") - public class ChangesetPart extends AbstractTaskEditorPart { -- private static final class TaskChangesetLabelProvider implements -- ITableLabelProvider { -- public void addListener(ILabelProviderListener listener) { -- } + @Override +- protected boolean ReadString(Resource res, String[] Props, BiomeConfig biomeConfig) throws NumberFormatException ++ public void load(List args) throws InvalidResourceException + { +- if (res.Type == ResourceType.Sapling) +- { +- if (Props[0].equals(""All"")) +- res.BlockData = -1; +- else +- res.BlockData = CheckValue(Props[0], 0, 3); - -- public void dispose() { -- } +- } else +- res.Frequency = CheckValue(Props[0], 1, 100); - -- public boolean isLabelProperty(Object element, String property) { -- return false; -- } +- ArrayList treeTypes = new ArrayList(); +- ArrayList treeChances = new ArrayList(); - -- public void removeListener(ILabelProviderListener listener) { -- } +- ArrayList customTrees = new ArrayList(); +- HashMap> Groups = new HashMap>(); - -- public Image getColumnImage(Object element, int columnIndex) { -- return null; -- } +- boolean hasCustomTreeWorld = false; +- boolean hasCustomTreeBiome = false; - -- public String getColumnText(Object element, int columnIndex) { -- TaskChangeSet cs = ((TaskChangeSet) element); -- switch (columnIndex) { -- case 0: -- return cs.getChangeset().getId(); -- case 1: -- return cs.getChangeset().getMessage(); -- case 2: -- return cs.getChangeset().getAuthor().getEmail(); -- case 3: -- return cs.getChangeset().getDate().toString(); -- } -- return element.toString() + "" "" + columnIndex; -- } -- } -+ private TableViewer table; -+ private ChangesetModel model = new ChangesetModel(); - - public ChangesetPart() { - setPartName(""Changeset""); -@@ -87,7 +67,7 @@ public ChangesetPart() { - public void createControl(Composite parent, FormToolkit toolkit) { - Section createSection = createSection(parent, toolkit); - Composite composite = createContentComposite(toolkit, createSection); -- -+ - createTable(composite); - } +- for (int index = 1; (index + 1) < Props.length; index += 2) +- { +- String tree = Props[index]; +- boolean defaultTreeFound = false; +- for (TreeType type : TreeType.values()) +- { +- if (type == TreeType.CustomTree || type == TreeType.CustomTreeWorld || type == TreeType.CustomTreeBiome) +- continue; +- +- if (type.name().equals(tree)) +- { +- defaultTreeFound = true; +- +- treeTypes.add(type); +- treeChances.add(CheckValue(Props[index + 1], 0, 100)); +- break; +- } +- } +- if (defaultTreeFound) +- continue; +- +- // Check custom objects +- +- if (tree.equals(BODefaultValues.BO_Use_World.stringValue())) +- { +- treeTypes.add(TreeType.CustomTreeWorld); +- treeChances.add(CheckValue(Props[index + 1], 0, 100)); +- hasCustomTreeWorld = true; +- continue; +- } +- +- if (tree.equals(BODefaultValues.BO_Use_Biome.stringValue())) +- { +- treeTypes.add(TreeType.CustomTreeBiome); +- treeChances.add(CheckValue(Props[index + 1], 0, 100)); +- hasCustomTreeBiome = true; +- continue; +- } +- +- CustomObjectCompiled obj = ObjectsStore.CompileString(tree, biomeConfig.worldConfig.CustomObjectsDirectory); +- if (obj == null) +- obj = ObjectsStore.CompileString(tree, ObjectsStore.GlobalDirectory); +- if (obj != null) +- { +- customTrees.add(obj); +- treeTypes.add(TreeType.CustomTree); +- 
treeChances.add(CheckValue(Props[index + 1], 0, 100)); ++ assureSize(3, args); -@@ -113,7 +93,7 @@ private Section createSection(Composite parent, FormToolkit toolkit) { - } +- if (!obj.GroupId.equals("""")) +- { +- if (!Groups.containsKey(obj.GroupId)) +- Groups.put(obj.GroupId, new ArrayList()); ++ frequency = getInt(args.get(0), 1, 100); - private void createTable(Composite composite) { -- TableViewer table = new TableViewer(composite); -+ table = new TableViewer(composite); - table.getTable().setLinesVisible(true); - table.getTable().setHeaderVisible(true); - addColumn(table, ""Id""); -@@ -122,11 +102,22 @@ private void createTable(Composite composite) { - addColumn(table, ""Date""); - table.setContentProvider(ArrayContentProvider.getInstance()); - table.setLabelProvider(new TaskChangesetLabelProvider()); -- table.setInput(getInput()); -+ refreshInput(); -+ registerContextMenu(table); -+ } -+ -+ @Override -+ protected void fillToolBar(ToolBarManager toolBarManager) { -+ super.fillToolBar(toolBarManager); -+ toolBarManager.add(new IncludeSubTasksAction(model)); -+ } -+ -+ private void registerContextMenu(TableViewer table) { - MenuManager menuManager = new MenuManager(); - menuManager.setRemoveAllWhenShown(true); - getTaskEditorPage().getEditorSite().registerContextMenu( -- ""org.eclipse.mylyn.versions.changesets"", menuManager, table, true); -+ ""org.eclipse.mylyn.versions.changesets"", menuManager, table, -+ true); - Menu menu = menuManager.createContextMenu(table.getControl()); - table.getTable().setMenu(menu); - } -@@ -138,40 +129,88 @@ private void addColumn(TableViewer table, String name) { - tableViewerColumn.getColumn().setWidth(100); - } +- Groups.get(obj.GroupId).add(obj); +- +- } +- } ++ trees = new ArrayList(); ++ treeNames = new ArrayList(); ++ treeChances = new ArrayList(); -- private List getInput() { -- int score = Integer.MIN_VALUE; -+ private AbstractChangesetMappingProvider determineBestProvider( -+ final ITask task) { - AbstractChangesetMappingProvider bestProvider = null; -- final ITask task = getModel().getTask(); - -+ int score = Integer.MIN_VALUE; - for (AbstractChangesetMappingProvider mappingProvider : TaskChangesetUtil - .getMappingProviders()) { -- if (score < mappingProvider.getScoreFor(task)) -- ; -- { -+ if (score < mappingProvider.getScoreFor(task)) { - bestProvider = mappingProvider; - } - } -- final List changesets = new ArrayList(); -- try { -+ return bestProvider; -+ } +- } +- if (treeChances.size() == 0) +- return false; +- +- +- if (hasCustomTreeBiome) ++ for (int i = 1; i < args.size() - 1; i += 2) + { +- ArrayList customTreesBiome = new ArrayList(); +- for (CustomObjectCompiled objectCompiled : biomeConfig.CustomObjectsCompiled) ++ CustomObject object = TerrainControl.getCustomObjectManager().getObjectFromString(args.get(i), worldConfig); ++ if (object == null) + { +- if (!objectCompiled.Tree) +- continue; +- customTreesBiome.add(objectCompiled); +- if (!objectCompiled.GroupId.equals("""")) +- { +- if (!Groups.containsKey(objectCompiled.GroupId)) +- Groups.put(objectCompiled.GroupId, new ArrayList()); +- +- Groups.get(objectCompiled.GroupId).add(objectCompiled); +- +- } +- ++ throw new InvalidResourceException(""Custom object "" + args.get(i) + "" not found!""); + } +- +- res.CUObjectsBiome = customTreesBiome.toArray(res.CUObjectsBiome); +- +- } +- +- if (hasCustomTreeWorld) +- { +- ArrayList customTreesWorld = new ArrayList(); +- for (CustomObjectCompiled objectCompiled : biomeConfig.worldConfig.CustomObjectsCompiled) ++ 
if(!object.canSpawnAsTree()) + { +- if (objectCompiled.CheckBiome(biomeConfig.Name)) +- { +- if (!objectCompiled.Tree) +- continue; +- customTreesWorld.add(objectCompiled); +- if (!objectCompiled.GroupId.equals("""")) +- { +- if (!Groups.containsKey(objectCompiled.GroupId)) +- Groups.put(objectCompiled.GroupId, new ArrayList()); +- +- Groups.get(objectCompiled.GroupId).add(objectCompiled); +- +- } +- } +- ++ throw new InvalidResourceException(""Custom object "" + args.get(i) + "" is not a tree!""); + } +- +- res.CUObjectsWorld = customTreesWorld.toArray(res.CUObjectsBiome); +- ++ trees.add(object); ++ treeNames.add(args.get(i)); ++ treeChances.add(getInt(args.get(i + 1), 1, 100)); + } ++ } -- IChangeSetMapping changesetsMapping = new IChangeSetMapping() { -+ private IChangeSetMapping createChangeSetMapping(final ITask task, -+ final List changesets) { -+ return new IChangeSetMapping() { +- for (CustomObjectCompiled objectCompiled : res.CUObjectsBiome) +- if (Groups.containsKey(objectCompiled.GroupId)) +- objectCompiled.GroupObjects = Groups.get(objectCompiled.GroupId).toArray(objectCompiled.GroupObjects); +- +- for (CustomObjectCompiled objectCompiled : res.CUObjectsWorld) +- if (Groups.containsKey(objectCompiled.GroupId)) +- objectCompiled.GroupObjects = Groups.get(objectCompiled.GroupId).toArray(objectCompiled.GroupObjects); +- +- for (CustomObjectCompiled objectCompiled : customTrees) +- if (Groups.containsKey(objectCompiled.GroupId)) +- objectCompiled.GroupObjects = Groups.get(objectCompiled.GroupId).toArray(objectCompiled.GroupObjects); +- +- Groups.clear(); +- +- +- res.TreeTypes = new TreeType[treeChances.size()]; +- res.TreeChances = new int[treeChances.size()]; +- res.CUObjects = new CustomObjectCompiled[treeChances.size()]; +- res.CUObjectsNames = new String[treeChances.size()]; ++ @Override ++ public ResourceType getType() ++ { ++ return ResourceType.biomeConfigResource; ++ } -- public ITask getTask() { -- return task; -- } -+ public ITask getTask() { -+ return task; -+ } -+ -+ public void addChangeSet(ChangeSet changeset) { -+ changesets.add(new TaskChangeSet(task, changeset)); -+ } -+ }; -+ } -+ -+ private void refreshInput() { -+ table.setInput(model.getInput()); -+ } -+ -+ private class ChangesetModel implements IChangesetModel { -+ -+ private boolean includeSubTasks; +- int customIndex = 0; +- for (int t = 0; t < treeTypes.size(); t++) ++ @Override ++ public String makeString() ++ { ++ String output = ""Tree("" + frequency; ++ for (int i = 0; i < treeNames.size(); i++) + { +- res.TreeTypes[t] = treeTypes.get(t); +- res.TreeChances[t] = treeChances.get(t); +- if (treeTypes.get(t) == TreeType.CustomTree) +- res.CUObjects[t] = customTrees.get(customIndex++); ++ output += "","" + treeNames.get(i) + "","" + treeChances.get(i); + } +- +- return true; ++ return output + "")""; + } -- public void addChangeSet(ChangeSet changeset) { -- changesets.add(new TaskChangeSet(task, changeset)); -+ public boolean isIncludeSubTasks() { -+ return includeSubTasks; -+ } -+ -+ public void setIncludeSubTasks(boolean includeSubTasks) { -+ boolean isChanged = this.includeSubTasks ^ includeSubTasks; -+ this.includeSubTasks = includeSubTasks; -+ if (isChanged) { -+ refreshInput(); -+ } -+ } -+ -+ public List getInput() { -+ final ITask task = getModel().getTask(); -+ -+ AbstractChangesetMappingProvider bestProvider = determineBestProvider(task); -+ final List changesets = new ArrayList(); -+ -+ final List changesetsMapping = new ArrayList(); -+ changesetsMapping.add(createChangeSetMapping(task, 
changesets)); -+ ; -+ if (includeSubTasks) { -+ if (task instanceof ITaskContainer) { -+ ITaskContainer taskContainer = (ITaskContainer) task; -+ for (ITask subTask : taskContainer.getChildren()) { -+ changesetsMapping.add(createChangeSetMapping(subTask, -+ changesets)); -+ } - } -- }; -- // FIXME progress monitor -- bestProvider.getChangesetsForTask(changesetsMapping, -- new NullProgressMonitor()); -- } catch (CoreException e) { -- // FIXME Auto-generated catch block -- e.printStackTrace(); -+ } -+ final AbstractChangesetMappingProvider provider = bestProvider; -+ BusyIndicator.showWhile(Display.getDefault(), new Runnable() { -+ -+ public void run() { -+ try { -+ for (IChangeSetMapping csm : changesetsMapping) { -+ provider.getChangesetsForTask(csm, -+ new NullProgressMonitor()); -+ } -+ } catch (CoreException e) { -+ getTaskEditorPage().getTaskEditor().setMessage(""An exception occurred "" + e.getMessage(), IMessageProvider.ERROR); -+ } -+ } -+ -+ }); -+ -+ return changesets; - } -- return changesets; - } + @Override +- protected String WriteString(Resource res, String blockSources) ++ public void spawn(LocalWorld world, Random random, int x, int z) + { +- String output; +- if (res.Type == ResourceType.Sapling) +- { +- if (res.BlockData == -1) +- output = ""All""; +- else +- output = """" + res.BlockData; - +- } else +- output = String.valueOf(res.Frequency); +- for (int i = 0; i < res.TreeChances.length; i++) +- { +- output += "",""; +- +- if (res.TreeTypes[i] == TreeType.CustomTreeWorld) +- output += BODefaultValues.BO_Use_World.stringValue() + "","" + res.TreeChances[i]; +- else if (res.TreeTypes[i] == TreeType.CustomTreeBiome) +- output += BODefaultValues.BO_Use_Biome.stringValue() + "","" + res.TreeChances[i]; +- else if (res.TreeTypes[i] == TreeType.CustomTree) +- output += res.CUObjects[i].Name + (res.CUObjects[i].ChangedSettings.equals("""") ? """" : (""("" + res.CUObjects[i].ChangedSettings + "")"")) + "","" + res.TreeChances[i]; +- else +- output += res.TreeTypes[i].name() + "","" + res.TreeChances[i]; +- } +- return output; ++ // Left blank, as process() already handles this + } + } +diff --git a/common/src/com/khorn/terraincontrol/generator/resourcegens/TreeType.java b/common/src/com/khorn/terraincontrol/generator/resourcegens/TreeType.java +index 4cf9d4f78..32271c4de 100644 +--- a/common/src/com/khorn/terraincontrol/generator/resourcegens/TreeType.java ++++ b/common/src/com/khorn/terraincontrol/generator/resourcegens/TreeType.java +@@ -2,17 +2,5 @@ + + public enum TreeType + { +- Tree, +- BigTree, +- Forest, +- HugeMushroom, +- SwampTree, +- Taiga1, +- Taiga2, +- JungleTree, +- GroundBush, +- CocoaTree, +- CustomTree, +- CustomTreeWorld, +- CustomTreeBiome ++ Tree, BigTree, Forest, HugeMushroom, SwampTree, Taiga1, Taiga2, JungleTree, GroundBush, CocoaTree, CustomTree, CustomTreeWorld, CustomTreeBiome } -diff --git a/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/versions/tasks/ui/internal/IChangesetModel.java b/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/versions/tasks/ui/internal/IChangesetModel.java -new file mode 100644 -index 00000000..8839451c ---- /dev/null -+++ b/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/versions/tasks/ui/internal/IChangesetModel.java -@@ -0,0 +1,18 @@ -+/******************************************************************************* -+ * Copyright (c) 2012 Research Group for Industrial Software (INSO), Vienna University of Technology. -+ * All rights reserved. 
This program and the accompanying materials -+ * are made available under the terms of the Eclipse Public License v1.0 -+ * which accompanies this distribution, and is available at -+ * http://www.eclipse.org/legal/epl-v10.html -+ * -+ * Contributors: -+ * Research Group for Industrial Software (INSO), Vienna University of Technology - initial API and implementation -+ *******************************************************************************/ -+package org.eclipse.mylyn.versions.tasks.ui.internal; -+ -+/** -+ * @author Kilian Matt -+ */ -+public interface IChangesetModel { -+ public void setIncludeSubTasks(boolean includeSubTasks); -+} \ No newline at end of file -diff --git a/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/versions/tasks/ui/internal/IncludeSubTasksAction.java b/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/versions/tasks/ui/internal/IncludeSubTasksAction.java -new file mode 100644 -index 00000000..1ae383c8 ---- /dev/null -+++ b/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/versions/tasks/ui/internal/IncludeSubTasksAction.java -@@ -0,0 +1,37 @@ -+/******************************************************************************* -+ * Copyright (c) 2012 Research Group for Industrial Software (INSO), Vienna University of Technology. -+ * All rights reserved. This program and the accompanying materials -+ * are made available under the terms of the Eclipse Public License v1.0 -+ * which accompanies this distribution, and is available at -+ * http://www.eclipse.org/legal/epl-v10.html -+ * -+ * Contributors: -+ * Research Group for Industrial Software (INSO), Vienna University of Technology - initial API and implementation -+ *******************************************************************************/ -+package org.eclipse.mylyn.versions.tasks.ui.internal; -+ -+import org.eclipse.jface.action.Action; -+import org.eclipse.mylyn.tasks.ui.TasksUiImages; -+import org.eclipse.swt.widgets.Event; -+ -+/** -+ * @author Kilian Matt -+ */ -+public class IncludeSubTasksAction extends Action { -+ private IChangesetModel model; -+ -+ public IncludeSubTasksAction(IChangesetModel model) { -+ super(""Include subtasks"",AS_CHECK_BOX); -+ setImageDescriptor(TasksUiImages.TASK_NEW_SUB); -+ this.model = model; -+ } -+ -+ public void run() { -+ model.setIncludeSubTasks(isChecked()); -+ } -+ -+ public void runWithEvent(Event event) { -+ model.setIncludeSubTasks(isChecked()); -+ } -+ -+} -diff --git a/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/versions/tasks/ui/internal/TaskChangesetLabelProvider.java b/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/versions/tasks/ui/internal/TaskChangesetLabelProvider.java -new file mode 100644 -index 00000000..966afc90 ---- /dev/null -+++ b/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/versions/tasks/ui/internal/TaskChangesetLabelProvider.java -@@ -0,0 +1,53 @@ -+/******************************************************************************* -+ * Copyright (c) 2012 Research Group for Industrial Software (INSO), Vienna University of Technology. -+ * All rights reserved. 
This program and the accompanying materials -+ * are made available under the terms of the Eclipse Public License v1.0 -+ * which accompanies this distribution, and is available at -+ * http://www.eclipse.org/legal/epl-v10.html -+ * -+ * Contributors: -+ * Research Group for Industrial Software (INSO), Vienna University of Technology - initial API and implementation -+ *******************************************************************************/ -+package org.eclipse.mylyn.versions.tasks.ui.internal; -+ -+import org.eclipse.jface.viewers.ILabelProviderListener; -+import org.eclipse.jface.viewers.ITableLabelProvider; -+import org.eclipse.mylyn.versions.tasks.core.TaskChangeSet; -+import org.eclipse.swt.graphics.Image; -+ -+/** -+ * @author Kilian Matt -+ */ -+public class TaskChangesetLabelProvider implements ITableLabelProvider { -+ public void addListener(ILabelProviderListener listener) { -+ } -+ -+ public void dispose() { -+ } -+ -+ public boolean isLabelProperty(Object element, String property) { -+ return false; -+ } -+ -+ public void removeListener(ILabelProviderListener listener) { -+ } -+ -+ public Image getColumnImage(Object element, int columnIndex) { -+ return null; -+ } +diff --git a/common/src/com/khorn/terraincontrol/generator/resourcegens/UnderWaterOreGen.java b/common/src/com/khorn/terraincontrol/generator/resourcegens/UnderWaterOreGen.java +index 02506b868..0a3c2daa2 100644 +--- a/common/src/com/khorn/terraincontrol/generator/resourcegens/UnderWaterOreGen.java ++++ b/common/src/com/khorn/terraincontrol/generator/resourcegens/UnderWaterOreGen.java +@@ -1,36 +1,43 @@ + package com.khorn.terraincontrol.generator.resourcegens; + ++import java.util.ArrayList; ++import java.util.List; ++import java.util.Random; + -+ public String getColumnText(Object element, int columnIndex) { -+ TaskChangeSet cs = ((TaskChangeSet) element); -+ switch (columnIndex) { -+ case 0: -+ return cs.getChangeset().getId(); -+ case 1: -+ return cs.getChangeset().getMessage(); -+ case 2: -+ return cs.getChangeset().getAuthor().getEmail(); -+ case 3: -+ return cs.getChangeset().getDate().toString(); -+ } -+ return element.toString() + "" "" + columnIndex; -+ } -+}" -59f1ae7b2b15776314059123e26dc1563ca064c8,Vala,"tracker-indexer-module-1.0: regenerate -",a,https://github.com/GNOME/vala/,⚠️ Could not parse repo info -ce554e2810317d96078e68b4ab9379efe4c8db61,Vala,"vapigen: Improve support for type_arguments - -Fixes bug 609693. 
-",a,https://github.com/GNOME/vala/,⚠️ Could not parse repo info -3c4d64750b9fba05c95bb6cc359fe2e6fac9127f,brandonborkholder$glg2d,"Brand new functionality that repaints only child components, not the entire scene.",p,https://github.com/brandonborkholder/glg2d,"diff --git a/src/main/java/glg2d/G2DGLCanvas.java b/src/main/java/glg2d/G2DGLCanvas.java -index 5c74a7d7..5da5cfac 100644 ---- a/src/main/java/glg2d/G2DGLCanvas.java -+++ b/src/main/java/glg2d/G2DGLCanvas.java -@@ -21,14 +21,15 @@ - import java.awt.Dimension; - import java.awt.Graphics; - import java.awt.LayoutManager2; -+import java.awt.Rectangle; - import java.io.Serializable; -+import java.util.Map; + import com.khorn.terraincontrol.LocalWorld; +-import com.khorn.terraincontrol.configuration.BiomeConfig; + import com.khorn.terraincontrol.configuration.Resource; ++import com.khorn.terraincontrol.exception.InvalidResourceException; - import javax.media.opengl.GLAutoDrawable; - import javax.media.opengl.GLCanvas; - import javax.media.opengl.GLCapabilities; - import javax.media.opengl.GLContext; - import javax.media.opengl.GLDrawableFactory; --import javax.media.opengl.GLEventListener; - import javax.media.opengl.GLJPanel; - import javax.media.opengl.GLPbuffer; - import javax.media.opengl.Threading; -@@ -41,17 +42,19 @@ public class G2DGLCanvas extends JComponent { +-import java.util.Random; +- +-public class UnderWaterOreGen extends ResourceGenBase ++public class UnderWaterOreGen extends Resource + { ++ private int blockId; ++ private List sourceBlocks; ++ private int size; ++ private int blockData; ++ + @Override +- protected void SpawnResource(LocalWorld world, Random rand, Resource res, int x, int z) ++ public void spawn(LocalWorld world, Random rand, int x, int z) + { + int y = world.getSolidHeight(x, z); + if (world.getLiquidHeight(x, z) < y || y == -1) + return; - protected GLAutoDrawable canvas; +- int i = rand.nextInt(res.MaxSize); +- int j = 2; +- for (int k = x - i; k <= x + i; k++) ++ int currentSize = rand.nextInt(size); ++ int two = 2; ++ for (int k = x - currentSize; k <= x + currentSize; k++) + { +- for (int m = z - i; m <= z + i; m++) ++ for (int m = z - currentSize; m <= z + currentSize; m++) + { + int n = k - x; + int i1 = m - z; +- if (n * n + i1 * i1 <= i * i) ++ if (n * n + i1 * i1 <= currentSize * currentSize) + { +- for (int i2 = y - j; i2 <= y + j; i2++) ++ for (int i2 = y - two; i2 <= y + two; i2++) + { + int i3 = world.getTypeId(k, i2, m); +- if (res.CheckSourceId(i3)) ++ if (sourceBlocks.contains(i3)) + { +- world.setBlock(k, i2, m, res.BlockId, 0, false, false, false); ++ world.setBlock(k, i2, m, blockId, blockData, false, false, false); + } + } + } +@@ -39,25 +46,30 @@ protected void SpawnResource(LocalWorld world, Random rand, Resource res, int x, + } -- protected boolean drawGL = true; -+ protected boolean drawGL; + @Override +- protected boolean ReadString(Resource res, String[] Props, BiomeConfig biomeConfig) throws NumberFormatException ++ public void load(List args) throws InvalidResourceException + { ++ assureSize(5, args); ++ blockId = getBlockId(args.get(0)); ++ blockData = getBlockData(args.get(0)); ++ size = getInt(args.get(1), 1, 8); ++ frequency = getInt(args.get(2), 1, 100); ++ rarity = getInt(args.get(3), 1, 100); ++ sourceBlocks = new ArrayList(); ++ for (int i = 4; i < args.size(); i++) ++ { ++ sourceBlocks.add(getBlockId(args.get(i))); ++ } ++ } - /** - * @see #removeNotify() - */ - protected GLPbuffer sideContext; +- res.BlockId = CheckBlock(Props[0]); +- res.MaxSize = 
CheckValue(Props[1], 1, 8); +- res.Frequency = CheckValue(Props[2], 1, 100); +- res.Rarity = CheckValue(Props[3], 0, 100); +- +- +- res.SourceBlockId = new int[Props.length - 4]; +- for (int i = 4; i < Props.length; i++) +- res.SourceBlockId[i - 4] = CheckBlock(Props[i]); +- +- return true; ++ @Override ++ public ResourceType getType() ++ { ++ return ResourceType.biomeConfigResource; + } -- protected GLEventListener g2dglListener; -+ protected G2DGLEventListener g2dglListener; + @Override +- protected String WriteString(Resource res, String blockSources) ++ public String makeString() + { +- return res.BlockIdToName(res.BlockId) + "","" + res.MaxSize + "","" + res.Frequency + "","" + res.Rarity + blockSources; ++ return ""UnderWaterOre("" + makeMaterial(blockId, blockData) + "","" + size + "","" + frequency + "","" + rarity + makeMaterial(sourceBlocks) + "")""; + } + } +diff --git a/common/src/com/khorn/terraincontrol/generator/resourcegens/UndergroundLakeGen.java b/common/src/com/khorn/terraincontrol/generator/resourcegens/UndergroundLakeGen.java +index 4ce19ea1d..61b3e417f 100644 +--- a/common/src/com/khorn/terraincontrol/generator/resourcegens/UndergroundLakeGen.java ++++ b/common/src/com/khorn/terraincontrol/generator/resourcegens/UndergroundLakeGen.java +@@ -1,23 +1,30 @@ + package com.khorn.terraincontrol.generator.resourcegens; + +-import com.khorn.terraincontrol.configuration.BiomeConfig; + import com.khorn.terraincontrol.configuration.Resource; ++import com.khorn.terraincontrol.exception.InvalidResourceException; + import com.khorn.terraincontrol.DefaultMaterial; + import com.khorn.terraincontrol.LocalWorld; ++import com.khorn.terraincontrol.TerrainControl; + import com.khorn.terraincontrol.util.MathHelper; - protected JComponent drawableComponent; ++import java.util.List; + import java.util.Random; -+ GLGraphics2D g2d; +-public class UndergroundLakeGen extends ResourceGenBase ++public class UndergroundLakeGen extends Resource + { ++ private int minSize; ++ private int maxSize; ++ private int minAltitude; ++ private int maxAltitude; + - public static GLCapabilities getDefaultCapabalities() { - GLCapabilities caps = new GLCapabilities(); - caps.setRedBits(8); -@@ -78,6 +81,7 @@ public G2DGLCanvas(GLCapabilities capabilities) { - add((Component) canvas); - - RepaintManager.setCurrentManager(GLAwareRepaintManager.INSTANCE); -+ setGLDrawing(true); - } - - public G2DGLCanvas(JComponent drawableComponent) { -@@ -120,6 +124,7 @@ public boolean isGLDrawing() { - public void setGLDrawing(boolean drawGL) { - this.drawGL = drawGL; - ((Component) canvas).setVisible(drawGL); -+ setOpaque(drawGL); - repaint(); - } + @Override +- protected void SpawnResource(LocalWorld world, Random rand, Resource res, int x, int z) ++ public void spawn(LocalWorld world, Random rand, int x, int z) + { +- int y = rand.nextInt(res.MaxAltitude - res.MinAltitude) + res.MinAltitude; ++ int y = rand.nextInt(maxAltitude - minAltitude) + minAltitude; -@@ -130,6 +135,9 @@ public void setDrawableComponent(JComponent component) { + if (y >= world.getHighestBlockYAt(x, z)) + return; +- int size = rand.nextInt(res.MaxSize - res.MinSize) + res.MinSize; ++ int size = rand.nextInt(maxSize - minSize) + minSize; + + float mPi = rand.nextFloat() * 3.141593F; + +@@ -54,29 +61,34 @@ protected void SpawnResource(LocalWorld world, Random rand, Resource res, int x, + int uBlock = world.getTypeId(xLake, yLake - 1, zLake); + if (uBlock != 0) // not air + world.setBlock(xLake, yLake, zLake, DefaultMaterial.WATER.id, 0, false, false, 
false); +- else // Air block ++ else ++ // Air block + world.setBlock(xLake, yLake, zLake, 0, 0, false, false, false); + } + } + } - if (g2dglListener != null) { - canvas.removeGLEventListener(g2dglListener); -+ if (sideContext != null) { -+ sideContext.removeGLEventListener(g2dglListener); -+ } + @Override +- protected boolean ReadString(Resource res, String[] Props, BiomeConfig biomeConfig) throws NumberFormatException ++ public void load(List args) throws InvalidResourceException + { ++ assureSize(6, args); ++ minSize = getInt(args.get(0), 1, 25); ++ maxSize = getInt(args.get(1), minSize, 60); ++ frequency = getInt(args.get(2), 1, 100); ++ rarity = getInt(args.get(3), 1, 100); ++ minAltitude = getInt(args.get(4), TerrainControl.worldDepth, TerrainControl.worldHeight); ++ maxAltitude = getInt(args.get(5), minAltitude + 1, TerrainControl.worldHeight); ++ } + +- res.MinSize = CheckValue(Props[0], 1, 25); +- res.MaxSize = CheckValue(Props[1], 1, 60, res.MinSize); +- res.Frequency = CheckValue(Props[2], 1, 100); +- res.Rarity = CheckValue(Props[3], 0, 100); +- res.MinAltitude = CheckValue(Props[4], 0, biomeConfig.worldConfig.WorldHeight); +- res.MaxAltitude = CheckValue(Props[5], 0, biomeConfig.worldConfig.WorldHeight, res.MinAltitude); +- +- return true; ++ @Override ++ public ResourceType getType() ++ { ++ return ResourceType.biomeConfigResource; } - if (drawableComponent != null) { -@@ -140,6 +148,10 @@ public void setDrawableComponent(JComponent component) { - if (drawableComponent != null) { - g2dglListener = createG2DListener(drawableComponent); - canvas.addGLEventListener(g2dglListener); -+ if (sideContext != null) { -+ sideContext.addGLEventListener(g2dglListener); -+ } + @Override +- protected String WriteString(Resource res, String blockSources) ++ public String makeString() + { +- return res.MinSize + "","" + res.MaxSize + "","" + res.Frequency + "","" + res.Rarity + "","" + res.MinAltitude + "","" + res.MaxAltitude; ++ return ""UnderGroundLake("" + minSize + "","" + maxSize + "","" + frequency + "","" + rarity + "","" + minAltitude + "","" + maxAltitude + "")""; + } + } +diff --git a/common/src/com/khorn/terraincontrol/generator/resourcegens/VinesGen.java b/common/src/com/khorn/terraincontrol/generator/resourcegens/VinesGen.java +index d921efc66..73328bc50 100644 +--- a/common/src/com/khorn/terraincontrol/generator/resourcegens/VinesGen.java ++++ b/common/src/com/khorn/terraincontrol/generator/resourcegens/VinesGen.java +@@ -1,22 +1,27 @@ + package com.khorn.terraincontrol.generator.resourcegens; + +-import com.khorn.terraincontrol.configuration.BiomeConfig; +-import com.khorn.terraincontrol.configuration.Resource; ++import java.util.List; ++import java.util.Random; + - add(drawableComponent); + import com.khorn.terraincontrol.DefaultMaterial; + import com.khorn.terraincontrol.LocalWorld; ++import com.khorn.terraincontrol.TerrainControl; ++import com.khorn.terraincontrol.configuration.Resource; ++import com.khorn.terraincontrol.exception.InvalidResourceException; - forceViewportToNativeDraw(drawableComponent); -@@ -150,7 +162,7 @@ public void setDrawableComponent(JComponent component) { - * Creates the GLEventListener that will draw the given component to the - * canvas. 
- */ -- protected GLEventListener createG2DListener(JComponent drawingComponent) { -+ protected G2DGLEventListener createG2DListener(JComponent drawingComponent) { - return new G2DGLEventListener(drawingComponent); - } +-import java.util.Random; +- +-public class VinesGen extends ResourceGenBase ++public class VinesGen extends Resource + { ++ private int minAltitude; ++ private int maxAltitude; ++ + @Override +- protected void SpawnResource(LocalWorld world, Random rand, Resource res, int x, int z) ++ public void spawn(LocalWorld world, Random rand, int x, int z) + { + int _x = x; + int _z = z; +- int y = res.MinAltitude; ++ int y = minAltitude; -@@ -213,8 +225,12 @@ public void run() { +- while (y < res.MaxAltitude) ++ while (y < maxAltitude) + { + if (world.isEmpty(_x, y, _z)) + { +@@ -36,51 +41,54 @@ protected void SpawnResource(LocalWorld world, Random rand, Resource res, int x, - @Override - public void paint(Graphics g) { -- if (drawGL && drawableComponent != null) { -- canvas.display(); -+ if (drawGL && drawableComponent != null && canvas != null) { -+ if (g2d == null) { -+ canvas.display(); -+ } else { -+ drawableComponent.paint(g2d); -+ } - } else { - super.paint(g); } -@@ -257,6 +273,17 @@ protected void forceViewportToNativeDraw(Container parent) { + +- + public boolean canPlace(LocalWorld world, int x, int y, int z, int paramInt4) + { + int id; + switch (paramInt4) + { +- default: +- return false; +- case 1: +- id = (world.getTypeId(x, y + 1, z)); +- break; +- case 2: +- id = (world.getTypeId(x, y, z + 1)); +- break; +- case 3: +- id = (world.getTypeId(x, y, z - 1)); +- break; +- case 5: +- id = (world.getTypeId(x - 1, y, z)); +- break; +- case 4: +- id = (world.getTypeId(x + 1, y, z)); +- break; ++ default: ++ return false; ++ case 1: ++ id = (world.getTypeId(x, y + 1, z)); ++ break; ++ case 2: ++ id = (world.getTypeId(x, y, z + 1)); ++ break; ++ case 3: ++ id = (world.getTypeId(x, y, z - 1)); ++ break; ++ case 5: ++ id = (world.getTypeId(x - 1, y, z)); ++ break; ++ case 4: ++ id = (world.getTypeId(x + 1, y, z)); ++ break; + } + return DefaultMaterial.getMaterial(id).isSolid(); } - } -+ @Override -+ public Graphics getGraphics() { -+ return g2d == null ? super.getGraphics() : g2d.create(); -+ } -+ -+ public void paintGLImmediately(Map r) { -+ g2dglListener.canvas = this; -+ g2dglListener.repaints = r; -+ canvas.display(); -+ } -+ - /** - * Implements a simple layout where all the components are the same size as - * the parent. 
-diff --git a/src/main/java/glg2d/G2DGLEventListener.java b/src/main/java/glg2d/G2DGLEventListener.java -index a65247f6..cf64d05d 100644 ---- a/src/main/java/glg2d/G2DGLEventListener.java -+++ b/src/main/java/glg2d/G2DGLEventListener.java -@@ -17,11 +17,15 @@ - package glg2d; +- +- public static final int[] d = {-1, -1, 2, 0, 1, 3}; +- public static final int[] OPPOSITE_FACING = {1, 0, 3, 2, 5, 4}; ++ public static final int[] d = { -1, -1, 2, 0, 1, 3 }; ++ public static final int[] OPPOSITE_FACING = { 1, 0, 3, 2, 5, 4 }; - import java.awt.Component; -+import java.awt.Rectangle; -+import java.util.Map; -+import java.util.Map.Entry; + @Override +- protected boolean ReadString(Resource res, String[] Props, BiomeConfig biomeConfig) throws NumberFormatException ++ public void load(List args) throws InvalidResourceException + { +- res.Frequency = CheckValue(Props[0], 1, 100); +- res.Rarity = CheckValue(Props[1], 0, 100); +- res.MinAltitude = CheckValue(Props[2], 0, biomeConfig.worldConfig.WorldHeight); +- res.MaxAltitude = CheckValue(Props[3], 0, biomeConfig.worldConfig.WorldHeight, res.MinAltitude); ++ assureSize(4, args); ++ frequency = getInt(args.get(0), 1, 100); ++ rarity = getInt(args.get(1), 1, 100); ++ minAltitude = getInt(args.get(2), TerrainControl.worldDepth, TerrainControl.worldHeight); ++ maxAltitude = getInt(args.get(3), minAltitude + 1, TerrainControl.worldHeight); ++ } - import javax.media.opengl.GL; - import javax.media.opengl.GLAutoDrawable; - import javax.media.opengl.GLContext; - import javax.media.opengl.GLEventListener; -+import javax.swing.JComponent; - import javax.swing.RepaintManager; - - /** -@@ -32,6 +36,10 @@ public class G2DGLEventListener implements GLEventListener { - - protected Component baseComponent; +- return true; ++ @Override ++ public ResourceType getType() ++ { ++ return ResourceType.biomeConfigResource; + } -+ Map repaints; -+ -+ G2DGLCanvas canvas; -+ - /** - * Creates a new listener that will paint using the {@code GLGraphics2D} - * object on each call to {@link #display(GLAutoDrawable)}. 
The provided -@@ -130,10 +138,30 @@ protected void paintGL(GLGraphics2D g2d) { - RepaintManager mgr = RepaintManager.currentManager(baseComponent); - boolean doubleBuffer = mgr.isDoubleBufferingEnabled(); - mgr.setDoubleBufferingEnabled(false); -- baseComponent.paint(g2d); -+ -+ if (isPaintingDirtyRects()) { -+ paintDirtyRects(); -+ } else { -+ baseComponent.paint(g2d); -+ } -+ - mgr.setDoubleBufferingEnabled(doubleBuffer); - } + @Override +- protected String WriteString(Resource res, String blockSources) ++ public String makeString() + { +- return res.Frequency + "","" + res.Rarity + "","" + res.MinAltitude + "","" + res.MaxAltitude; ++ return ""Vines("" + frequency + "","" + rarity + "","" + minAltitude + "","" + maxAltitude + "")""; + } + } +diff --git a/forge/src/com/khorn/terraincontrol/forge/TCPlugin.java b/forge/src/com/khorn/terraincontrol/forge/TCPlugin.java +index 8491df29c..a17b48b74 100644 +--- a/forge/src/com/khorn/terraincontrol/forge/TCPlugin.java ++++ b/forge/src/com/khorn/terraincontrol/forge/TCPlugin.java +@@ -9,7 +9,7 @@ + import com.khorn.terraincontrol.TerrainControl; + import com.khorn.terraincontrol.TerrainControlEngine; + import com.khorn.terraincontrol.configuration.TCDefaultValues; +-import com.khorn.terraincontrol.customobjects.ObjectsStore; ++import com.khorn.terraincontrol.customobjects.BODefaultValues; + import com.khorn.terraincontrol.util.Txt; + + import cpw.mods.fml.common.FMLCommonHandler; +@@ -52,8 +52,6 @@ public void load(FMLInitializationEvent event) + TerrainControl.startEngine(this); + // Register localization + LanguageRegistry.instance().addStringLocalization(""generator.TerrainControl"", ""TerrainControl""); +- // Load global custom objects +- ObjectsStore.ReadObjects(terrainControlDirectory); + // Register world type + worldType = new TCWorldType(this, 4, ""TerrainControl""); + // Register channel +@@ -98,4 +96,10 @@ public void log(Level level, String... 
messages) + System.out.println(""TerrainControl: "" + Txt.implode(messages, "","")); + } -+ protected boolean isPaintingDirtyRects() { -+ return repaints != null; -+ } -+ -+ protected void paintDirtyRects() { -+ canvas.g2d = g2d; -+ for (Entry entry : repaints.entrySet()) { -+ entry.getKey().paintImmediately(entry.getValue()); ++ @Override ++ public File getGlobalObjectsDirectory() ++ { ++ return new File(terrainControlDirectory, BODefaultValues.BO_GlobalDirectoryName.stringValue()); + } + -+ repaints = null; -+ canvas.g2d = null; -+ } -+ - @Override - public void init(GLAutoDrawable drawable) { - reshape(drawable, 0, 0, drawable.getWidth(), drawable.getWidth()); -diff --git a/src/main/java/glg2d/GLAwareRepaintManager.java b/src/main/java/glg2d/GLAwareRepaintManager.java -index 9251b7b4..1151bc59 100644 ---- a/src/main/java/glg2d/GLAwareRepaintManager.java -+++ b/src/main/java/glg2d/GLAwareRepaintManager.java -@@ -17,25 +17,78 @@ - package glg2d; - - import java.awt.Container; -+import java.awt.Rectangle; -+import java.util.IdentityHashMap; -+import java.util.Iterator; -+import java.util.Map; + }" +8ee465103850a3dca018273fe5952e40d5c45a66,spring-framework,Improve StringUtils.cleanPath--Issue: SPR-11793-,c,https://github.com/spring-projects/spring-framework,"diff --git a/spring-core/src/main/java/org/springframework/util/StringUtils.java b/spring-core/src/main/java/org/springframework/util/StringUtils.java +index 126cab69fcd3..b659486a19d0 100644 +--- a/spring-core/src/main/java/org/springframework/util/StringUtils.java ++++ b/spring-core/src/main/java/org/springframework/util/StringUtils.java +@@ -622,7 +622,12 @@ public static String cleanPath(String path) { + String prefix = """"; + if (prefixIndex != -1) { + prefix = pathToUse.substring(0, prefixIndex + 1); +- pathToUse = pathToUse.substring(prefixIndex + 1); ++ if (prefix.contains(""/"")) { ++ prefix = """"; ++ } ++ else { ++ pathToUse = pathToUse.substring(prefixIndex + 1); ++ } + } + if (pathToUse.startsWith(FOLDER_SEPARATOR)) { + prefix = prefix + FOLDER_SEPARATOR; +diff --git a/spring-core/src/test/java/org/springframework/util/StringUtilsTests.java b/spring-core/src/test/java/org/springframework/util/StringUtilsTests.java +index b366ed7f96d0..c362a92ed529 100644 +--- a/spring-core/src/test/java/org/springframework/util/StringUtilsTests.java ++++ b/spring-core/src/test/java/org/springframework/util/StringUtilsTests.java +@@ -299,6 +299,8 @@ public void testCleanPath() { + assertEquals(""../mypath/myfile"", StringUtils.cleanPath(""../mypath/../mypath/myfile"")); + assertEquals(""../mypath/myfile"", StringUtils.cleanPath(""mypath/../../mypath/myfile"")); + assertEquals(""/../mypath/myfile"", StringUtils.cleanPath(""/../mypath/myfile"")); ++ assertEquals(""/mypath/myfile"", StringUtils.cleanPath(""/a/:b/../../mypath/myfile"")); ++ assertEquals(""file:///c:/path/to/the%20file.txt"", StringUtils.cleanPath(""file:///c:/some/../path/to/the%20file.txt"")); + } -+import javax.media.opengl.GLAutoDrawable; - import javax.swing.JComponent; - import javax.swing.RepaintManager; -+import javax.swing.SwingUtilities; + public void testPathEquals() {" +5a273e85d77f50d49612ab7ad05b6a8b049c4bae,brandonborkholder$glg2d,"Well, realized that the fragment shader can't composite or blend. 
So I think this is the closest I'll get to a decent implementation of the Duff-Porter blend functionality.",p,https://github.com/brandonborkholder/glg2d,"diff --git a/src/joglg2d/JOGLG2D.java b/src/joglg2d/JOGLG2D.java +index a5479fde..ceadf90b 100644 +--- a/src/joglg2d/JOGLG2D.java ++++ b/src/joglg2d/JOGLG2D.java +@@ -221,52 +221,41 @@ public Composite getComposite() { + public void setComposite(Composite comp) { + if (comp instanceof AlphaComposite) { + switch (((AlphaComposite) comp).getRule()) { +- case AlphaComposite.CLEAR: +- gl.glBlendFunc(GL.GL_ZERO, GL.GL_ZERO); +- break; +- ++ /* ++ * Since the destination _always_ covers the entire canvas (i.e. there ++ * are always color components for every pixel), some of these ++ * composites can be collapsed into each other. They matter when Java2D ++ * is drawing into an image and the destination may not take up the ++ * entire canvas. ++ */ + case AlphaComposite.SRC: +- gl.glBlendFunc(GL.GL_ONE, GL.GL_ZERO); ++ case AlphaComposite.SRC_IN: ++ gl.glBlendFunc(GL.GL_SRC_ALPHA, GL.GL_ZERO); + break; - public class GLAwareRepaintManager extends RepaintManager { - public static RepaintManager INSTANCE = new GLAwareRepaintManager(); + case AlphaComposite.SRC_OVER: +- gl.glBlendFunc(GL.GL_ONE, GL.GL_ONE_MINUS_SRC_ALPHA); +- break; +- +- case AlphaComposite.SRC_IN: +- gl.glBlendFunc(GL.GL_DST_ALPHA, GL.GL_ZERO); ++ case AlphaComposite.SRC_ATOP: ++ gl.glBlendFunc(GL.GL_SRC_ALPHA, GL.GL_ONE_MINUS_SRC_ALPHA); + break; -+ private Map rects = new IdentityHashMap(); -+ -+ private volatile boolean queued = false; -+ - @Override - public void addDirtyRegion(JComponent c, int x, int y, int w, int h) { -- G2DGLCanvas glDrawable = getGLParent(c); -- if (glDrawable != null) { -- super.addDirtyRegion(glDrawable, 0, 0, glDrawable.getWidth(), glDrawable.getHeight()); -- } else { -+ G2DGLCanvas canvas = getGLParent(c); -+ if (canvas == null || c instanceof GLAutoDrawable) { - super.addDirtyRegion(c, x, y, w, h); -+ } else { -+ synchronized (rects) { -+ if (!rects.containsKey(c)) { -+ rects.put(c, new Rectangle(0, 0, c.getWidth(), c.getHeight())); -+ } -+ -+ if (!queued && rects.size() > 0) { -+ queued = true; -+ queue(); -+ } -+ } -+ } -+ } -+ -+ private void queue() { -+ SwingUtilities.invokeLater(new Runnable() { -+ @Override -+ public void run() { -+ Map r; -+ synchronized (rects) { -+ r = new IdentityHashMap(rects); -+ queued = false; -+ -+ rects.clear(); -+ } -+ -+ r = filter(r); -+ G2DGLCanvas canvas = getGLParent(r.keySet().iterator().next()); -+ canvas.paintGLImmediately(r); -+ } -+ }); -+ } -+ -+ private Map filter(Map rects) { -+ Iterator itr = rects.keySet().iterator(); -+ while (itr.hasNext()) { -+ JComponent desc = itr.next(); -+ for (JComponent key : rects.keySet()) { -+ if (desc != key && SwingUtilities.isDescendingFrom(desc, key)) { -+ itr.remove(); -+ break; -+ } -+ } - } -+ -+ return rects; - } + case AlphaComposite.SRC_OUT: +- gl.glBlendFunc(GL.GL_ONE_MINUS_DST_ALPHA, GL.GL_ZERO); +- break; +- +- case AlphaComposite.SRC_ATOP: +- gl.glBlendFunc(GL.GL_DST_ALPHA, GL.GL_ONE_MINUS_SRC_ALPHA); ++ case AlphaComposite.CLEAR: ++ gl.glBlendFunc(GL.GL_ZERO, GL.GL_ZERO); + break; - protected G2DGLCanvas getGLParent(JComponent component) { -- Container c = component; -+ Container c = component.getParent(); - while (true) { - if (c == null) { - return null; -diff --git a/src/main/java/glg2d/GLGraphics2D.java b/src/main/java/glg2d/GLGraphics2D.java -index afb517bc..eb8bb001 100644 ---- a/src/main/java/glg2d/GLGraphics2D.java -+++ 
b/src/main/java/glg2d/GLGraphics2D.java -@@ -139,8 +139,6 @@ protected void setCanvas(GLAutoDrawable drawable) { - protected void prePaint(GLAutoDrawable drawable, Component component) { - setCanvas(drawable); - setupState(component); + case AlphaComposite.DST: +- gl.glBlendFunc(GL.GL_ZERO, GL.GL_ONE); +- break; - -- gl.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT); - } + case AlphaComposite.DST_OVER: +- gl.glBlendFunc(GL.GL_ONE_MINUS_DST_ALPHA, GL.GL_ONE); ++ gl.glBlendFunc(GL.GL_ZERO, GL.GL_ONE); + break; - protected void setupState(Component component) { -diff --git a/src/test/java/glg2d/examples/shaders/CellShaderExample.java b/src/test/java/glg2d/examples/shaders/CellShaderExample.java -index 4cf8c5ba..2adb01b4 100644 ---- a/src/test/java/glg2d/examples/shaders/CellShaderExample.java -+++ b/src/test/java/glg2d/examples/shaders/CellShaderExample.java -@@ -13,7 +13,6 @@ - import java.awt.Dimension; + case AlphaComposite.DST_IN: ++ case AlphaComposite.DST_ATOP: + gl.glBlendFunc(GL.GL_ZERO, GL.GL_SRC_ALPHA); + break; - import javax.media.opengl.GLAutoDrawable; --import javax.media.opengl.GLEventListener; - import javax.swing.JComponent; - import javax.swing.JFrame; - import javax.swing.UIManager; -@@ -26,7 +25,7 @@ public static void main(String[] args) throws Exception { - JFrame frame = new JFrame(""Cell Shader Example""); - frame.setContentPane(new G2DGLCanvas(new UIDemo()) { - @Override -- protected GLEventListener createG2DListener(JComponent drawingComponent) { -+ protected G2DGLEventListener createG2DListener(JComponent drawingComponent) { - return new G2DGLEventListener(drawingComponent) { - @Override - protected GLGraphics2D createGraphics2D(GLAutoDrawable drawable) { -diff --git a/src/test/java/glg2d/examples/shaders/DepthSimExample.java b/src/test/java/glg2d/examples/shaders/DepthSimExample.java -index 2428cbd2..db66ad4f 100644 ---- a/src/test/java/glg2d/examples/shaders/DepthSimExample.java -+++ b/src/test/java/glg2d/examples/shaders/DepthSimExample.java -@@ -11,7 +11,6 @@ - - import javax.media.opengl.GL; - import javax.media.opengl.GLAutoDrawable; --import javax.media.opengl.GLEventListener; - import javax.swing.JComponent; - import javax.swing.JFrame; - import javax.swing.Timer; -@@ -25,7 +24,7 @@ public static void main(String[] args) throws Exception { - final JFrame frame = new JFrame(""Depth Shaker Example""); - frame.setContentPane(new G2DGLCanvas(new UIDemo()) { - @Override -- protected GLEventListener createG2DListener(JComponent drawingComponent) { -+ protected G2DGLEventListener createG2DListener(JComponent drawingComponent) { - return new G2DGLEventListener(drawingComponent) { - @Override - protected GLGraphics2D createGraphics2D(GLAutoDrawable drawable) { -diff --git a/src/test/java/glg2d/examples/shaders/UIDemo.java b/src/test/java/glg2d/examples/shaders/UIDemo.java -index 54f750fd..a0ef02cd 100644 ---- a/src/test/java/glg2d/examples/shaders/UIDemo.java -+++ b/src/test/java/glg2d/examples/shaders/UIDemo.java -@@ -66,7 +66,7 @@ public UIDemo() { - - JPanel rightSubPanel = new JPanel(new BorderLayout()); - rightPanel.add(rightSubPanel, BorderLayout.CENTER); -- rightSubPanel.add(createProgressComponent(), BorderLayout.NORTH); -+// rightSubPanel.add(createProgressComponent(), BorderLayout.NORTH); - - JSplitPane rightSplit = new JSplitPane(JSplitPane.VERTICAL_SPLIT); - rightSplit.setDividerSize(10); -@@ -214,6 +214,11 @@ JComponent createListComponent() { - model.addElement(""golf""); - model.addElement(""hotel""); - model.addElement(""india""); -+ 
model.addElement(""juliet""); -+ model.addElement(""kilo""); -+ model.addElement(""limo""); -+ model.addElement(""mike""); -+ model.addElement(""november""); - return new JList(model); - } + case AlphaComposite.DST_OUT: +- gl.glBlendFunc(GL.GL_ZERO, GL.GL_ONE_MINUS_SRC_ALPHA); +- break; +- +- case AlphaComposite.DST_ATOP: +- gl.glBlendFunc(GL.GL_ONE_MINUS_DST_ALPHA, GL.GL_SRC_ALPHA); +- break; +- + case AlphaComposite.XOR: +- gl.glBlendFunc(GL.GL_ONE_MINUS_DST_ALPHA, GL.GL_ONE_MINUS_SRC_ALPHA); ++ gl.glBlendFunc(GL.GL_ZERO, GL.GL_ONE_MINUS_SRC_ALPHA); + break; + } -@@ -286,6 +291,7 @@ public static void main(String[] args) throws Exception { +@@ -587,10 +576,9 @@ public void fillRect(int x, int y, int width, int height) { - // frame.setContentPane(new UIDemo()); - frame.setContentPane(new G2DGLCanvas(new UIDemo())); -+// frame.setContentPane(new UIDemo().createTabComponent()); - frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); - frame.setPreferredSize(new Dimension(1024, 768)); - frame.pack();" -5f2ee6ded78a158ec352a376b7d6ee5381e70599,drools,JBRULES-233 for leaps--git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@4214 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-,a,https://github.com/kiegroup/drools,"diff --git a/drools-compiler/src/test/java/org/drools/integrationtests/LeapsTest.java b/drools-compiler/src/test/java/org/drools/integrationtests/LeapsTest.java -index 6471a4b1fd2..82960c9491d 100644 ---- a/drools-compiler/src/test/java/org/drools/integrationtests/LeapsTest.java -+++ b/drools-compiler/src/test/java/org/drools/integrationtests/LeapsTest.java -@@ -162,9 +162,4 @@ public void testXorGroups() throws Exception { - assertTrue( ""rule2"", - list.contains( ""rule2"" ) ); - } -- -- public void testLogicalAssertionsDynamicRule() throws Exception { -- // TODO FIXME -- } -- - } -diff --git a/drools-core/src/main/java/org/drools/common/AbstractWorkingMemory.java b/drools-core/src/main/java/org/drools/common/AbstractWorkingMemory.java -index d561015325a..23a3167ef02 100644 ---- a/drools-core/src/main/java/org/drools/common/AbstractWorkingMemory.java -+++ b/drools-core/src/main/java/org/drools/common/AbstractWorkingMemory.java -@@ -111,7 +111,9 @@ abstract public class AbstractWorkingMemory implements WorkingMemory, - protected long propagationIdCounter; + @Override + public void clearRect(int x, int y, int width, int height) { +- Color origColor = color; +- setColor(background); ++ setColor(gl, background); + fillRect(x, y, width, height); +- setColor(origColor); ++ setColor(gl, color); + } - private ReentrantLock lock = new ReentrantLock( ); -- -+ -+ private List factQueue = new ArrayList( ); -+ - public AbstractWorkingMemory(RuleBase ruleBase, - FactHandleFactory handleFactory) { - this.ruleBase = ruleBase; -@@ -377,8 +379,52 @@ public PrimitiveLongMap getJustified() { - return this.justified; - } + @Override +diff --git a/test/joglg2d/VisualTest.java b/test/joglg2d/VisualTest.java +index 6b704dc4..59fecb75 100644 +--- a/test/joglg2d/VisualTest.java ++++ b/test/joglg2d/VisualTest.java +@@ -396,7 +396,7 @@ public void paint(Graphics2D g2d) { + } -+ public long getNextPropagationIdCounter() { -+ return this.propagationIdCounter++; -+ } -+ - abstract public void dispose(); + @Test +- public void srcOverRuleTest() throws Exception { ++ public void compositeTest() throws Exception { + tester.setPainter(new Painter() { + @Override + public void paint(Graphics2D g2d) { +@@ -434,7 +434,6 @@ void draw(Graphics2D g2d, AlphaComposite composite, String name) { + dest.lineTo(100, 0); + 
dest.lineTo(100, 100); + dest.closePath(); +- g2d.setColor(g2d.getBackground()); + g2d.setComposite(AlphaComposite.SrcOver); + g2d.setColor(new Color(255, 0, 0, 190)); + g2d.fill(dest);" +b07575194155e5f90e3ab514812d11dd74eef75f,abashev$vfs-s3,"Added support for Server Side Encryption +- Added S3FileSystemConfigBuilder +- Refactored S3FileSystem + Object so that FileSystem encapsulates +the various options (similar to SftpFileSystem+Object) +- Added get/setServerSideEncryption as necessary +- Added integration tests +- Fixed a bug in copy file tests where file counts were not being +tested properly. +",a,https://github.com/abashev/vfs-s3,"diff --git a/src/main/java/com/intridea/io/vfs/provider/s3/S3FileObject.java b/src/main/java/com/intridea/io/vfs/provider/s3/S3FileObject.java +index 683a6dde..96572709 100644 +--- a/src/main/java/com/intridea/io/vfs/provider/s3/S3FileObject.java ++++ b/src/main/java/com/intridea/io/vfs/provider/s3/S3FileObject.java +@@ -47,18 +47,8 @@ public class S3FileObject extends AbstractFileObject { -+ public void removeLogicalDependencies(Activation activation, -+ PropagationContext context, -+ Rule rule) throws FactException { -+ org.drools.util.LinkedList list = activation.getLogicalDependencies(); -+ if ( list == null || list.isEmpty() ) { -+ return; -+ } -+ for ( LogicalDependency node = (LogicalDependency) list.getFirst(); node != null; node = (LogicalDependency) node.getNext() ) { -+ InternalFactHandle handle = (InternalFactHandle) node.getFactHandle(); -+ Set set = (Set) this.justified.get( handle.getId( ) ); -+ // check set for null because in some weird cases on logical assertion -+ // it comes back with the same activation/handle and tries on -+ // already cleaned this.justified. only happens on removal of rule -+ // from the working memory -+ if (set != null) { -+ set.remove( node ); -+ if (set.isEmpty( )) { -+ this.justified.remove( handle.getId( ) ); -+ // this needs to be scheduled so we don't upset the current -+ // working memory operation -+ this.factQueue.add( new WorkingMemoryRetractAction( handle, -+ false, -+ true, -+ context.getRuleOrigin( ), -+ context.getActivationOrigin( ) ) ); -+ } -+ } -+ } -+ } -+ -+ public void removeLogicalDependencies(FactHandle handle) throws FactException { -+ Set set = (Set) this.justified.remove( ((InternalFactHandle) handle).getId() ); -+ if ( set != null && !set.isEmpty() ) { -+ for ( Iterator it = set.iterator(); it.hasNext(); ) { -+ LogicalDependency node = (LogicalDependency) it.next(); -+ node.getJustifier().getLogicalDependencies().remove( node ); -+ } -+ } -+ } -+ - public void addLogicalDependency(FactHandle handle, - Activation activation, - PropagationContext context, -@@ -395,34 +441,12 @@ public void addLogicalDependency(FactHandle handle, - set.add( node ); - } + private static final String MIMETYPE_JETS3T_DIRECTORY = ""application/x-directory""; -- public void removeLogicalDependencies( Activation activation, -- PropagationContext context, -- Rule rule ) throws FactException { -- org.drools.util.LinkedList list = activation.getLogicalDependencies(); -- if (list == null || list.isEmpty( )) { -- return; -- } -- for (LogicalDependency node = (LogicalDependency) list.getFirst( ); node != null; node = (LogicalDependency) node.getNext( )) { -- InternalFactHandle handle = (InternalFactHandle) node.getFactHandle( ); -- Set set = (Set) this.justified.get( handle.getId( ) ); -- set.remove( node ); -- if (set.isEmpty( )) { -- this.justified.remove( handle.getId( ) ); -- retractObject( handle, -- false, -- 
true, -- context.getRuleOrigin( ), -- context.getActivationOrigin( ) ); -- } -- } -- } +- /** Amazon S3 service */ +- private final AWSCredentials awsCredentials; +- private final AmazonS3 service; - -- public void removeLogicalDependencies(FactHandle handle) throws FactException { -- Set set = (Set) this.justified.remove( ((InternalFactHandle) handle).getId() ); -- if ( set != null && !set.isEmpty() ) { -- for ( Iterator it = set.iterator(); it.hasNext(); ) { -- LogicalDependency node = (LogicalDependency) it.next(); -- node.getJustifier().getLogicalDependencies().remove( node ); -+ protected void propagateQueuedActions() { -+ if (!this.factQueue.isEmpty( )) { -+ for (Iterator it = this.factQueue.iterator( ); it.hasNext( );) { -+ WorkingMemoryAction action = (WorkingMemoryAction) it.next( ); -+ it.remove( ); -+ action.propagate( ); - } - } - } -@@ -431,6 +455,41 @@ public Lock getLock() { - return this.lock; - } - -+ private interface WorkingMemoryAction { -+ public void propagate(); -+ } -+ -+ private class WorkingMemoryRetractAction implements WorkingMemoryAction { -+ private InternalFactHandle factHandle; -+ private boolean removeLogical; -+ private boolean updateEqualsMap; -+ private Rule ruleOrigin; -+ private Activation activationOrigin; -+ -+ -+ -+ public WorkingMemoryRetractAction(InternalFactHandle factHandle, -+ boolean removeLogical, -+ boolean updateEqualsMap, -+ Rule ruleOrigin, -+ Activation activationOrigin) { -+ super(); -+ this.factHandle = factHandle; -+ this.removeLogical = removeLogical; -+ this.updateEqualsMap = updateEqualsMap; -+ this.ruleOrigin = ruleOrigin; -+ this.activationOrigin = activationOrigin; -+ } -+ -+ public void propagate() { -+ retractObject( this.factHandle, -+ this.removeLogical, -+ this.updateEqualsMap, -+ this.ruleOrigin, -+ this.activationOrigin ); -+ } -+ } -+ - protected static class FactStatus { - private int counter; - private String status; -diff --git a/drools-core/src/main/java/org/drools/leaps/FactTable.java b/drools-core/src/main/java/org/drools/leaps/FactTable.java -index 9da995d6a05..f57f08a5922 100644 ---- a/drools-core/src/main/java/org/drools/leaps/FactTable.java -+++ b/drools-core/src/main/java/org/drools/leaps/FactTable.java -@@ -46,7 +46,7 @@ class FactTable extends Table { - * Tuples that are either already on agenda or are very close (missing - * exists or have not facts matching) - */ -- private final LinkedList tuples; -+ private LinkedList tuples; +- private final TransferManager transferManager; +- +- /** Amazon S3 bucket */ +- private final Bucket bucket; +- + /** Amazon S3 object */ + private ObjectMetadata objectMetadata; +- + private String objectKey; /** - * initializes base LeapsTable with appropriate Comparator and positive and -@@ -67,9 +67,8 @@ public FactTable(ConflictResolver conflictResolver) { - * @param workingMemory - * @param ruleHandle - */ -- public void addRule(WorkingMemoryImpl workingMemory, -- RuleHandle ruleHandle) { -- if ( !this.rules.contains( ruleHandle ) ) { -+ public void addRule( WorkingMemoryImpl workingMemory, RuleHandle ruleHandle ) { -+ if (!this.rules.contains( ruleHandle )) { - this.rules.add( ruleHandle ); - // push facts back to stack if needed - this.checkAndAddFactsToStack( workingMemory ); -@@ -81,8 +80,18 @@ public void addRule(WorkingMemoryImpl workingMemory, - * - * @param ruleHandle +@@ -82,16 +72,9 @@ public class S3FileObject extends AbstractFileObject { */ -- public void removeRule(RuleHandle ruleHandle) { -+ public void removeRule( RuleHandle ruleHandle ) { - 
this.rules.remove( ruleHandle ); -+ // remove tuples that are still there -+ LinkedList list = new LinkedList( ); -+ -+ for (Iterator it = this.getTuplesIterator( ); it.hasNext( );) { -+ LeapsTuple tuple = (LeapsTuple) it.next( ); -+ if (ruleHandle.getLeapsRule( ).getRule( ) != tuple.getLeapsRule( ).getRule( )) { -+ list.add( tuple ); -+ } -+ } -+ this.tuples = list; + private Owner fileOwner; + +- public S3FileObject( +- AbstractFileName fileName, S3FileSystem fileSystem, AWSCredentials awsCredentials, AmazonS3 service, +- TransferManager transferManager, Bucket bucket +- ) throws FileSystemException { ++ public S3FileObject(AbstractFileName fileName, ++ S3FileSystem fileSystem) throws FileSystemException { + super(fileName, fileSystem); +- +- this.awsCredentials = awsCredentials; +- this.service = service; +- this.bucket = bucket; +- this.transferManager = transferManager; } - /** -diff --git a/drools-core/src/main/java/org/drools/leaps/WorkingMemoryImpl.java b/drools-core/src/main/java/org/drools/leaps/WorkingMemoryImpl.java -index f0e796876df..8a12a47303d 100644 ---- a/drools-core/src/main/java/org/drools/leaps/WorkingMemoryImpl.java -+++ b/drools-core/src/main/java/org/drools/leaps/WorkingMemoryImpl.java -@@ -74,6 +74,8 @@ class WorkingMemoryImpl extends AbstractWorkingMemory + @Override +@@ -100,7 +83,7 @@ protected void doAttach() { + try { + // Do we have file with name? + String candidateKey = getS3Key(); +- objectMetadata = service.getObjectMetadata(bucket.getName(), candidateKey); ++ objectMetadata = getService().getObjectMetadata(getBucket().getName(), candidateKey); + objectKey = candidateKey; + logger.info(""Attach file to S3 Object: "" + objectKey); - private final IdentityMap leapsRulesToHandlesMap = new IdentityMap( ); +@@ -116,7 +99,7 @@ protected void doAttach() { + try { + // Do we have folder with that name? + String candidateKey = getS3Key() + FileName.SEPARATOR; +- objectMetadata = service.getObjectMetadata(bucket.getName(), candidateKey); ++ objectMetadata = getService().getObjectMetadata(getBucket().getName(), candidateKey); + objectKey = candidateKey; + logger.info(""Attach folder to S3 Object: "" + objectKey); -+ private final IdentityMap rulesActivationsMap = new IdentityMap( ); -+ - /** - * Construct. 
- * -@@ -214,11 +216,11 @@ public FactHandle assertObject( Object object, - boolean logical, - Rule rule, - Activation activation ) throws FactException { -- -+ FactHandleImpl handle ; - this.getLock().lock( ); - try { - // check if the object already exists in the WM -- FactHandleImpl handle = (FactHandleImpl) this.identityMap.get( object ); -+ handle = (FactHandleImpl) this.identityMap.get( object ); +@@ -156,7 +139,7 @@ protected void doDetach() throws Exception { - // lets see if the object is already logical asserted - FactStatus logicalState = (FactStatus) this.equalsMap.get( object ); -@@ -237,6 +239,7 @@ public FactHandle assertObject( Object object, - activation, - activation.getPropagationContext( ), - rule ); -+ - return logicalState.getHandle( ); - } + @Override + protected void doDelete() throws Exception { +- service.deleteObject(bucket.getName(), objectKey); ++ getService().deleteObject(getBucket().getName(), objectKey); + } -@@ -294,7 +297,6 @@ public FactHandle assertObject( Object object, - activation, - activation.getPropagationContext( ), - rule ); -- - } + @Override +@@ -164,7 +147,7 @@ protected void doCreateFolder() throws Exception { + if (logger.isDebugEnabled()) { + logger.debug( + ""Create new folder in bucket ["" + +- ((bucket != null) ? bucket.getName() : ""null"") + ++ ((getBucket() != null) ? getBucket().getName() : ""null"") + + ""] with key ["" + + ((objectMetadata != null) ? objectKey : ""null"") + + ""]"" +@@ -178,7 +161,9 @@ protected void doCreateFolder() throws Exception { + InputStream input = new ByteArrayInputStream(new byte[0]); + ObjectMetadata metadata = new ObjectMetadata(); + metadata.setContentLength(0); +- service.putObject(new PutObjectRequest(bucket.getName(), objectKey + FileName.SEPARATOR, input, metadata)); ++ if (((S3FileSystem)getFileSystem()).getServerSideEncryption()) ++ metadata.setServerSideEncryption(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION); ++ getService().putObject(new PutObjectRequest(getBucket().getName(), objectKey + FileName.SEPARATOR, input, metadata)); + } - // new leaps stack token -@@ -380,12 +382,13 @@ public FactHandle assertObject( Object object, - } - } - } -- -- return handle; -+ propagateQueuedActions( ); + @Override +@@ -228,13 +213,13 @@ protected String[] doListChildren() throws Exception { + path = path + ""/""; } - finally { -- this.getLock().unlock( ); -+ this.getLock( ).unlock( ); + +- ObjectListing listing = service.listObjects(bucket.getName(), path); ++ ObjectListing listing = getService().listObjects(getBucket().getName(), path); + final List summaries = new ArrayList(listing.getObjectSummaries()); + while (listing.isTruncated()) { + final ListObjectsRequest loReq = new ListObjectsRequest(); +- loReq.setBucketName(bucket.getName()); ++ loReq.setBucketName(getBucket().getName()); + loReq.setMarker(listing.getNextMarker()); +- listing = service.listObjects(loReq); ++ listing = getService().listObjects(loReq); + summaries.addAll(listing.getObjectSummaries()); } -+ -+ return handle; - } - /** -@@ -555,6 +558,8 @@ public void retractObject(FactHandle handle, - activation ); +@@ -275,13 +260,13 @@ protected FileObject[] doListChildrenResolved() throws Exception + path = path + ""/""; + } - this.workingMemoryEventSupport.fireObjectRetracted( context, handle, oldObject ); -+ -+ propagateQueuedActions(); - } - finally { - this.getLock().unlock( ); -@@ -586,14 +591,37 @@ private final void invalidateActivation( LeapsTuple tuple ) { - } - } +- ObjectListing listing = service.listObjects(bucket.getName(), 
path); ++ ObjectListing listing = getService().listObjects(getBucket().getName(), path); + final List summaries = new ArrayList(listing.getObjectSummaries()); + while (listing.isTruncated()) { + final ListObjectsRequest loReq = new ListObjectsRequest(); +- loReq.setBucketName(bucket.getName()); ++ loReq.setBucketName(getBucket().getName()); + loReq.setMarker(listing.getNextMarker()); +- listing = service.listObjects(loReq); ++ listing = getService().listObjects(loReq); + summaries.addAll(listing.getObjectSummaries()); + } -+ -+ -+ public void addLogicalDependency( FactHandle handle, -+ Activation activation, -+ PropagationContext context, -+ Rule rule ) throws FactException { -+ super.addLogicalDependency( handle, activation, context, rule ); -+ -+ LinkedList activations = (LinkedList) this.rulesActivationsMap.get( rule ); -+ if (activations == null) { -+ activations = new LinkedList( ); -+ this.rulesActivationsMap.put( rule, activations ); -+ } -+ activations.add( activation ); -+ } -+ -+ -+ public void removeLogicalDependencies( Activation activation, -+ PropagationContext context, -+ Rule rule ) throws FactException { -+ super.removeLogicalDependencies( activation, context, rule ); -+ } -+ - /** - * @see WorkingMemory +@@ -332,7 +317,7 @@ private void downloadOnce () throws FileSystemException { + final String failedMessage = ""Failed to download S3 Object %s. %s""; + final String objectPath = getName().getPath(); + try { +- S3Object obj = service.getObject(bucket.getName(), objectKey); ++ S3Object obj = getService().getObject(getBucket().getName(), objectKey); + logger.info(String.format(""Downloading S3 Object: %s"", objectPath)); + InputStream is = obj.getObjectContent(); + if (obj.getObjectMetadata().getContentLength() > 0) { +@@ -438,7 +423,7 @@ private Owner getS3Owner() { */ -- public void modifyObject(FactHandle handle, -+ public void modifyObject( FactHandle handle, - Object object, - Rule rule, - Activation activation ) throws FactException { -- this.getLock().lock( ); -+ this.getLock( ).lock( ); - try { + private AccessControlList getS3Acl() { + String key = getS3Key(); +- return """".equals(key) ? service.getBucketAcl(bucket.getName()) : service.getObjectAcl(bucket.getName(), key); ++ return """".equals(key) ? getService().getBucketAcl(getBucket().getName()) : getService().getObjectAcl(getBucket().getName(), key); + } - this.retractObject( handle ); -@@ -624,9 +652,10 @@ public void modifyObject(FactHandle handle, - handle, - ( (FactHandleImpl) handle ).getObject( ), - object ); -+ propagateQueuedActions( ); - } - finally { -- this.getLock().unlock( ); -+ this.getLock( ).unlock( ); + /** +@@ -450,12 +435,12 @@ private void putS3Acl (AccessControlList s3Acl) { + String key = getS3Key(); + // Determine context. 
Object or Bucket + if ("""".equals(key)) { +- service.setBucketAcl(bucket.getName(), s3Acl); ++ getService().setBucketAcl(getBucket().getName(), s3Acl); + } else { + // Before any operations with object it must be attached + doAttach(); + // Put ACL to S3 +- service.setObjectAcl(bucket.getName(), objectKey, s3Acl); ++ getService().setObjectAcl(getBucket().getName(), objectKey, s3Acl); } } -@@ -778,22 +807,36 @@ protected void removeRule( List rules ) { - this.getLock( ).lock( ); +@@ -608,7 +593,7 @@ public void setAcl (Acl acl) throws FileSystemException { + * @return + */ + public String getHttpUrl() { +- StringBuilder sb = new StringBuilder(""http://"" + bucket.getName() + "".s3.amazonaws.com/""); ++ StringBuilder sb = new StringBuilder(""http://"" + getBucket().getName() + "".s3.amazonaws.com/""); + String key = getS3Key(); + + // Determine context. Object or Bucket +@@ -627,9 +612,9 @@ public String getHttpUrl() { + public String getPrivateUrl() { + return String.format( + ""s3://%s:%s@%s/%s"", +- awsCredentials.getAWSAccessKeyId(), +- awsCredentials.getAWSSecretKey(), +- bucket.getName(), ++ getAwsCredentials().getAWSAccessKeyId(), ++ getAwsCredentials().getAWSSecretKey(), ++ getBucket().getName(), + getS3Key() + ); + } +@@ -646,7 +631,9 @@ public String getSignedUrl(int expireInSeconds) throws FileSystemException { + cal.add(SECOND, expireInSeconds); + try { - ArrayList ruleHandlesList; -- LeapsRule rule; -+ LeapsRule leapsRule; - RuleHandle ruleHandle; - for (Iterator it = rules.iterator( ); it.hasNext( );) { -- rule = (LeapsRule) it.next( ); -+ leapsRule = (LeapsRule) it.next( ); - // some times rules do not have ""normal"" constraints and only - // not and exists -- if (rule.getNumberOfColumns( ) > 0) { -- ruleHandlesList = (ArrayList) this.leapsRulesToHandlesMap.remove( rule ); -+ if (leapsRule.getNumberOfColumns( ) > 0) { -+ ruleHandlesList = (ArrayList) this.leapsRulesToHandlesMap.remove( leapsRule ); - for (int i = 0; i < ruleHandlesList.size( ); i++) { - ruleHandle = (RuleHandle) ruleHandlesList.get( i ); - // -- this.getFactTable( rule.getColumnClassObjectTypeAtPosition( i ) ) -+ this.getFactTable( leapsRule.getColumnClassObjectTypeAtPosition( i ) ) - .removeRule( ruleHandle ); - } - } -+ // -+ } -+ Rule rule = ((LeapsRule)rules.get(0)).getRule( ); -+ List activations = (List) this.rulesActivationsMap.remove( rule ); -+ if (activations != null) { -+ for (Iterator activationsIt = activations.iterator( ); activationsIt.hasNext( );) { -+ Activation activation = (Activation) activationsIt.next( ); -+ ((LeapsTuple)activation.getTuple()).setActivation(null); -+ this.removeLogicalDependencies( activation, -+ activation.getPropagationContext( ), -+ rule ); -+ } - } -+ -+ propagateQueuedActions(); - } - finally { - this.getLock( ).unlock( );" -17db0f11e85bb21def0af785e654db15120d874b,Delta Spike,"DELTASPIKE-339 don't lot the Exception as this is a normal operating situation -",p,https://github.com/apache/deltaspike,"diff --git a/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/util/JndiUtils.java b/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/util/JndiUtils.java -index 85fa77b79..9301da536 100644 ---- a/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/util/JndiUtils.java -+++ b/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/util/JndiUtils.java -@@ -190,7 +190,7 @@ public static Map list(String name, Class type) - } - catch (NamingException e) - { -- LOG.log(Level.SEVERE, ""InitialContext#list failed!"", e); 
-+ // this is expected if there is no entry in JNDI for the requested name or type +- return service.generatePresignedUrl(bucket.getName(), getS3Key(), cal.getTime()).toString(); ++ return getService().generatePresignedUrl( ++ getBucket().getName(), ++ getS3Key(), cal.getTime()).toString(); + } catch (AmazonServiceException e) { + throw new FileSystemException(e); } - return result; - }" -d2a76d6b71ad0a40daa420d1b4ca447f8fb3b0de,tapiji,"adds ui core -",a,https://github.com/tapiji/tapiji,"diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/Activator.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/Activator.java -new file mode 100644 -index 00000000..59f26760 ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/Activator.java -@@ -0,0 +1,50 @@ -+package org.eclipselabs.tapiji.tools.core.ui; -+ -+import org.eclipse.ui.plugin.AbstractUIPlugin; -+import org.osgi.framework.BundleContext; -+ -+/** -+ * The activator class controls the plug-in life cycle -+ */ -+public class Activator extends AbstractUIPlugin { -+ -+ // The plug-in ID -+ public static final String PLUGIN_ID = ""org.eclipselabs.tapiji.tools.core.ui""; //$NON-NLS-1$ +@@ -658,19 +645,40 @@ public String getSignedUrl(int expireInSeconds) throws FileSystemException { + * @throws FileSystemException + */ + public String getMD5Hash() throws FileSystemException { +- final String key = getS3Key(); + String hash = null; + ++ ObjectMetadata metadata = getObjectMetadata(); ++ if (metadata != null) { ++ hash = metadata.getETag(); // TODO this is something different than mentioned in methodname / javadoc ++ } + -+ // The shared instance -+ private static Activator plugin; -+ -+ /** -+ * The constructor -+ */ -+ public Activator() { -+ } ++ return hash; ++ } + -+ /* -+ * (non-Javadoc) -+ * @see org.eclipse.ui.plugin.AbstractUIPlugin#start(org.osgi.framework.BundleContext) -+ */ -+ public void start(BundleContext context) throws Exception { -+ super.start(context); -+ plugin = this; -+ } ++ public ObjectMetadata getObjectMetadata() throws FileSystemException { + try { +- ObjectMetadata metadata = service.getObjectMetadata(bucket.getName(), key); +- if (metadata != null) { +- hash = metadata.getETag(); // TODO this is something different than mentioned in methodname / javadoc +- } ++ return getService().getObjectMetadata(getBucket().getName(), getS3Key()); + } catch (AmazonServiceException e) { + throw new FileSystemException(e); + } ++ } + +- return hash; ++ /** FileSystem object containing configuration */ ++ protected AWSCredentials getAwsCredentials() { ++ return ((S3FileSystem)getFileSystem()).getAwsCredentials(); ++ } + -+ /* -+ * (non-Javadoc) -+ * @see org.eclipse.ui.plugin.AbstractUIPlugin#stop(org.osgi.framework.BundleContext) -+ */ -+ public void stop(BundleContext context) throws Exception { -+ plugin = null; -+ super.stop(context); -+ } ++ protected AmazonS3 getService() { ++ return ((S3FileSystem)getFileSystem()).getService(); ++ } + -+ /** -+ * Returns the shared instance -+ * -+ * @return the shared instance -+ */ -+ public static Activator getDefault() { -+ return plugin; -+ } ++ protected TransferManager getTransferManager() { ++ return ((S3FileSystem)getFileSystem()).getTransferManager(); ++ } + -+} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/decorators/ExcludedResource.java 
b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/decorators/ExcludedResource.java -new file mode 100644 -index 00000000..e0b12ca0 ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/decorators/ExcludedResource.java -@@ -0,0 +1,105 @@ -+package org.eclipselabs.tapiji.tools.core.ui.decorators; ++ /** Amazon S3 bucket */ ++ protected Bucket getBucket() { ++ return ((S3FileSystem)getFileSystem()).getBucket(); + } + + /** +@@ -689,11 +697,15 @@ protected void onClose() throws IOException { + FileChannel cacheFileChannel = getCacheFileChannel(); + + objectMetadata.setContentLength(cacheFileChannel.size()); +- objectMetadata.setContentType(Mimetypes.getInstance().getMimetype(getName().getBaseName())); +- ++ objectMetadata.setContentType( ++ Mimetypes.getInstance().getMimetype(getName().getBaseName())); ++ if (((S3FileSystem)getFileSystem()).getServerSideEncryption()) ++ objectMetadata.setServerSideEncryption( ++ ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION); + try { +- final Upload upload = transferManager.upload( +- bucket.getName(), objectKey, newInputStream(cacheFileChannel), objectMetadata ++ final Upload upload = getTransferManager().upload( ++ getBucket().getName(), objectKey, ++ newInputStream(cacheFileChannel), objectMetadata + ); + + upload.addProgressListener(new ProgressListener() { +@@ -708,7 +720,7 @@ public void progressChanged(ProgressEvent progressEvent) { + if ((progress - lastValue) > REPORT_THRESHOLD) { + logger.info( + ""File "" + objectKey + +- "" was uploaded to "" + bucket.getName() + ++ "" was uploaded to "" + getBucket().getName() + + "" for "" + (int) progress + ""%"" + ); + +@@ -716,7 +728,7 @@ public void progressChanged(ProgressEvent progressEvent) { + } + + if (progressEvent.getEventCode() == COMPLETED_EVENT_CODE) { +- logger.info(""File "" + objectKey + "" was successfully uploaded to "" + bucket.getName()); ++ logger.info(""File "" + objectKey + "" was successfully uploaded to "" + getBucket().getName()); + } + } + }); +@@ -790,21 +802,23 @@ public void copyFrom(final FileObject file, final FileSelector selector) + // Copy across + try + { ++ String srcBucketName = ((S3FileObject)srcFile).getBucket().getName(); ++ String srcFileName = ((S3FileObject)srcFile).getS3Key(); ++ String destBucketName = ((S3FileObject)destFile).getBucket().getName(); ++ String destFileName = ((S3FileObject)destFile).getS3Key(); + if (srcFile.getType() == FileType.FOLDER) { +- service.copyObject( +- ((S3FileObject)srcFile).bucket.getName(), +- ((S3FileObject)srcFile).getS3Key() + FileName.SEPARATOR, +- ((S3FileObject)destFile).bucket.getName(), +- ((S3FileObject)destFile).getS3Key() + FileName.SEPARATOR +- ); +- } else { +- service.copyObject( +- ((S3FileObject)srcFile).bucket.getName(), +- ((S3FileObject)srcFile).getS3Key(), +- ((S3FileObject)destFile).bucket.getName(), +- ((S3FileObject)destFile).getS3Key() +- ); +- } ++ srcFileName = srcFileName + FileName.SEPARATOR; ++ destFileName = destFileName + FileName.SEPARATOR; ++ } ++ CopyObjectRequest copy = new CopyObjectRequest( ++ srcBucketName, srcFileName, destBucketName, destFileName); ++ if (srcFile.getType() == FileType.FILE ++ && ((S3FileSystem)destFile.getFileSystem()).getServerSideEncryption()) { ++ ObjectMetadata meta = ((S3FileObject)srcFile).getObjectMetadata(); ++ meta.setServerSideEncryption(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION); ++ copy.setNewObjectMetadata(meta); ++ } ++ getService().copyObject(copy); + + destFile.close(); + } catch 
(AmazonServiceException e) { +diff --git a/src/main/java/com/intridea/io/vfs/provider/s3/S3FileSystem.java b/src/main/java/com/intridea/io/vfs/provider/s3/S3FileSystem.java +index b2653f7a..74dd428a 100644 +--- a/src/main/java/com/intridea/io/vfs/provider/s3/S3FileSystem.java ++++ b/src/main/java/com/intridea/io/vfs/provider/s3/S3FileSystem.java +@@ -33,15 +33,19 @@ public class S3FileSystem extends AbstractFileSystem { + private final Bucket bucket; + private final TransferManager transferManager; + ++ private Boolean serverSideEncryption; + -+import java.util.ArrayList; -+import java.util.List; + public S3FileSystem( +- S3FileName fileName, AWSCredentials awsCredentials, AmazonS3 service, FileSystemOptions fileSystemOptions +- ) throws FileSystemException { ++ S3FileName fileName, AWSCredentials awsCredentials, AmazonS3 service, ++ FileSystemOptions fileSystemOptions) throws FileSystemException { + super(fileName, null, fileSystemOptions); + + String bucketId = fileName.getBucketId(); + + this.awsCredentials = awsCredentials; + this.service = service; ++ this.serverSideEncryption = S3FileSystemConfigBuilder.getInstance() ++ .getServerSideEncryption(fileSystemOptions); + + try { + if (service.doesBucketExist(bucketId)) { +@@ -71,8 +75,32 @@ protected void addCapabilities(Collection caps) { + caps.addAll(S3FileProvider.capabilities); + } + ++ public Boolean getServerSideEncryption() { ++ return serverSideEncryption; ++ } + -+import org.eclipse.core.resources.IFile; -+import org.eclipse.core.resources.IFolder; -+import org.eclipse.core.resources.IResource; -+import org.eclipse.jface.viewers.DecorationOverlayIcon; -+import org.eclipse.jface.viewers.IDecoration; -+import org.eclipse.jface.viewers.ILabelDecorator; -+import org.eclipse.jface.viewers.ILabelProviderListener; -+import org.eclipse.jface.viewers.LabelProviderChangedEvent; -+import org.eclipse.swt.graphics.Image; -+import org.eclipselabs.tapiji.tools.core.Activator; -+import org.eclipselabs.tapiji.tools.core.Logger; -+import org.eclipselabs.tapiji.tools.core.builder.InternationalizationNature; -+import org.eclipselabs.tapiji.tools.core.model.IResourceExclusionListener; -+import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; -+import org.eclipselabs.tapiji.tools.core.model.manager.ResourceExclusionEvent; -+import org.eclipselabs.tapiji.tools.core.util.ImageUtils; ++ public void setServerSideEncryption(Boolean serverSideEncryption) { ++ this.serverSideEncryption = serverSideEncryption; ++ } + ++ protected Bucket getBucket() { ++ return bucket; ++ } + -+public class ExcludedResource implements ILabelDecorator, -+ IResourceExclusionListener { ++ protected AWSCredentials getAwsCredentials() { ++ return awsCredentials; ++ } + -+ private static final String ENTRY_SUFFIX = ""[no i18n]""; -+ private static final Image OVERLAY_IMAGE_ON = -+ ImageUtils.getImage(ImageUtils.IMAGE_EXCLUDED_RESOURCE_ON); -+ private static final Image OVERLAY_IMAGE_OFF = -+ ImageUtils.getImage(ImageUtils.IMAGE_EXCLUDED_RESOURCE_OFF); -+ private final List label_provider_listener = -+ new ArrayList (); -+ -+ public boolean decorate(Object element) { -+ boolean needsDecoration = false; -+ if (element instanceof IFolder || -+ element instanceof IFile) { -+ IResource resource = (IResource) element; -+ if (!InternationalizationNature.hasNature(resource.getProject())) -+ return false; -+ try { -+ ResourceBundleManager manager = ResourceBundleManager.getManager(resource.getProject()); -+ if (!manager.isResourceExclusionListenerRegistered(this)) -+ 
manager.registerResourceExclusionListener(this); -+ if (ResourceBundleManager.isResourceExcluded(resource)) { -+ needsDecoration = true; -+ } -+ } catch (Exception e) { -+ Logger.logError(e); -+ } -+ } -+ return needsDecoration; -+ } -+ -+ @Override -+ public void addListener(ILabelProviderListener listener) { -+ label_provider_listener.add(listener); -+ } ++ protected AmazonS3 getService() { ++ return service; ++ } + -+ @Override -+ public void dispose() { -+ ResourceBundleManager.unregisterResourceExclusionListenerFromAllManagers (this); -+ } ++ protected TransferManager getTransferManager() { ++ return transferManager; ++ } + -+ @Override -+ public boolean isLabelProperty(Object element, String property) { -+ return false; -+ } + @Override + protected FileObject createFile(AbstractFileName fileName) throws Exception { +- return new S3FileObject(fileName, this, awsCredentials, service, transferManager, bucket); ++ return new S3FileObject(fileName, this); + } + } +diff --git a/src/main/java/com/intridea/io/vfs/provider/s3/S3FileSystemConfigBuilder.java b/src/main/java/com/intridea/io/vfs/provider/s3/S3FileSystemConfigBuilder.java +new file mode 100644 +index 00000000..15de92fd +--- /dev/null ++++ b/src/main/java/com/intridea/io/vfs/provider/s3/S3FileSystemConfigBuilder.java +@@ -0,0 +1,47 @@ ++package com.intridea.io.vfs.provider.s3; + -+ @Override -+ public void removeListener(ILabelProviderListener listener) { -+ label_provider_listener.remove(listener); -+ } ++import org.apache.commons.vfs2.FileSystem; ++import org.apache.commons.vfs2.FileSystemConfigBuilder; ++import org.apache.commons.vfs2.FileSystemOptions; + -+ @Override -+ public void exclusionChanged(ResourceExclusionEvent event) { -+ LabelProviderChangedEvent labelEvent = new LabelProviderChangedEvent(this, event.getChangedResources().toArray()); -+ for (ILabelProviderListener l : label_provider_listener) -+ l.labelProviderChanged(labelEvent); -+ } ++public class S3FileSystemConfigBuilder extends FileSystemConfigBuilder { ++ private static final S3FileSystemConfigBuilder BUILDER = new S3FileSystemConfigBuilder(); ++ ++ private static final String SERVER_SIDE_ENCRYPTION = S3FileSystemConfigBuilder.class.getName() + "".SERVER_SIDE_ENCRYPTION""; + -+ @Override -+ public Image decorateImage(Image image, Object element) { -+ if (decorate(element)) { -+ DecorationOverlayIcon overlayIcon = new DecorationOverlayIcon(image, -+ Activator.getImageDescriptor(ImageUtils.IMAGE_EXCLUDED_RESOURCE_OFF), -+ IDecoration.TOP_RIGHT); -+ return overlayIcon.createImage(); -+ } else { -+ return image; -+ } -+ } ++ private S3FileSystemConfigBuilder() ++ { ++ super(""s3.""); ++ } + -+ @Override -+ public String decorateText(String text, Object element) { -+ if (decorate(element)) { -+ return text + "" "" + ENTRY_SUFFIX; -+ } else -+ return text; -+ } ++ public static S3FileSystemConfigBuilder getInstance() ++ { ++ return BUILDER; ++ } + ++ @Override ++ protected Class getConfigClass() { ++ return S3FileSystem.class; ++ } + ++ /** ++ * use server-side encryption. ++ * ++ * @param opts The FileSystemOptions. ++ * @param serverSideEncryption true if server-side encryption should be used. ++ */ ++ public void setServerSideEncryption(FileSystemOptions opts, boolean serverSideEncryption) ++ { ++ setParam(opts, SERVER_SIDE_ENCRYPTION, serverSideEncryption ? Boolean.TRUE : Boolean.FALSE); ++ } + ++ /** ++ * @param opts The FileSystemOptions. ++ * @return true if server-side encryption is being used. 
++ * @see #setServerSideEncryption(org.apache.commons.vfs2.FileSystemOptions, boolean) ++ */ ++ public Boolean getServerSideEncryption(FileSystemOptions opts) ++ { ++ return getBoolean(opts, SERVER_SIDE_ENCRYPTION, false); ++ } +} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/dialogs/AddLanguageDialoge.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/dialogs/AddLanguageDialoge.java -new file mode 100644 -index 00000000..1ff6136d ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/dialogs/AddLanguageDialoge.java -@@ -0,0 +1,153 @@ -+package org.eclipselabs.tapiji.tools.core.ui.dialogs; +diff --git a/src/test/java/com/intridea/io/vfs/provider/s3/S3ProviderTest.java b/src/test/java/com/intridea/io/vfs/provider/s3/S3ProviderTest.java +index d60c3eab..ca911083 100644 +--- a/src/test/java/com/intridea/io/vfs/provider/s3/S3ProviderTest.java ++++ b/src/test/java/com/intridea/io/vfs/provider/s3/S3ProviderTest.java +@@ -1,5 +1,6 @@ + package com.intridea.io.vfs.provider.s3; + ++import com.amazonaws.services.s3.model.ObjectMetadata; + import com.intridea.io.vfs.TestEnvironment; + import com.intridea.io.vfs.operations.IMD5HashGetter; + import com.intridea.io.vfs.operations.IPublicUrlsGetter; +@@ -14,6 +15,7 @@ + import java.io.FileNotFoundException; + import java.security.MessageDigest; + import java.security.NoSuchAlgorithmException; ++import java.util.Arrays; + import java.util.Locale; + import java.util.Properties; + import java.util.Random; +@@ -29,8 +31,7 @@ public class S3ProviderTest { + private static final String BACKUP_ZIP = ""src/test/resources/backup.zip""; + + private FileSystemManager fsManager; +- +- private String fileName, dirName, bucketName, bigFile; ++ private String fileName, encryptedFileName, dirName, bucketName, bigFile; + private FileObject file, dir; + + private FileSystemOptions opts; +@@ -41,6 +42,7 @@ public void setUp() throws FileNotFoundException, IOException { + + fsManager = VFS.getManager(); + Random r = new Random(); ++ encryptedFileName = ""vfs-encrypted-file"" + r.nextInt(1000); + fileName = ""vfs-file"" + r.nextInt(1000); + dirName = ""vfs-dir"" + r.nextInt(1000); + bucketName = config.getProperty(""s3.testBucket"", ""vfs-s3-tests""); +@@ -54,6 +56,17 @@ public void createFileOk() throws FileSystemException { + assertTrue(file.exists()); + } + ++ @Test ++ public void createEncryptedFileOk() throws FileSystemException { ++ file = fsManager.resolveFile(""s3://"" + bucketName + ""/test-place/"" + encryptedFileName, opts); ++ ((S3FileSystem)file.getFileSystem()).setServerSideEncryption(true); ++ file.createFile(); ++ assertTrue(file.exists()); ++ assertEquals( ++ ((S3FileObject) file).getObjectMetadata().getServerSideEncryption(), ++ ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION); ++ } + -+import java.util.Collections; -+import java.util.HashSet; -+import java.util.LinkedList; -+import java.util.List; -+import java.util.Locale; -+import java.util.Set; + @Test(expectedExceptions={FileSystemException.class}) + public void createFileFailed() throws FileSystemException { + FileObject tmpFile = fsManager.resolveFile(""s3://../new-mpoint/vfs-bad-file""); +@@ -130,6 +143,31 @@ public void upload() throws FileNotFoundException, IOException { + dest.copyFrom(src, Selectors.SELECT_SELF); + + assertTrue(dest.exists() && dest.getType().equals(FileType.FILE)); ++ assertEquals(((S3FileObject)dest).getObjectMetadata().getServerSideEncryption(), ++ null); 
++ } + -+import org.eclipse.jface.dialogs.Dialog; -+import org.eclipse.swt.SWT; -+import org.eclipse.swt.events.SelectionEvent; -+import org.eclipse.swt.events.SelectionListener; -+import org.eclipse.swt.graphics.Color; -+import org.eclipse.swt.graphics.Font; -+import org.eclipse.swt.layout.GridData; -+import org.eclipse.swt.layout.GridLayout; -+import org.eclipse.swt.widgets.Combo; -+import org.eclipse.swt.widgets.Composite; -+import org.eclipse.swt.widgets.Control; -+import org.eclipse.swt.widgets.Group; -+import org.eclipse.swt.widgets.Label; -+import org.eclipse.swt.widgets.Shell; -+import org.eclipse.swt.widgets.Text; -+import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; -+import org.eclipselabs.tapiji.tools.core.util.LocaleUtils; ++ @Test(dependsOnMethods = {""createEncryptedFileOk""}) ++ public void uploadEncrypted() throws FileNotFoundException, IOException { ++ FileObject dest = fsManager.resolveFile(""s3://"" + bucketName + ""/test-place/backup.zip""); ++ ((S3FileSystem)dest.getFileSystem()).setServerSideEncryption(true); + ++ // Delete file if exists ++ if (dest.exists()) { ++ dest.delete(); ++ } + -+public class AddLanguageDialoge extends Dialog{ -+ private Locale locale; -+ private Shell shell; -+ -+ private Text titelText; -+ private Text descriptionText; -+ private Combo cmbLanguage; -+ private Text language; -+ private Text country; -+ private Text variant; ++ // Copy data ++ final File backupFile = new File(BACKUP_ZIP); + ++ assertTrue(backupFile.exists(), ""Backup file should exists""); + -+ -+ -+ public AddLanguageDialoge(Shell parentShell){ -+ super(parentShell); -+ shell = parentShell; -+ } -+ -+ @Override -+ protected Control createDialogArea(Composite parent) { -+ Composite titelArea = new Composite(parent, SWT.NO_BACKGROUND); -+ Composite dialogArea = (Composite) super.createDialogArea(parent); -+ GridLayout layout = new GridLayout(1,true); -+ dialogArea.setLayout(layout); -+ -+ initDescription(titelArea); -+ initCombo(dialogArea); -+ initTextArea(dialogArea); -+ -+ titelArea.pack(); -+ dialogArea.pack(); -+ parent.pack(); -+ -+ return dialogArea; -+ } ++ FileObject src = fsManager.resolveFile(backupFile.getAbsolutePath()); ++ dest.copyFrom(src, Selectors.SELECT_SELF); + -+ private void initDescription(Composite titelArea) { -+ titelArea.setEnabled(false); -+ titelArea.setLayoutData(new GridData(SWT.CENTER, SWT.TOP, true, true, 1, 1)); -+ titelArea.setLayout(new GridLayout(1, true)); -+ titelArea.setBackground(new Color(shell.getDisplay(), 255, 255, 255)); -+ -+ titelText = new Text(titelArea, SWT.LEFT); -+ titelText.setFont(new Font(shell.getDisplay(),shell.getFont().getFontData()[0].getName(), 11, SWT.BOLD)); -+ titelText.setText(""Please, specify the desired language""); -+ -+ descriptionText = new Text(titelArea, SWT.WRAP); -+ descriptionText.setLayoutData(new GridData(450, 60)); //TODO improve -+ descriptionText.setText(""Note: "" + -+ ""In all ResourceBundles of the project/plug-in will be created a new properties-file with the basename of the ResourceBundle and the corresponding locale-extension. 
""+ -+ ""If the locale is just provided of a ResourceBundle, no new file will be created.""); -+ } -+ -+ private void initCombo(Composite dialogArea) { -+ cmbLanguage = new Combo(dialogArea, SWT.DROP_DOWN); -+ cmbLanguage.setLayoutData(new GridData(SWT.CENTER, SWT.CENTER, true, true, 1, 1)); -+ -+ final Locale[] locales = Locale.getAvailableLocales(); -+ final Set localeSet = new HashSet(); -+ List localeNames = new LinkedList(); -+ -+ for (Locale l : locales){ -+ localeNames.add(l.getDisplayName()); -+ localeSet.add(l); -+ } -+ -+ Collections.sort(localeNames); -+ -+ String[] s= new String[localeNames.size()]; -+ cmbLanguage.setItems(localeNames.toArray(s)); -+ cmbLanguage.add(ResourceBundleManager.defaultLocaleTag, 0); -+ -+ cmbLanguage.addSelectionListener(new SelectionListener() { -+ @Override -+ public void widgetSelected(SelectionEvent e) { -+ int selectIndex = ((Combo)e.getSource()).getSelectionIndex(); -+ if (!cmbLanguage.getItem(selectIndex).equals(ResourceBundleManager.defaultLocaleTag)){ -+ Locale l = LocaleUtils.getLocaleByDisplayName(localeSet, cmbLanguage.getItem(selectIndex)); -+ -+ language.setText(l.getLanguage()); -+ country.setText(l.getCountry()); -+ variant.setText(l.getVariant()); -+ }else { -+ language.setText(""""); -+ country.setText(""""); -+ variant.setText(""""); -+ } -+ } -+ @Override -+ public void widgetDefaultSelected(SelectionEvent e) { -+ // TODO Auto-generated method stub -+ } -+ }); -+ } -+ -+ private void initTextArea(Composite dialogArea) { -+ final Group group = new Group (dialogArea, SWT.SHADOW_ETCHED_IN); -+ group.setLayoutData(new GridData(SWT.CENTER, SWT.CENTER, true, true, 1, 1)); -+ group.setLayout(new GridLayout(3, true)); -+ group.setText(""Locale""); -+ -+ Label languageLabel = new Label(group, SWT.SINGLE); -+ languageLabel.setText(""Language""); -+ Label countryLabel = new Label(group, SWT.SINGLE); -+ countryLabel.setText(""Country""); -+ Label variantLabel = new Label(group, SWT.SINGLE); -+ variantLabel.setText(""Variant""); -+ -+ language = new Text(group, SWT.SINGLE); -+ country = new Text(group, SWT.SINGLE); -+ variant = new Text(group, SWT.SINGLE); -+ } -+ -+ @Override -+ protected void okPressed() { -+ locale = new Locale(language.getText(), country.getText(), variant.getText()); -+ -+ super.okPressed(); -+ } -+ -+ public Locale getSelectedLanguage() { -+ return locale; -+ } -+} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/dialogs/CreatePatternDialoge.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/dialogs/CreatePatternDialoge.java -new file mode 100644 -index 00000000..499fff0d ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/dialogs/CreatePatternDialoge.java -@@ -0,0 +1,60 @@ -+package org.eclipselabs.tapiji.tools.core.ui.dialogs; -+ -+import org.eclipse.jface.dialogs.Dialog; -+import org.eclipse.swt.SWT; -+import org.eclipse.swt.layout.GridData; -+import org.eclipse.swt.layout.GridLayout; -+import org.eclipse.swt.widgets.Composite; -+import org.eclipse.swt.widgets.Control; -+import org.eclipse.swt.widgets.Label; -+import org.eclipse.swt.widgets.Shell; -+import org.eclipse.swt.widgets.Text; -+ -+public class CreatePatternDialoge extends Dialog{ -+ private String pattern; -+ private Text patternText; -+ -+ -+ public CreatePatternDialoge(Shell shell) { -+ this(shell,""""); -+ } -+ -+ public CreatePatternDialoge(Shell shell, String pattern) { -+ super(shell); -+ this.pattern = pattern; -+// 
setShellStyle(SWT.RESIZE); -+ } ++ assertTrue(dest.exists() && dest.getType().equals(FileType.FILE)); ++ assertEquals(((S3FileObject)dest).getObjectMetadata().getServerSideEncryption(), ++ ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION); + } + + @Test(dependsOnMethods={""createFileOk""}) +@@ -318,7 +356,9 @@ public void getUrls() throws FileSystemException { + + final String signedUrl = urlsGetter.getSignedUrl(60); + +- assertTrue(signedUrl.startsWith(""https://"" + bucketName + "".s3.amazonaws.com/test-place%2Fbackup.zip?"")); ++ assertTrue( ++ signedUrl.startsWith(""https://s3.amazonaws.com/"" + bucketName + ""/test-place%2Fbackup.zip?""), ++ signedUrl); + assertTrue(signedUrl.indexOf(""Signature="") != (-1)); + assertTrue(signedUrl.indexOf(""Expires="") != (-1)); + assertTrue(signedUrl.indexOf(""AWSAccessKeyId="") != (-1)); +@@ -347,16 +387,40 @@ public void getMD5Hash() throws NoSuchAlgorithmException, FileNotFoundException, + + @Test(dependsOnMethods={""findFiles""}) + public void copyInsideBucket() throws FileSystemException { +- FileObject testsDir = fsManager.resolveFile(dir, ""find-tests""); +- FileObject testsDirCopy = testsDir.getParent().resolveFile(""find-tests-copy""); +- testsDirCopy.copyFrom(testsDir, Selectors.SELECT_SELF_AND_CHILDREN); +- +- // Should have same number of files +- FileObject[] files = testsDir.findFiles(Selectors.SELECT_ALL); +- FileObject[] filesCopy = testsDir.findFiles(Selectors.SELECT_ALL); +- assertEquals(files.length, filesCopy.length); ++ FileObject testsDir = fsManager.resolveFile(dir, ""find-tests""); ++ FileObject testsDirCopy = testsDir.getParent().resolveFile(""find-tests-copy""); ++ testsDirCopy.copyFrom(testsDir, Selectors.SELECT_SELF_AND_CHILDREN); + -+ @Override -+ protected Control createDialogArea(Composite parent) { -+ Composite composite = new Composite(parent, SWT.NONE); -+ composite.setLayout(new GridLayout(1, true)); -+ composite.setLayoutData(new GridData(SWT.FILL,SWT.TOP, false, false)); -+ -+ Label descriptionLabel = new Label(composite, SWT.NONE); -+ descriptionLabel.setText(""Enter a regular expression:""); -+ -+ patternText = new Text(composite, SWT.WRAP | SWT.MULTI); -+ GridData gData = new GridData(SWT.FILL, SWT.FILL, true, false); -+ gData.widthHint = 400; -+ gData.heightHint = 60; -+ patternText.setLayoutData(gData); -+ patternText.setText(pattern); -+ ++ // Should have same number of files ++ FileObject[] files = testsDir.findFiles(Selectors.SELECT_SELF_AND_CHILDREN); ++ FileObject[] filesCopy = testsDirCopy.findFiles(Selectors.SELECT_SELF_AND_CHILDREN); ++ assertEquals(files.length, filesCopy.length, ++ Arrays.deepToString(files) + "" vs. 
"" + Arrays.deepToString(filesCopy)); + } + ++ @Test(dependsOnMethods={""findFiles""}) ++ public void copyAllToEncryptedInsideBucket() throws FileSystemException { ++ FileObject testsDir = fsManager.resolveFile(dir, ""find-tests""); ++ FileObject testsDirCopy = testsDir.getParent().resolveFile(""find-tests-encrypted-copy""); ++ ((S3FileSystem)testsDirCopy.getFileSystem()).setServerSideEncryption(true); + -+ -+ return composite; -+ } ++ testsDirCopy.copyFrom(testsDir, Selectors.SELECT_ALL); + -+ @Override -+ protected void okPressed() { -+ pattern = patternText.getText(); -+ -+ super.okPressed(); -+ } -+ -+ public String getPattern(){ -+ return pattern; -+ } ++ // Should have same number of files ++ FileObject[] files = testsDir.findFiles(Selectors.SELECT_ALL); ++ FileObject[] filesCopy = testsDirCopy.findFiles(Selectors.SELECT_ALL); ++ assertEquals(files.length, filesCopy.length); + -+} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/dialogs/CreateResourceBundleEntryDialog.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/dialogs/CreateResourceBundleEntryDialog.java -new file mode 100644 -index 00000000..f2cd09da ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/dialogs/CreateResourceBundleEntryDialog.java -@@ -0,0 +1,377 @@ -+package org.eclipselabs.tapiji.tools.core.ui.dialogs; ++ for (int i = 0; i < files.length; i++) { ++ if (files[i].getType() == FileType.FILE) { ++ assertEquals(((S3FileObject)files[i]).getObjectMetadata().getServerSideEncryption(), ++ null); ++ assertEquals(((S3FileObject)filesCopy[i]).getObjectMetadata().getServerSideEncryption(), ++ ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION); ++ } ++ } ++ } + -+import java.util.Collection; -+import java.util.Locale; + @Test(dependsOnMethods={""findFiles"", ""download""}) + public void delete() throws FileSystemException { + FileObject testsDir = fsManager.resolveFile(dir, ""find-tests"");" +4ae79b6666c22898decaee37c3e33e209ff23b30,d3scomp$jdeeco,Remove old MATSimSimulation and move the functionality into the plugin,p,https://github.com/d3scomp/jdeeco,"diff --git a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimSimulation.java b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimSimulation.java +deleted file mode 100644 +index 7da955166..000000000 +--- a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimSimulation.java ++++ /dev/null +@@ -1,213 +0,0 @@ +-package cz.cuni.mff.d3s.deeco.simulation.matsim; +- +-import java.util.Collection; +-import java.util.HashMap; +-import java.util.HashSet; +-import java.util.Map; +-import java.util.Set; +-import java.util.TreeSet; +- +-import org.matsim.api.core.v01.TransportMode; +-import org.matsim.core.controler.Controler; +-import org.matsim.core.controler.events.StartupEvent; +-import org.matsim.core.controler.listener.StartupListener; +-import org.matsim.core.mobsim.framework.Mobsim; +-import org.matsim.core.router.util.TravelTime; +-import org.matsim.withinday.trafficmonitoring.TravelTimeCollector; +-import org.matsim.withinday.trafficmonitoring.TravelTimeCollectorFactory; +- +-import cz.cuni.mff.d3s.deeco.logging.Log; +-import cz.cuni.mff.d3s.jdeeco.matsim.old.simulation.Simulation; +- +-public class MATSimSimulation extends Simulation implements MATSimSimulationStepListener { +- +- private static final String SIMULATION_CALLBACK = ""SIMULATION_CALLBACK""; +- +- private long currentMilliseconds; 
+- private final long simulationStep; // in milliseconds +- private final TravelTime travelTime; +- private final TreeSet callbacks; +- private final Map hostIdToCallback; +- private final Controler controler; +- private final JDEECoWithinDayMobsimListener listener; +- private final MATSimDataProvider matSimProvider; +- private final MATSimDataReceiver matSimReceiver; +- private final Map hosts; +- private final MATSimExtractor extractor; +- +- public MATSimSimulation(MATSimDataReceiver matSimReceiver, MATSimDataProvider matSimProvider, +- MATSimUpdater updater, MATSimExtractor extractor, +- final Collection agentSources, String matSimConf) { +- this.callbacks = new TreeSet<>(); +- this.hostIdToCallback = new HashMap<>(); +- this.hosts = new HashMap<>(); +- +- this.controler = new MATSimPreloadingControler(matSimConf); +- this.controler.setOverwriteFiles(true); +- this.controler.getConfig().getQSimConfigGroup().setSimStarttimeInterpretation(""onlyUseStarttime""); +- +- double end = this.controler.getConfig().getQSimConfigGroup().getEndTime(); +- double start = this.controler.getConfig().getQSimConfigGroup().getStartTime(); +- double step = this.controler.getConfig().getQSimConfigGroup().getTimeStepSize(); +- Log.i(""Starting simulation: matsimStartTime: "" + start + "" matsimEndTime: "" + end); +- this.extractor = extractor; +- this.listener = new JDEECoWithinDayMobsimListener(this, updater, extractor); +- this.matSimProvider = matSimProvider; +- this.matSimReceiver = matSimReceiver; +- +- Set analyzedModes = new HashSet(); +- analyzedModes.add(TransportMode.car); +- travelTime = new TravelTimeCollectorFactory().createTravelTimeCollector(controler.getScenario(), analyzedModes); +- +- controler.addControlerListener(new StartupListener() { +- public void notifyStartup(StartupEvent event) { +- controler.getEvents().addHandler((TravelTimeCollector) travelTime); +- controler.getMobsimListeners().add((TravelTimeCollector) travelTime); +- controler.setMobsimFactory(new JDEECoMobsimFactory(listener, agentSources)); +- } +- }); +- /** +- * Bind MATSim listener with the agent source. It is necessary to let the listener know about the jDEECo agents +- * that it needs to update with data coming from a jDEECo runtime. 
+- */ +- for (AdditionAwareAgentSource source : agentSources) { +- if (source instanceof JDEECoAgentSource) { +- listener.registerAgentProvider((JDEECoAgentSource) source); +- } +- } +- +- this.simulationStep = secondsToMilliseconds(step); +- currentMilliseconds = secondsToMilliseconds(controler.getConfig().getQSimConfigGroup().getStartTime()); +- } +- +- public void addHost(String id, cz.cuni.mff.d3s.jdeeco.matsim.MATSimSimulation.Host host) { +- hosts.put(id, host); +- } +- +- public cz.cuni.mff.d3s.jdeeco.matsim.MATSimSimulation.Host getHost(String id) { +- return hosts.get(id); +- } +- +- public Controler getControler() { +- return this.controler; +- } +- +- public TravelTime getTravelTime() { +- return this.travelTime; +- } +- +- @Override +- public long getCurrentMilliseconds() { +- return currentMilliseconds; +- } +- +- @Override +- public synchronized void callAt(long absoluteTime, String hostId) { +- Callback callback = hostIdToCallback.remove(hostId); +- if (callback != null) { +- callbacks.remove(callback); +- } +- callback = new Callback(hostId, absoluteTime); +- hostIdToCallback.put(hostId, callback); +- // System.out.println(""For "" + absoluteTime); +- callbacks.add(callback); +- } +- +- @Override +- public void at(double seconds, Mobsim mobsim) { +- // Exchange data with MATSim +- long milliseconds = secondsToMilliseconds(seconds); +- matSimReceiver.setMATSimData(extractor.extractFromMATSim(listener.getAllJDEECoAgents(), mobsim)); +- listener.updateJDEECoAgents(matSimProvider.getMATSimData()); +- // Add callback for the MATSim step +- callAt(milliseconds + simulationStep, SIMULATION_CALLBACK); +- cz.cuni.mff.d3s.jdeeco.matsim.MATSimSimulation.Host host; +- Callback callback; +- // Iterate through all the callbacks until the MATSim callback. +- while (!callbacks.isEmpty()) { +- callback = callbacks.pollFirst(); +- if (callback.getHostId().equals(SIMULATION_CALLBACK)) { +- break; +- } +- currentMilliseconds = callback.getAbsoluteTime(); +- // System.out.println(""At: "" + currentMilliseconds); +- host = (cz.cuni.mff.d3s.jdeeco.matsim.MATSimSimulation.Host) hosts.get(callback.hostId); +- host.at(millisecondsToSeconds(currentMilliseconds)); +- } +- } +- +- public synchronized void run() { +- controler.run(); +- } +- +- private class Callback implements Comparable { +- +- private final long milliseconds; +- private final String hostId; +- +- public Callback(String hostId, long milliseconds) { +- this.hostId = hostId; +- this.milliseconds = milliseconds; +- } +- +- public long getAbsoluteTime() { +- return milliseconds; +- } +- +- public String getHostId() { +- return hostId; +- } +- +- @Override +- public int compareTo(Callback c) { +- if (c.getAbsoluteTime() < milliseconds) { +- return 1; +- } else if (c.getAbsoluteTime() > milliseconds) { +- return -1; +- } else if (this == c) { +- return 0; +- } else { +- return this.hashCode() < c.hashCode() ? 1 : -1; +- } +- } +- +- public String toString() { +- return hostId + "" "" + milliseconds; +- } +- +- @Override +- public int hashCode() { +- final int prime = 31; +- int result = 1; +- result = prime * result + getOuterType().hashCode(); +- result = prime * result + ((hostId == null) ? 
0 : hostId.hashCode()); +- result = prime * result + (int) (milliseconds ^ (milliseconds >>> 32)); +- return result; +- } +- +- @Override +- public boolean equals(Object obj) { +- if (this == obj) +- return true; +- if (obj == null) +- return false; +- if (getClass() != obj.getClass()) +- return false; +- Callback other = (Callback) obj; +- if (!getOuterType().equals(other.getOuterType())) +- return false; +- if (hostId == null) { +- if (other.hostId != null) +- return false; +- } else if (!hostId.equals(other.hostId)) +- return false; +- if (milliseconds != other.milliseconds) +- return false; +- return true; +- } +- +- private MATSimSimulation getOuterType() { +- return MATSimSimulation.this; +- } +- } +-} +diff --git a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/package-info.java b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/package-info.java +deleted file mode 100644 +index 7b4598c6b..000000000 +--- a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/package-info.java ++++ /dev/null +@@ -1,4 +0,0 @@ +-/** +- * Ported sources from jDEECo 2 +- */ +-package cz.cuni.mff.d3s.deeco.simulation.matsim; +\ No newline at end of file +diff --git a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/MATSimSimulation.java b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/MATSimSimulation.java +index 270690016..7476b88cd 100644 +--- a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/MATSimSimulation.java ++++ b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/MATSimSimulation.java +@@ -3,54 +3,122 @@ + import java.io.File; + import java.io.IOException; + import java.util.Arrays; ++import java.util.HashMap; ++import java.util.HashSet; + import java.util.LinkedList; + import java.util.List; ++import java.util.Map; +import java.util.Set; ++import java.util.TreeSet; + + import org.matsim.api.core.v01.Id; ++import org.matsim.api.core.v01.TransportMode; + import org.matsim.core.basic.v01.IdImpl; + import org.matsim.core.controler.Controler; ++import org.matsim.core.controler.events.StartupEvent; ++import org.matsim.core.controler.listener.StartupListener; ++import org.matsim.core.mobsim.framework.Mobsim; ++import org.matsim.core.router.util.TravelTime; ++import org.matsim.withinday.trafficmonitoring.TravelTimeCollector; ++import org.matsim.withinday.trafficmonitoring.TravelTimeCollectorFactory; + ++import cz.cuni.mff.d3s.deeco.logging.Log; + import cz.cuni.mff.d3s.deeco.network.AbstractHost; + import cz.cuni.mff.d3s.deeco.runtime.DEECoContainer; + import cz.cuni.mff.d3s.deeco.runtime.DEECoPlugin; +-import cz.cuni.mff.d3s.deeco.simulation.matsim.AdditionAwareAgentSource; +-import cz.cuni.mff.d3s.deeco.simulation.matsim.DefaultMATSimExtractor; +-import cz.cuni.mff.d3s.deeco.simulation.matsim.DefaultMATSimUpdater; +-import cz.cuni.mff.d3s.deeco.simulation.matsim.JDEECoAgent; +-import cz.cuni.mff.d3s.deeco.simulation.matsim.JDEECoAgentSource; +-import cz.cuni.mff.d3s.deeco.simulation.matsim.MATSimRouter; + import cz.cuni.mff.d3s.deeco.timer.CurrentTimeProvider; + import cz.cuni.mff.d3s.deeco.timer.SimulationTimer; + import cz.cuni.mff.d3s.deeco.timer.TimerEventListener; + import cz.cuni.mff.d3s.jdeeco.matsim.old.roadtrains.MATSimDataProviderReceiver; ++import cz.cuni.mff.d3s.jdeeco.matsim.old.simulation.AdditionAwareAgentSource; ++import cz.cuni.mff.d3s.jdeeco.matsim.old.simulation.DefaultMATSimExtractor; ++import cz.cuni.mff.d3s.jdeeco.matsim.old.simulation.DefaultMATSimUpdater; ++import 
cz.cuni.mff.d3s.jdeeco.matsim.old.simulation.JDEECoAgent; ++import cz.cuni.mff.d3s.jdeeco.matsim.old.simulation.JDEECoAgentSource; ++import cz.cuni.mff.d3s.jdeeco.matsim.old.simulation.JDEECoMobsimFactory; ++import cz.cuni.mff.d3s.jdeeco.matsim.old.simulation.JDEECoWithinDayMobsimListener; ++import cz.cuni.mff.d3s.jdeeco.matsim.old.simulation.MATSimDataProvider; ++import cz.cuni.mff.d3s.jdeeco.matsim.old.simulation.MATSimDataReceiver; ++import cz.cuni.mff.d3s.jdeeco.matsim.old.simulation.MATSimExtractor; ++import cz.cuni.mff.d3s.jdeeco.matsim.old.simulation.MATSimPreloadingControler; ++import cz.cuni.mff.d3s.jdeeco.matsim.old.simulation.MATSimRouter; ++import cz.cuni.mff.d3s.jdeeco.matsim.old.simulation.MATSimSimulationStepListener; + + /** + * Plug-in providing MATSim simulation + * ++ * Based on the code from the jDEECo 2 simulation project ++ * + * @author Vladimir Matena + * + */ + public class MATSimSimulation implements DEECoPlugin { +- class TimerProvider implements SimulationTimer { ++ private final TreeSet callbacks; ++ ++ class TimerProvider implements SimulationTimer, CurrentTimeProvider /* ++ * TODO: Current time provider or simulation ++ * timer ++ */, MATSimSimulationStepListener { + @Override + public void notifyAt(long time, TimerEventListener listener, DEECoContainer node) { + // System.out.println(""NOTIFY AT CALLED FOR: "" + time + "" NODE:"" + node.getId()); +- MATSimSimulation.this.oldSimulation.callAt(time, String.valueOf(node.getId())); +- oldSimulation.getHost(String.valueOf(node.getId())).listener = listener; ++ // MATSimSimulation.this.oldSimulation.callAt(time, String.valueOf(node.getId())); ++ final String hostId = String.valueOf(node.getId()); + -+import org.eclipse.jface.dialogs.TitleAreaDialog; -+import org.eclipse.swt.SWT; -+import org.eclipse.swt.events.ModifyEvent; -+import org.eclipse.swt.events.ModifyListener; -+import org.eclipse.swt.events.SelectionAdapter; -+import org.eclipse.swt.events.SelectionEvent; -+import org.eclipse.swt.events.SelectionListener; -+import org.eclipse.swt.layout.GridData; -+import org.eclipse.swt.layout.GridLayout; -+import org.eclipse.swt.widgets.Button; -+import org.eclipse.swt.widgets.Combo; -+import org.eclipse.swt.widgets.Composite; -+import org.eclipse.swt.widgets.Control; -+import org.eclipse.swt.widgets.Group; -+import org.eclipse.swt.widgets.Label; -+import org.eclipse.swt.widgets.Shell; -+import org.eclipse.swt.widgets.Text; -+import org.eclipselabs.tapiji.tools.core.Logger; -+import org.eclipselabs.tapiji.tools.core.model.exception.ResourceBundleException; -+import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; -+import org.eclipselabs.tapiji.tools.core.util.LocaleUtils; -+import org.eclipselabs.tapiji.tools.core.util.ResourceUtils; -+ -+ -+public class CreateResourceBundleEntryDialog extends TitleAreaDialog { -+ -+ private static int WIDTH_LEFT_COLUMN = 100; -+ -+ private ResourceBundleManager manager; -+ private Collection availableBundles; -+ -+ private Text txtKey; -+ private Combo cmbRB; -+ private Text txtDefaultText; -+ private Combo cmbLanguage; -+ -+ private Button okButton; -+ private Button cancelButton; -+ -+ /*** Dialog Model ***/ -+ String selectedRB = """"; -+ String selectedLocale = """"; -+ String selectedKey = """"; -+ String selectedDefaultText = """"; -+ -+ /*** MODIFY LISTENER ***/ -+ ModifyListener rbModifyListener; -+ -+ public CreateResourceBundleEntryDialog(Shell parentShell, -+ ResourceBundleManager manager, -+ String preselectedKey, -+ String preselectedMessage, -+ 
String preselectedBundle, -+ String preselectedLocale) { -+ super(parentShell); -+ this.manager = manager; -+ this.availableBundles = manager.getResourceBundleNames(); -+ this.selectedKey = preselectedKey != null ? preselectedKey.trim() : preselectedKey; -+ this.selectedDefaultText = preselectedMessage; -+ this.selectedRB = preselectedBundle; -+ this.selectedLocale = preselectedLocale; -+ } ++ callAt(time, hostId); + -+ public String getSelectedResourceBundle () { -+ return selectedRB; -+ } -+ -+ public String getSelectedKey () { -+ return selectedKey; -+ } -+ -+ @Override -+ protected Control createDialogArea(Composite parent) { -+ Composite dialogArea = (Composite) super.createDialogArea(parent); -+ initLayout (dialogArea); -+ constructRBSection (dialogArea); -+ constructDefaultSection (dialogArea); -+ initContent (); -+ return dialogArea; -+ } -+ -+ protected void initContent() { -+ cmbRB.removeAll(); -+ int iSel = -1; -+ int index = 0; -+ -+ for (String bundle : availableBundles) { -+ cmbRB.add(bundle); -+ if (bundle.equals(selectedRB)) { -+ cmbRB.select(index); -+ iSel = index; -+ cmbRB.setEnabled(false); -+ } -+ index ++; -+ } -+ -+ if (availableBundles.size() > 0 && iSel < 0) { -+ cmbRB.select(0); -+ selectedRB = cmbRB.getText(); -+ cmbRB.setEnabled(true); -+ } -+ -+ rbModifyListener = new ModifyListener() { -+ -+ @Override -+ public void modifyText(ModifyEvent e) { -+ selectedRB = cmbRB.getText(); -+ validate(); -+ } -+ }; -+ cmbRB.removeModifyListener(rbModifyListener); -+ cmbRB.addModifyListener(rbModifyListener); -+ -+ -+ cmbRB.addSelectionListener(new SelectionListener() { -+ -+ @Override -+ public void widgetSelected(SelectionEvent e) { -+ selectedLocale = """"; -+ updateAvailableLanguages(); -+ } -+ -+ @Override -+ public void widgetDefaultSelected(SelectionEvent e) { -+ -+ selectedLocale = """"; -+ updateAvailableLanguages(); -+ } -+ }); -+ updateAvailableLanguages(); -+ } -+ -+ protected void updateAvailableLanguages () { -+ cmbLanguage.removeAll(); -+ String selectedBundle = cmbRB.getText(); -+ -+ if (selectedBundle.trim().equals("""")) -+ return; -+ -+ // Retrieve available locales for the selected resource-bundle -+ Set locales = manager.getProvidedLocales(selectedBundle); -+ int index = 0; -+ int iSel = -1; -+ for (Locale l : manager.getProvidedLocales(selectedBundle)) { -+ String displayName = l == null ? 
ResourceBundleManager.defaultLocaleTag : l.getDisplayName(); -+ if (displayName.equals(selectedLocale)) -+ iSel = index; -+ if (displayName.equals("""")) -+ displayName = ResourceBundleManager.defaultLocaleTag; -+ cmbLanguage.add(displayName); -+ if (index == iSel) -+ cmbLanguage.select(iSel); -+ index++; -+ } -+ -+ if (locales.size() > 0) { -+ cmbLanguage.select(0); -+ selectedLocale = cmbLanguage.getText(); ++ MATSimSimulation.this.getHost(String.valueOf(node.getId())).listener = listener; + } -+ -+ cmbLanguage.addModifyListener(new ModifyListener() { -+ @Override -+ public void modifyText(ModifyEvent e) { -+ selectedLocale = cmbLanguage.getText(); -+ validate(); -+ } -+ }); -+ } -+ -+ protected void initLayout(Composite parent) { -+ final GridLayout layout = new GridLayout(1, true); -+ parent.setLayout(layout); -+ } + -+ protected void constructRBSection(Composite parent) { -+ final Group group = new Group (parent, SWT.SHADOW_ETCHED_IN); -+ group.setLayoutData(new GridData(GridData.FILL, GridData.CENTER, true, false, 1, 1)); -+ group.setText(""Resource Bundle""); -+ -+ // define grid data for this group -+ GridData gridData = new GridData(); -+ gridData.horizontalAlignment = SWT.FILL; -+ gridData.grabExcessHorizontalSpace = true; -+ group.setLayoutData(gridData); -+ group.setLayout(new GridLayout(2, false)); -+ -+ final Label spacer = new Label (group, SWT.NONE | SWT.LEFT); -+ spacer.setLayoutData(new GridData(GridData.BEGINNING, GridData.CENTER, false, false, 1, 1)); -+ -+ final Label infoLabel = new Label (group, SWT.NONE | SWT.LEFT); -+ infoLabel.setLayoutData(new GridData(GridData.BEGINNING, GridData.CENTER, false, false, 1, 1)); -+ infoLabel.setText(""Specify the key of the new resource as well as the Resource-Bundle in\n"" + -+ ""which the resource"" + -+ ""should be added.\n""); -+ -+ // Schl�ssel -+ final Label lblKey = new Label (group, SWT.NONE | SWT.RIGHT); -+ GridData lblKeyGrid = new GridData(GridData.END, GridData.CENTER, false, false, 1, 1); -+ lblKeyGrid.widthHint = WIDTH_LEFT_COLUMN; -+ lblKey.setLayoutData(lblKeyGrid); -+ lblKey.setText(""Key:""); -+ -+ txtKey = new Text (group, SWT.BORDER); -+ txtKey.setText(selectedKey); -+ txtKey.setEditable(selectedKey.trim().length() == 0 || selectedKey.indexOf(""[Platzhalter]"")>=0); -+ txtKey.setLayoutData(new GridData(GridData.FILL, GridData.CENTER, true, false, 1, 1)); -+ txtKey.addModifyListener(new ModifyListener() { -+ -+ @Override -+ public void modifyText(ModifyEvent e) { -+ selectedKey = txtKey.getText(); -+ validate(); -+ } -+ }); -+ -+ // Resource-Bundle -+ final Label lblRB = new Label (group, SWT.NONE); -+ lblRB.setLayoutData(new GridData(GridData.END, GridData.CENTER, false, false, 1, 1)); -+ lblRB.setText(""Resource-Bundle:""); -+ -+ cmbRB = new Combo (group, SWT.DROP_DOWN | SWT.SIMPLE); -+ cmbRB.setLayoutData(new GridData(GridData.FILL, GridData.CENTER, true, false, 1, 1)); -+ } -+ -+ protected void constructDefaultSection(Composite parent) { -+ final Group group = new Group (parent, SWT.SHADOW_ETCHED_IN); -+ group.setLayoutData(new GridData(GridData.FILL, GridData.CENTER, true, true, 1, 1)); -+ group.setText(""Default-Text""); -+ -+ // define grid data for this group -+ GridData gridData = new GridData(); -+ gridData.horizontalAlignment = SWT.FILL; -+ gridData.grabExcessHorizontalSpace = true; -+ group.setLayoutData(gridData); -+ group.setLayout(new GridLayout(2, false)); -+ -+ final Label spacer = new Label (group, SWT.NONE | SWT.LEFT); -+ spacer.setLayoutData(new GridData(GridData.BEGINNING, GridData.CENTER, 
false, false, 1, 1)); -+ -+ final Label infoLabel = new Label (group, SWT.NONE | SWT.LEFT); -+ infoLabel.setLayoutData(new GridData(GridData.BEGINNING, GridData.CENTER, false, false, 1, 1)); -+ infoLabel.setText(""Define a default text for the specified resource. Moreover, you need to\n"" + -+ ""select the locale for which the default text should be defined.""); -+ -+ // Text -+ final Label lblText = new Label (group, SWT.NONE | SWT.RIGHT); -+ GridData lblTextGrid = new GridData(GridData.END, GridData.CENTER, false, false, 1, 1); -+ lblTextGrid.heightHint = 80; -+ lblTextGrid.widthHint = 100; -+ lblText.setLayoutData(lblTextGrid); -+ lblText.setText(""Text:""); -+ -+ txtDefaultText = new Text (group, SWT.MULTI | SWT.BORDER); -+ txtDefaultText.setText(selectedDefaultText); -+ txtDefaultText.setLayoutData(new GridData(GridData.FILL, GridData.FILL, true, true, 1, 1)); -+ txtDefaultText.addModifyListener(new ModifyListener() { -+ -+ @Override -+ public void modifyText(ModifyEvent e) { -+ selectedDefaultText = txtDefaultText.getText(); -+ validate(); ++ public synchronized void callAt(long absoluteTime, String hostId) { ++ Callback callback = hostIdToCallback.remove(hostId); ++ if (callback != null) { ++ callbacks.remove(callback); + } -+ }); -+ -+ // Sprache -+ final Label lblLanguage = new Label (group, SWT.NONE); -+ lblLanguage.setLayoutData(new GridData(GridData.END, GridData.CENTER, false, false, 1, 1)); -+ lblLanguage.setText(""Language (Country):""); -+ -+ cmbLanguage = new Combo (group, SWT.DROP_DOWN | SWT.SIMPLE); -+ cmbLanguage.setLayoutData(new GridData(GridData.FILL, GridData.CENTER, true, false, 1, 1)); -+ } -+ -+ @Override -+ protected void okPressed() { -+ super.okPressed(); -+ // TODO debug -+ -+ // Insert new Resource-Bundle reference -+ Locale locale = LocaleUtils.getLocaleByDisplayName( manager.getProvidedLocales(selectedRB), selectedLocale); // new Locale(""""); // retrieve locale -+ -+ try { -+ manager.addResourceBundleEntry (selectedRB, selectedKey, locale, selectedDefaultText); -+ } catch (ResourceBundleException e) { -+ Logger.logError(e); ++ callback = new Callback(hostId, absoluteTime); ++ hostIdToCallback.put(hostId, callback); ++ // System.out.println(""For "" + absoluteTime); ++ callbacks.add(callback); + } + + @Override + public long getCurrentMilliseconds() { +- return MATSimSimulation.this.oldSimulation.getCurrentMilliseconds(); ++ return MATSimSimulation.this.currentMilliseconds; + } + + @Override + public void start(long duration) { +- double startTime = MATSimSimulation.this.oldSimulation.getControler().getConfig().getQSimConfigGroup() +- .getStartTime(); ++ double startTime = MATSimSimulation.this.getController().getConfig().getQSimConfigGroup().getStartTime(); + double endTime = startTime + (((double) (duration)) / 1000); +- MATSimSimulation.this.oldSimulation.getControler().getConfig().getQSimConfigGroup().setEndTime(endTime); +- MATSimSimulation.this.oldSimulation.run(); ++ MATSimSimulation.this.getController().getConfig().getQSimConfigGroup().setEndTime(endTime); ++ MATSimSimulation.this.getController().run(); + } -+ } -+ -+ @Override -+ protected void configureShell(Shell newShell) { -+ super.configureShell(newShell); -+ newShell.setText(""Create Resource-Bundle entry""); -+ } + -+ @Override -+ public void create() { -+ // TODO Auto-generated method stub -+ super.create(); -+ this.setTitle(""New Resource-Bundle entry""); -+ this.setMessage(""Please, specify details about the new Resource-Bundle entry""); -+ } -+ -+ protected void validate () { -+ // 
Check Resource-Bundle ids -+ boolean keyValid = false; -+ boolean keyValidChar = ResourceUtils.isValidResourceKey(selectedKey); -+ boolean rbValid = false; -+ boolean textValid = false; -+ boolean localeValid = LocaleUtils.containsLocaleByDisplayName(manager.getProvidedLocales(selectedRB), selectedLocale); -+ -+ for (String rbId : this.availableBundles) { -+ if (rbId.equals(selectedRB)) { -+ rbValid = true; -+ break; ++ @Override ++ public void at(double seconds, Mobsim mobsim) { ++ // Exchange data with MATSim ++ long milliseconds = secondsToMilliseconds(seconds); ++ matSimReceiver.setMATSimData(extractor.extractFromMATSim(listener.getAllJDEECoAgents(), mobsim)); ++ listener.updateJDEECoAgents(matSimProvider.getMATSimData()); ++ // Add callback for the MATSim step ++ callAt(milliseconds + simulationStep, SIMULATION_CALLBACK); ++ Host host; ++ Callback callback; ++ // Iterate through all the callbacks until the MATSim callback. ++ while (!callbacks.isEmpty()) { ++ callback = callbacks.pollFirst(); ++ if (callback.getHostId().equals(SIMULATION_CALLBACK)) { ++ break; ++ } ++ currentMilliseconds = callback.getAbsoluteTime(); ++ // System.out.println(""At: "" + currentMilliseconds); ++ host = hosts.get(callback.hostId); ++ host.at(millisecondsToSeconds(currentMilliseconds)); + } + } -+ -+ if (!manager.isResourceExisting(selectedRB, selectedKey)) -+ keyValid = true; -+ -+ if (selectedDefaultText.trim().length() > 0) -+ textValid = true; -+ -+ // print Validation summary -+ String errorMessage = null; -+ if (selectedKey.trim().length() == 0) -+ errorMessage = ""No resource key specified.""; -+ else if (! keyValidChar) -+ errorMessage = ""The specified resource key contains invalid characters.""; -+ else if (! keyValid) -+ errorMessage = ""The specified resource key is already existing.""; -+ else if (! rbValid) -+ errorMessage = ""The specified Resource-Bundle does not exist.""; -+ else if (! localeValid) -+ errorMessage = ""The specified Locale does not exist for the selected Resource-Bundle.""; -+ else if (! 
textValid) -+ errorMessage = ""No default translation specified.""; -+ else { -+ if (okButton != null) -+ okButton.setEnabled(true); -+ } -+ -+ setErrorMessage(errorMessage); -+ if (okButton != null && errorMessage != null) -+ okButton.setEnabled(false); -+ } -+ -+ @Override -+ protected void createButtonsForButtonBar(Composite parent) { -+ okButton = createButton (parent, OK, ""Ok"", true); -+ okButton.addSelectionListener (new SelectionAdapter() { -+ public void widgetSelected(SelectionEvent e) { -+ // Set return code -+ setReturnCode(OK); -+ close(); -+ } -+ }); -+ -+ cancelButton = createButton (parent, CANCEL, ""Cancel"", false); -+ cancelButton.addSelectionListener (new SelectionAdapter() { -+ public void widgetSelected(SelectionEvent e) { -+ setReturnCode (CANCEL); -+ close(); -+ } -+ }); -+ -+ okButton.setEnabled(false); -+ cancelButton.setEnabled(true); -+ } -+ -+} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/dialogs/FragmentProjectSelectionDialog.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/dialogs/FragmentProjectSelectionDialog.java -new file mode 100644 -index 00000000..3540881a ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/dialogs/FragmentProjectSelectionDialog.java -@@ -0,0 +1,117 @@ -+package org.eclipselabs.tapiji.tools.core.ui.dialogs; -+ -+import java.util.ArrayList; -+import java.util.List; -+import java.util.Locale; -+import java.util.Set; -+ -+import org.eclipse.core.resources.IProject; -+import org.eclipse.jface.viewers.ILabelProvider; -+import org.eclipse.jface.viewers.ILabelProviderListener; -+import org.eclipse.jface.viewers.IStructuredContentProvider; -+import org.eclipse.jface.viewers.Viewer; -+import org.eclipse.swt.graphics.Image; -+import org.eclipse.swt.widgets.Shell; -+import org.eclipse.ui.ISharedImages; -+import org.eclipse.ui.PlatformUI; -+import org.eclipse.ui.dialogs.ListDialog; -+import org.eclipselabs.tapiji.tools.core.util.ImageUtils; -+ -+ -+public class FragmentProjectSelectionDialog extends ListDialog{ -+ private IProject hostproject; -+ private List allProjects; -+ -+ -+ public FragmentProjectSelectionDialog(Shell parent, IProject hostproject, List fragmentprojects){ -+ super(parent); -+ this.hostproject = hostproject; -+ this.allProjects = new ArrayList(fragmentprojects); -+ allProjects.add(0,hostproject); -+ -+ init(); -+ } -+ -+ private void init() { -+ this.setAddCancelButton(true); -+ this.setMessage(""Select one of the following plug-ins:""); -+ this.setTitle(""Project Selector""); -+ this.setContentProvider(new IProjectContentProvider()); -+ this.setLabelProvider(new IProjectLabelProvider()); -+ -+ this.setInput(allProjects); -+ } -+ -+ public IProject getSelectedProject() { -+ Object[] selection = this.getResult(); -+ if (selection != null && selection.length > 0) -+ return (IProject) selection[0]; -+ return null; -+ } + -+ -+ //private classes-------------------------------------------------------- -+ class IProjectContentProvider implements IStructuredContentProvider { ++ private double millisecondsToSeconds(long milliseconds) { ++ return ((double) (milliseconds)) / 1000; + } + } + +@@ -69,23 +137,165 @@ public void at(double absoluteTime) { + + } + ++ private class Callback implements Comparable { + -+ @Override -+ public Object[] getElements(Object inputElement) { -+ List resources = (List) inputElement; -+ return resources.toArray(); -+ } ++ private final long milliseconds; ++ private final 
String hostId; + -+ @Override -+ public void dispose() { -+ // TODO Auto-generated method stub -+ ++ public Callback(String hostId, long milliseconds) { ++ this.hostId = hostId; ++ this.milliseconds = milliseconds; + } + -+ @Override -+ public void inputChanged(Viewer viewer, Object oldInput, Object newInput) { -+ // TODO Auto-generated method stub ++ public long getAbsoluteTime() { ++ return milliseconds; + } -+ -+ } -+ -+ class IProjectLabelProvider implements ILabelProvider { + -+ @Override -+ public Image getImage(Object element) { -+ return PlatformUI.getWorkbench().getSharedImages().getImage( -+ ISharedImages.IMG_OBJ_PROJECT); ++ public String getHostId() { ++ return hostId; + } + + @Override -+ public String getText(Object element) { -+ IProject p = ((IProject) element); -+ String text = p.getName(); -+ if (p.equals(hostproject)) text += "" [host project]""; -+ else text += "" [fragment project]""; -+ return text; ++ public int compareTo(Callback c) { ++ if (c.getAbsoluteTime() < milliseconds) { ++ return 1; ++ } else if (c.getAbsoluteTime() > milliseconds) { ++ return -1; ++ } else if (this == c) { ++ return 0; ++ } else { ++ return this.hashCode() < c.hashCode() ? 1 : -1; ++ } + } + -+ @Override -+ public void addListener(ILabelProviderListener listener) { -+ // TODO Auto-generated method stub -+ ++ public String toString() { ++ return hostId + "" "" + milliseconds; + } + + @Override -+ public void dispose() { -+ // TODO Auto-generated method stub -+ ++ public int hashCode() { ++ final int prime = 31; ++ int result = 1; ++ result = prime * result + getOuterType().hashCode(); ++ result = prime * result + ((hostId == null) ? 0 : hostId.hashCode()); ++ result = prime * result + (int) (milliseconds ^ (milliseconds >>> 32)); ++ return result; + } + + @Override -+ public boolean isLabelProperty(Object element, String property) { -+ // TODO Auto-generated method stub -+ return false; ++ public boolean equals(Object obj) { ++ if (this == obj) ++ return true; ++ if (obj == null) ++ return false; ++ if (getClass() != obj.getClass()) ++ return false; ++ Callback other = (Callback) obj; ++ if (!getOuterType().equals(other.getOuterType())) ++ return false; ++ if (hostId == null) { ++ if (other.hostId != null) ++ return false; ++ } else if (!hostId.equals(other.hostId)) ++ return false; ++ if (milliseconds != other.milliseconds) ++ return false; ++ return true; + } + -+ @Override -+ public void removeListener(ILabelProviderListener listener) { -+ // TODO Auto-generated method stub -+ ++ private MATSimSimulation getOuterType() { ++ return MATSimSimulation.this; + } -+ + } -+} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/dialogs/GenerateBundleAccessorDialog.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/dialogs/GenerateBundleAccessorDialog.java -new file mode 100644 -index 00000000..389121ba ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/dialogs/GenerateBundleAccessorDialog.java -@@ -0,0 +1,131 @@ -+package org.eclipselabs.tapiji.tools.core.ui.dialogs; -+ -+import org.eclipse.jface.dialogs.TitleAreaDialog; -+import org.eclipse.swt.SWT; -+import org.eclipse.swt.layout.GridData; -+import org.eclipse.swt.layout.GridLayout; -+import org.eclipse.swt.widgets.Combo; -+import org.eclipse.swt.widgets.Composite; -+import org.eclipse.swt.widgets.Control; -+import org.eclipse.swt.widgets.Group; -+import org.eclipse.swt.widgets.Label; -+import org.eclipse.swt.widgets.Shell; 
-+import org.eclipse.swt.widgets.Text; + -+public class GenerateBundleAccessorDialog extends TitleAreaDialog { -+ -+ private static int WIDTH_LEFT_COLUMN = 100; -+ -+ private Text bundleAccessor; -+ private Text packageName; -+ -+ public GenerateBundleAccessorDialog(Shell parentShell) { -+ super(parentShell); -+ } + private final TimerProvider timer = new TimerProvider(); +- private final cz.cuni.mff.d3s.deeco.simulation.matsim.MATSimSimulation oldSimulation; + private final JDEECoAgentSource agentSource = new JDEECoAgentSource(); + private final MATSimRouter router; + private final MATSimDataProviderReceiver matSimProviderReceiver = new MATSimDataProviderReceiver( + new LinkedList()); + ++ // private final cz.cuni.mff.d3s.deeco.simulation.matsim.MATSimSimulation oldSimulation; ++ ++ private static final String SIMULATION_CALLBACK = ""SIMULATION_CALLBACK""; ++ private long currentMilliseconds; ++ private final long simulationStep; // in milliseconds ++ private final TravelTime travelTime; ++ private final Map hostIdToCallback; ++ private final Controler controler; ++ private final JDEECoWithinDayMobsimListener listener; ++ private final MATSimDataProvider matSimProvider; ++ private final MATSimDataReceiver matSimReceiver; ++ private final Map hosts; ++ private final MATSimExtractor extractor; ++ ++ // private final Exchanger exchanger; ++ + public MATSimSimulation(File config, AdditionAwareAgentSource... additionalAgentSources) throws IOException { + List agentSources = new LinkedList<>(); + agentSources.add(agentSource); + agentSources.addAll(Arrays.asList(additionalAgentSources)); + +- oldSimulation = new cz.cuni.mff.d3s.deeco.simulation.matsim.MATSimSimulation(matSimProviderReceiver, +- matSimProviderReceiver, new DefaultMATSimUpdater(), new DefaultMATSimExtractor(), agentSources, +- config.getAbsolutePath()); ++ /* ++ * oldSimulation = new cz.cuni.mff.d3s.deeco.simulation.matsim.MATSimSimulation(matSimProviderReceiver, ++ * matSimProviderReceiver, new DefaultMATSimUpdater(), new DefaultMATSimExtractor(), agentSources, ++ * config.getAbsolutePath()); ++ */ ++ // this.exchanger = new Exchanger(); ++ ++ this.callbacks = new TreeSet<>(); ++ this.hostIdToCallback = new HashMap<>(); ++ this.hosts = new HashMap<>(); ++ ++ this.controler = new MATSimPreloadingControler(config.getAbsolutePath()); ++ this.controler.setOverwriteFiles(true); ++ this.controler.getConfig().getQSimConfigGroup().setSimStarttimeInterpretation(""onlyUseStarttime""); + +- router = new MATSimRouter(oldSimulation.getControler(), oldSimulation.getTravelTime(), 10 /* TODO: FAKE VALUE */); ++ double end = this.controler.getConfig().getQSimConfigGroup().getEndTime(); ++ double start = this.controler.getConfig().getQSimConfigGroup().getStartTime(); ++ double step = this.controler.getConfig().getQSimConfigGroup().getTimeStepSize(); ++ Log.i(""Starting simulation: matsimStartTime: "" + start + "" matsimEndTime: "" + end); ++ this.extractor = new DefaultMATSimExtractor(); ++ this.listener = new JDEECoWithinDayMobsimListener(timer, new DefaultMATSimUpdater(), extractor); ++ this.matSimProvider = (MATSimDataProvider) matSimProviderReceiver; ++ this.matSimReceiver = (MATSimDataReceiver) matSimProviderReceiver; ++ ++ Set analyzedModes = new HashSet(); ++ analyzedModes.add(TransportMode.car); ++ travelTime = new TravelTimeCollectorFactory().createTravelTimeCollector(controler.getScenario(), analyzedModes); ++ ++ controler.addControlerListener(new StartupListener() { ++ public void notifyStartup(StartupEvent event) { ++ 
controler.getEvents().addHandler((TravelTimeCollector) travelTime); ++ controler.getMobsimListeners().add((TravelTimeCollector) travelTime); ++ controler.setMobsimFactory(new JDEECoMobsimFactory(listener, agentSources)); ++ } ++ }); ++ /** ++ * Bind MATSim listener with the agent source. It is necessary to let the listener know about the jDEECo agents ++ * that it needs to update with data coming from a jDEECo runtime. ++ */ ++ for (AdditionAwareAgentSource source : agentSources) { ++ if (source instanceof JDEECoAgentSource) { ++ listener.registerAgentProvider((JDEECoAgentSource) source); ++ } ++ } + -+ @Override -+ protected Control createDialogArea(Composite parent) { -+ Composite dialogArea = (Composite) super.createDialogArea(parent); -+ initLayout (dialogArea); -+ constructBASection (dialogArea); -+ //constructDefaultSection (dialogArea); -+ initContent (); -+ return dialogArea; -+ } ++ this.simulationStep = secondsToMilliseconds(step); ++ currentMilliseconds = secondsToMilliseconds(controler.getConfig().getQSimConfigGroup().getStartTime()); + -+ protected void initLayout(Composite parent) { -+ final GridLayout layout = new GridLayout(1, true); -+ parent.setLayout(layout); ++ router = new MATSimRouter(getController(), travelTime, 10 /* TODO: FAKE VALUE */); + } + -+ protected void constructBASection(Composite parent) { -+ final Group group = new Group (parent, SWT.SHADOW_ETCHED_IN); -+ group.setLayoutData(new GridData(GridData.FILL, GridData.CENTER, true, false, 1, 1)); -+ group.setText(""Resource Bundle""); -+ -+ // define grid data for this group -+ GridData gridData = new GridData(); -+ gridData.horizontalAlignment = SWT.FILL; -+ gridData.grabExcessHorizontalSpace = true; -+ group.setLayoutData(gridData); -+ group.setLayout(new GridLayout(2, false)); -+ -+ final Label spacer = new Label (group, SWT.NONE | SWT.LEFT); -+ spacer.setLayoutData(new GridData(GridData.BEGINNING, GridData.CENTER, false, false, 1, 1)); -+ -+ final Label infoLabel = new Label (group, SWT.NONE | SWT.LEFT); -+ infoLabel.setLayoutData(new GridData(GridData.BEGINNING, GridData.CENTER, false, false, 1, 1)); -+ infoLabel.setText(""Diese Zeile stellt einen Platzhalter für einen kurzen Infotext dar.\nDiese Zeile stellt einen Platzhalter für einen kurzen Infotext dar.""); -+ -+ // Schlüssel -+ final Label lblBA = new Label (group, SWT.NONE | SWT.RIGHT); -+ GridData lblBAGrid = new GridData(GridData.END, GridData.CENTER, false, false, 1, 1); -+ lblBAGrid.widthHint = WIDTH_LEFT_COLUMN; -+ lblBA.setLayoutData(lblBAGrid); -+ lblBA.setText(""Class-Name:""); -+ -+ bundleAccessor = new Text (group, SWT.BORDER); -+ bundleAccessor.setLayoutData(new GridData(GridData.FILL, GridData.CENTER, true, false, 1, 1)); -+ -+ // Resource-Bundle -+ final Label lblPkg = new Label (group, SWT.NONE); -+ lblPkg.setLayoutData(new GridData(GridData.END, GridData.CENTER, false, false, 1, 1)); -+ lblPkg.setText(""Package:""); -+ -+ packageName = new Text (group, SWT.BORDER); -+ packageName.setLayoutData(new GridData(GridData.FILL, GridData.CENTER, true, false, 1, 1)); -+ } -+ -+ protected void initContent () { -+ bundleAccessor.setText(""BundleAccessor""); -+ packageName.setText(""a.b""); ++ private long secondsToMilliseconds(double seconds) { ++ return (long) (seconds * 1000); + } -+ -+ /* -+ protected void constructDefaultSection(Composite parent) { -+ final Group group = new Group (parent, SWT.SHADOW_ETCHED_IN); -+ group.setLayoutData(new GridData(GridData.FILL, GridData.CENTER, true, true, 1, 1)); -+ group.setText(""Basis-Text""); -+ -+ // 
define grid data for this group -+ GridData gridData = new GridData(); -+ gridData.horizontalAlignment = SWT.FILL; -+ gridData.grabExcessHorizontalSpace = true; -+ group.setLayoutData(gridData); -+ group.setLayout(new GridLayout(2, false)); -+ -+ final Label spacer = new Label (group, SWT.NONE | SWT.LEFT); -+ spacer.setLayoutData(new GridData(GridData.BEGINNING, GridData.CENTER, false, false, 1, 1)); -+ -+ final Label infoLabel = new Label (group, SWT.NONE | SWT.LEFT); -+ infoLabel.setLayoutData(new GridData(GridData.BEGINNING, GridData.CENTER, false, false, 1, 1)); -+ infoLabel.setText(""Diese Zeile stellt einen Platzhalter für einen kurzen Infotext dar.\nDiese Zeile stellt einen Platzhalter für einen kurzen Infotext dar.""); -+ -+ // Text -+ final Label lblText = new Label (group, SWT.NONE | SWT.RIGHT); -+ GridData lblTextGrid = new GridData(GridData.END, GridData.CENTER, false, false, 1, 1); -+ lblTextGrid.heightHint = 80; -+ lblTextGrid.widthHint = 100; -+ lblText.setLayoutData(lblTextGrid); -+ lblText.setText(""Text:""); -+ -+ txtDefaultText = new Text (group, SWT.MULTI | SWT.BORDER); -+ txtDefaultText.setLayoutData(new GridData(GridData.FILL, GridData.FILL, true, true, 1, 1)); -+ -+ // Sprache -+ final Label lblLanguage = new Label (group, SWT.NONE); -+ lblLanguage.setLayoutData(new GridData(GridData.END, GridData.CENTER, false, false, 1, 1)); -+ lblLanguage.setText(""Sprache (Land):""); -+ -+ cmbLanguage = new Combo (group, SWT.DROP_DOWN | SWT.SIMPLE); -+ cmbLanguage.setLayoutData(new GridData(GridData.FILL, GridData.CENTER, true, false, 1, 1)); -+ } */ + -+ @Override -+ protected void configureShell(Shell newShell) { -+ super.configureShell(newShell); -+ newShell.setText(""Create Resource-Bundle Accessor""); ++ public void addHost(String id, cz.cuni.mff.d3s.jdeeco.matsim.MATSimSimulation.Host host) { ++ hosts.put(id, host); + } -+ -+ -+} -\ No newline at end of file -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/dialogs/QueryResourceBundleEntryDialog.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/dialogs/QueryResourceBundleEntryDialog.java -new file mode 100644 -index 00000000..304cf6e8 ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/dialogs/QueryResourceBundleEntryDialog.java -@@ -0,0 +1,425 @@ -+package org.eclipselabs.tapiji.tools.core.ui.dialogs; -+ -+import java.util.Collection; -+import java.util.Iterator; -+import java.util.Locale; -+import java.util.Set; -+ -+import org.eclipse.jface.dialogs.TitleAreaDialog; -+import org.eclipse.swt.SWT; -+import org.eclipse.swt.events.ModifyEvent; -+import org.eclipse.swt.events.ModifyListener; -+import org.eclipse.swt.events.SelectionAdapter; -+import org.eclipse.swt.events.SelectionEvent; -+import org.eclipse.swt.events.SelectionListener; -+import org.eclipse.swt.layout.GridData; -+import org.eclipse.swt.layout.GridLayout; -+import org.eclipse.swt.widgets.Button; -+import org.eclipse.swt.widgets.Combo; -+import org.eclipse.swt.widgets.Composite; -+import org.eclipse.swt.widgets.Control; -+import org.eclipse.swt.widgets.Group; -+import org.eclipse.swt.widgets.Label; -+import org.eclipse.swt.widgets.Shell; -+import org.eclipse.swt.widgets.Text; -+import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; -+import org.eclipselabs.tapiji.tools.core.ui.widgets.ResourceSelector; -+import org.eclipselabs.tapiji.tools.core.ui.widgets.event.ResourceSelectionEvent; -+import 
org.eclipselabs.tapiji.tools.core.ui.widgets.listener.IResourceSelectionListener; -+import org.eclipselabs.tapiji.tools.core.util.LocaleUtils; + ++ public Host getHost(String id) { ++ return hosts.get(id); + } + + public SimulationTimer getTimer() { +@@ -95,9 +305,9 @@ public SimulationTimer getTimer() { + public MATSimRouter getRouter() { + return router; + } +- + -+public class QueryResourceBundleEntryDialog extends TitleAreaDialog { -+ -+ private static int WIDTH_LEFT_COLUMN = 100; -+ private static int SEARCH_FULLTEXT = 0; -+ private static int SEARCH_KEY = 1; -+ -+ private ResourceBundleManager manager; -+ private Collection availableBundles; -+ private int searchOption = SEARCH_FULLTEXT; -+ private String resourceBundle = """"; -+ -+ private Combo cmbRB; -+ -+ private Text txtKey; -+ private Button btSearchText; -+ private Button btSearchKey; -+ private Combo cmbLanguage; -+ private ResourceSelector resourceSelector; -+ private Text txtPreviewText; -+ -+ private Button okButton; -+ private Button cancelButton; -+ -+ /*** DIALOG MODEL ***/ -+ private String selectedRB = """"; -+ private String preselectedRB = """"; -+ private Locale selectedLocale = null; -+ private String selectedKey = """"; -+ -+ -+ public QueryResourceBundleEntryDialog(Shell parentShell, ResourceBundleManager manager, String bundleName) { -+ super(parentShell); -+ this.manager = manager; -+ // init available resource bundles -+ this.availableBundles = manager.getResourceBundleNames(); -+ this.preselectedRB = bundleName; -+ } -+ -+ @Override -+ protected Control createDialogArea(Composite parent) { -+ Composite dialogArea = (Composite) super.createDialogArea(parent); -+ initLayout (dialogArea); -+ constructSearchSection (dialogArea); -+ initContent (); -+ return dialogArea; -+ } -+ -+ protected void initContent() { -+ // init available resource bundles -+ cmbRB.removeAll(); -+ int i = 0; -+ for (String bundle : availableBundles) { -+ cmbRB.add(bundle); -+ if (bundle.equals(preselectedRB)) { -+ cmbRB.select(i); -+ cmbRB.setEnabled(false); -+ } -+ i++; -+ } -+ -+ if (availableBundles.size() > 0) { -+ if (preselectedRB.trim().length() == 0) { -+ cmbRB.select(0); -+ cmbRB.setEnabled(true); -+ } -+ } -+ -+ cmbRB.addSelectionListener(new SelectionListener() { -+ -+ @Override -+ public void widgetSelected(SelectionEvent e) { -+ //updateAvailableLanguages(); -+ updateResourceSelector (); -+ } -+ -+ @Override -+ public void widgetDefaultSelected(SelectionEvent e) { -+ //updateAvailableLanguages(); -+ updateResourceSelector (); -+ } -+ }); -+ -+ // init available translations -+ //updateAvailableLanguages(); -+ -+ // init resource selector -+ updateResourceSelector(); -+ -+ // update search options -+ updateSearchOptions(); -+ } -+ -+ protected void updateResourceSelector () { -+ resourceBundle = cmbRB.getText(); -+ resourceSelector.setResourceBundle(resourceBundle); -+ } -+ -+ protected void updateSearchOptions () { -+ searchOption = (btSearchKey.getSelection() ? SEARCH_KEY : SEARCH_FULLTEXT); -+// cmbLanguage.setEnabled(searchOption == SEARCH_FULLTEXT); -+// lblLanguage.setEnabled(cmbLanguage.getEnabled()); -+ -+ // update ResourceSelector -+ resourceSelector.setDisplayMode(searchOption == SEARCH_FULLTEXT ? 
ResourceSelector.DISPLAY_TEXT : ResourceSelector.DISPLAY_KEYS); -+ } -+ -+ protected void updateAvailableLanguages () { -+ cmbLanguage.removeAll(); -+ String selectedBundle = cmbRB.getText(); -+ -+ if (selectedBundle.trim().equals("""")) -+ return; -+ -+ // Retrieve available locales for the selected resource-bundle -+ Set locales = manager.getProvidedLocales(selectedBundle); -+ for (Locale l : locales) { -+ String displayName = l.getDisplayName(); -+ if (displayName.equals("""")) -+ displayName = ResourceBundleManager.defaultLocaleTag; -+ cmbLanguage.add(displayName); -+ } -+ -+// if (locales.size() > 0) { -+// cmbLanguage.select(0); -+ updateSelectedLocale(); -+// } -+ } -+ -+ protected void updateSelectedLocale () { -+ String selectedBundle = cmbRB.getText(); -+ -+ if (selectedBundle.trim().equals("""")) -+ return; -+ -+ Set locales = manager.getProvidedLocales(selectedBundle); -+ Iterator it = locales.iterator(); -+ String selectedLocale = cmbLanguage.getText(); -+ while (it.hasNext()) { -+ Locale l = it.next(); -+ if (l.getDisplayName().equals(selectedLocale)) { -+ resourceSelector.setDisplayLocale(l); -+ break; -+ } -+ } -+ } -+ -+ protected void initLayout(Composite parent) { -+ final GridLayout layout = new GridLayout(1, true); -+ parent.setLayout(layout); -+ } -+ -+ protected void constructSearchSection (Composite parent) { -+ final Group group = new Group (parent, SWT.NONE); -+ group.setLayoutData(new GridData(GridData.FILL, GridData.CENTER, true, false, 1, 1)); -+ group.setText(""Resource selection""); -+ -+ // define grid data for this group -+ GridData gridData = new GridData(); -+ gridData.horizontalAlignment = SWT.FILL; -+ gridData.grabExcessHorizontalSpace = true; -+ group.setLayoutData(gridData); -+ group.setLayout(new GridLayout(2, false)); -+ // TODO export as help text -+ -+ final Label spacer = new Label (group, SWT.NONE | SWT.LEFT); -+ spacer.setLayoutData(new GridData(GridData.BEGINNING, GridData.CENTER, false, false, 1, 1)); -+ -+ final Label infoLabel = new Label (group, SWT.NONE | SWT.LEFT); -+ GridData infoGrid = new GridData(GridData.BEGINNING, GridData.BEGINNING, false, false, 1, 1); -+ infoGrid.heightHint = 70; -+ infoLabel.setLayoutData(infoGrid); -+ infoLabel.setText(""Select the resource that needs to be refrenced. This is achieved in two\n"" + -+ ""steps. First select the Resource-Bundle in which the resource is located. 
\n"" + -+ ""In a last step you need to choose the required resource.""); -+ -+ // Resource-Bundle -+ final Label lblRB = new Label (group, SWT.NONE); -+ lblRB.setLayoutData(new GridData(GridData.END, GridData.CENTER, false, false, 1, 1)); -+ lblRB.setText(""Resource-Bundle:""); -+ -+ cmbRB = new Combo (group, SWT.DROP_DOWN | SWT.SIMPLE); -+ cmbRB.setLayoutData(new GridData(GridData.FILL, GridData.CENTER, true, false, 1, 1)); -+ cmbRB.addModifyListener(new ModifyListener() { -+ @Override -+ public void modifyText(ModifyEvent e) { -+ selectedRB = cmbRB.getText(); -+ validate(); -+ } -+ }); -+ -+ // Search-Options -+ final Label spacer2 = new Label (group, SWT.NONE | SWT.LEFT); -+ spacer2.setLayoutData(new GridData(GridData.BEGINNING, GridData.CENTER, false, false, 1, 1)); -+ -+ Composite searchOptions = new Composite(group, SWT.NONE); -+ searchOptions.setLayout(new GridLayout (2, true)); -+ -+ btSearchText = new Button (searchOptions, SWT.RADIO); -+ btSearchText.setText(""Full-text""); -+ btSearchText.setSelection(searchOption == SEARCH_FULLTEXT); -+ btSearchText.addSelectionListener(new SelectionListener() { -+ -+ @Override -+ public void widgetSelected(SelectionEvent e) { -+ updateSearchOptions(); -+ } -+ -+ @Override -+ public void widgetDefaultSelected(SelectionEvent e) { -+ updateSearchOptions(); -+ } -+ }); -+ -+ btSearchKey = new Button (searchOptions, SWT.RADIO); -+ btSearchKey.setText(""Key""); -+ btSearchKey.setSelection(searchOption == SEARCH_KEY); -+ btSearchKey.addSelectionListener(new SelectionListener() { -+ -+ @Override -+ public void widgetSelected(SelectionEvent e) { -+ updateSearchOptions(); -+ } -+ -+ @Override -+ public void widgetDefaultSelected(SelectionEvent e) { -+ updateSearchOptions(); -+ } -+ }); -+ -+ // Sprache -+// lblLanguage = new Label (group, SWT.NONE); -+// lblLanguage.setLayoutData(new GridData(GridData.END, GridData.CENTER, false, false, 1, 1)); -+// lblLanguage.setText(""Language (Country):""); -+// -+// cmbLanguage = new Combo (group, SWT.DROP_DOWN | SWT.SIMPLE); -+// cmbLanguage.setLayoutData(new GridData(GridData.FILL, GridData.CENTER, true, false, 1, 1)); -+// cmbLanguage.addSelectionListener(new SelectionListener () { -+// -+// @Override -+// public void widgetDefaultSelected(SelectionEvent e) { -+// updateSelectedLocale(); -+// } -+// -+// @Override -+// public void widgetSelected(SelectionEvent e) { -+// updateSelectedLocale(); -+// } -+// -+// }); -+// cmbLanguage.addModifyListener(new ModifyListener() { -+// @Override -+// public void modifyText(ModifyEvent e) { -+// selectedLocale = LocaleUtils.getLocaleByDisplayName(manager.getProvidedLocales(selectedRB), cmbLanguage.getText()); -+// validate(); -+// } -+// }); -+ -+ // Filter -+ final Label lblKey = new Label (group, SWT.NONE | SWT.RIGHT); -+ GridData lblKeyGrid = new GridData(GridData.END, GridData.CENTER, false, false, 1, 1); -+ lblKeyGrid.widthHint = WIDTH_LEFT_COLUMN; -+ lblKey.setLayoutData(lblKeyGrid); -+ lblKey.setText(""Filter:""); -+ -+ txtKey = new Text (group, SWT.BORDER); -+ txtKey.setLayoutData(new GridData(GridData.FILL, GridData.CENTER, true, false, 1, 1)); -+ -+ // Add selector for property keys -+ final Label lblKeys = new Label (group, SWT.NONE); -+ lblKeys.setLayoutData(new GridData(GridData.END, GridData.BEGINNING, false, false, 1, 1)); -+ lblKeys.setText(""Resource:""); -+ -+ resourceSelector = new ResourceSelector (group, SWT.NONE, manager, cmbRB.getText(), searchOption, null, true); -+ GridData resourceSelectionData = new GridData(GridData.FILL, GridData.CENTER, true, 
false, 1, 1); -+ resourceSelectionData.heightHint = 150; -+ resourceSelectionData.widthHint = 400; -+ resourceSelector.setLayoutData(resourceSelectionData); -+ resourceSelector.addSelectionChangedListener(new IResourceSelectionListener() { -+ -+ @Override -+ public void selectionChanged(ResourceSelectionEvent e) { -+ selectedKey = e.getSelectedKey(); -+ updatePreviewLabel(e.getSelectionSummary()); -+ validate(); -+ } -+ }); -+ -+// final Label spacer = new Label (group, SWT.SEPARATOR | SWT.HORIZONTAL); -+// spacer.setLayoutData(new GridData(GridData.BEGINNING, GridData.CENTER, true, false, 2, 1)); -+ -+ // Preview -+ final Label lblText = new Label (group, SWT.NONE | SWT.RIGHT); -+ GridData lblTextGrid = new GridData(GridData.END, GridData.CENTER, false, false, 1, 1); -+ lblTextGrid.heightHint = 120; -+ lblTextGrid.widthHint = 100; -+ lblText.setLayoutData(lblTextGrid); -+ lblText.setText(""Preview:""); -+ -+ txtPreviewText = new Text (group, SWT.BORDER | SWT.MULTI | SWT.V_SCROLL); -+ txtPreviewText.setEditable(false); -+ GridData lblTextGrid2 = new GridData(GridData.FILL, GridData.FILL, true, true, 1, 1); -+ txtPreviewText.setLayoutData(lblTextGrid2); -+ } + public Controler getController() { +- return oldSimulation.getControler(); ++ return controler; + } + + public MATSimDataProviderReceiver getMATSimProviderReceiver() { +@@ -116,8 +326,8 @@ public List> getDependencies() { + + @Override + public void init(DEECoContainer container) { +- Host host = new Host(String.valueOf(container.getId()), oldSimulation); ++ Host host = new Host(String.valueOf(container.getId()), getTimer()); + +- oldSimulation.addHost(String.valueOf(container.getId()), host); ++ addHost(String.valueOf(container.getId()), host); + } + } +diff --git a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/roadtrains/MATSimDataProviderReceiver.java b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/roadtrains/MATSimDataProviderReceiver.java +index 205cbe3b7..c7047fde2 100644 +--- a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/roadtrains/MATSimDataProviderReceiver.java ++++ b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/roadtrains/MATSimDataProviderReceiver.java +@@ -8,10 +8,11 @@ + import org.matsim.api.core.v01.Id; + import org.matsim.core.basic.v01.IdImpl; + +-import cz.cuni.mff.d3s.deeco.simulation.matsim.MATSimDataProvider; +-import cz.cuni.mff.d3s.deeco.simulation.matsim.MATSimDataReceiver; +-import cz.cuni.mff.d3s.deeco.simulation.matsim.MATSimInput; +-import cz.cuni.mff.d3s.deeco.simulation.matsim.MATSimOutput; ++import cz.cuni.mff.d3s.jdeeco.matsim.old.simulation.MATSimDataProvider; ++import cz.cuni.mff.d3s.jdeeco.matsim.old.simulation.MATSimDataReceiver; ++import cz.cuni.mff.d3s.jdeeco.matsim.old.simulation.MATSimInput; ++import cz.cuni.mff.d3s.jdeeco.matsim.old.simulation.MATSimOutput; ++ + + public class MATSimDataProviderReceiver implements MATSimDataReceiver, + MATSimDataProvider { +diff --git a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/AdditionAwareAgentSource.java b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/AdditionAwareAgentSource.java +similarity index 90% +rename from jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/AdditionAwareAgentSource.java +rename to jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/AdditionAwareAgentSource.java +index 9dcd57c46..f5e1086a5 100644 +--- a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/AdditionAwareAgentSource.java ++++ 
b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/AdditionAwareAgentSource.java +@@ -1,4 +1,4 @@ +-package cz.cuni.mff.d3s.deeco.simulation.matsim; ++package cz.cuni.mff.d3s.jdeeco.matsim.old.simulation; + + import org.matsim.core.mobsim.framework.AgentSource; + import org.matsim.core.mobsim.qsim.QSim; +diff --git a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/DefaultMATSimExtractor.java b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/DefaultMATSimExtractor.java +similarity index 91% +rename from jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/DefaultMATSimExtractor.java +rename to jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/DefaultMATSimExtractor.java +index 6f38f5bff..c12919b95 100644 +--- a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/DefaultMATSimExtractor.java ++++ b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/DefaultMATSimExtractor.java +@@ -1,4 +1,4 @@ +-package cz.cuni.mff.d3s.deeco.simulation.matsim; ++package cz.cuni.mff.d3s.jdeeco.matsim.old.simulation; + + import java.util.Collection; + import java.util.HashMap; +diff --git a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/DefaultMATSimUpdater.java b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/DefaultMATSimUpdater.java +similarity index 91% +rename from jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/DefaultMATSimUpdater.java +rename to jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/DefaultMATSimUpdater.java +index a82f18c09..bbfb9844b 100644 +--- a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/DefaultMATSimUpdater.java ++++ b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/DefaultMATSimUpdater.java +@@ -1,4 +1,4 @@ +-package cz.cuni.mff.d3s.deeco.simulation.matsim; ++package cz.cuni.mff.d3s.jdeeco.matsim.old.simulation; + + import java.util.Collection; + import java.util.Map; +diff --git a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/JDEECoAgent.java b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/JDEECoAgent.java +similarity index 99% +rename from jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/JDEECoAgent.java +rename to jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/JDEECoAgent.java +index 6daade023..c5875fb3f 100644 +--- a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/JDEECoAgent.java ++++ b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/JDEECoAgent.java +@@ -1,4 +1,4 @@ +-package cz.cuni.mff.d3s.deeco.simulation.matsim; ++package cz.cuni.mff.d3s.jdeeco.matsim.old.simulation; + + import java.util.LinkedList; + import java.util.List; +diff --git a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/JDEECoAgentProvider.java b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/JDEECoAgentProvider.java +similarity index 84% +rename from jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/JDEECoAgentProvider.java +rename to jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/JDEECoAgentProvider.java +index bc64756ac..ca3f768da 100644 +--- a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/JDEECoAgentProvider.java ++++ b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/JDEECoAgentProvider.java +@@ -1,4 +1,4 @@ +-package 
cz.cuni.mff.d3s.deeco.simulation.matsim; ++package cz.cuni.mff.d3s.jdeeco.matsim.old.simulation; + + import java.util.Collection; + +diff --git a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/JDEECoAgentSource.java b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/JDEECoAgentSource.java +similarity index 96% +rename from jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/JDEECoAgentSource.java +rename to jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/JDEECoAgentSource.java +index 5b18954af..f21290b8c 100644 +--- a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/JDEECoAgentSource.java ++++ b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/JDEECoAgentSource.java +@@ -1,4 +1,4 @@ +-package cz.cuni.mff.d3s.deeco.simulation.matsim; ++package cz.cuni.mff.d3s.jdeeco.matsim.old.simulation; + + import java.util.Collection; + import java.util.LinkedList; +diff --git a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/JDEECoMobsimFactory.java b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/JDEECoMobsimFactory.java +similarity index 97% +rename from jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/JDEECoMobsimFactory.java +rename to jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/JDEECoMobsimFactory.java +index a1c03bd98..2ca34fbb9 100644 +--- a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/JDEECoMobsimFactory.java ++++ b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/JDEECoMobsimFactory.java +@@ -1,4 +1,4 @@ +-package cz.cuni.mff.d3s.deeco.simulation.matsim; ++package cz.cuni.mff.d3s.jdeeco.matsim.old.simulation; + + import java.util.Collection; + +diff --git a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/JDEECoWithinDayMobsimListener.java b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/JDEECoWithinDayMobsimListener.java +similarity index 98% +rename from jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/JDEECoWithinDayMobsimListener.java +rename to jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/JDEECoWithinDayMobsimListener.java +index c5cf1e25b..0f3f0cffb 100644 +--- a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/JDEECoWithinDayMobsimListener.java ++++ b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/JDEECoWithinDayMobsimListener.java +@@ -1,4 +1,4 @@ +-package cz.cuni.mff.d3s.deeco.simulation.matsim; ++package cz.cuni.mff.d3s.jdeeco.matsim.old.simulation; + + import java.util.Collection; + import java.util.LinkedList; +diff --git a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimDataProvider.java b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimDataProvider.java +similarity index 83% +rename from jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimDataProvider.java +rename to jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimDataProvider.java +index 2f0479567..cdf574239 100644 +--- a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimDataProvider.java ++++ b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimDataProvider.java +@@ -1,4 +1,4 @@ +-package cz.cuni.mff.d3s.deeco.simulation.matsim; ++package cz.cuni.mff.d3s.jdeeco.matsim.old.simulation; + + import java.util.Map; + +diff --git 
a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimDataReceiver.java b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimDataReceiver.java +similarity index 80% +rename from jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimDataReceiver.java +rename to jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimDataReceiver.java +index c8485854e..0536552fc 100644 +--- a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimDataReceiver.java ++++ b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimDataReceiver.java +@@ -1,4 +1,4 @@ +-package cz.cuni.mff.d3s.deeco.simulation.matsim; ++package cz.cuni.mff.d3s.jdeeco.matsim.old.simulation; + + + /** +diff --git a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimExtractor.java b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimExtractor.java +similarity index 78% +rename from jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimExtractor.java +rename to jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimExtractor.java +index ab6a9828d..8aa7b655d 100644 +--- a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimExtractor.java ++++ b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimExtractor.java +@@ -1,4 +1,4 @@ +-package cz.cuni.mff.d3s.deeco.simulation.matsim; ++package cz.cuni.mff.d3s.jdeeco.matsim.old.simulation; + + import java.util.Collection; + +diff --git a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimInput.java b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimInput.java +similarity index 91% +rename from jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimInput.java +rename to jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimInput.java +index e8c21f89f..d3f3f1b8e 100644 +--- a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimInput.java ++++ b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimInput.java +@@ -1,4 +1,4 @@ +-package cz.cuni.mff.d3s.deeco.simulation.matsim; ++package cz.cuni.mff.d3s.jdeeco.matsim.old.simulation; + + import java.util.LinkedList; + import java.util.List; +diff --git a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimOMNetCoordinatesTranslator.java b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimOMNetCoordinatesTranslator.java +similarity index 96% +rename from jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimOMNetCoordinatesTranslator.java +rename to jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimOMNetCoordinatesTranslator.java +index cc7664c2b..a3059ae5c 100644 +--- a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimOMNetCoordinatesTranslator.java ++++ b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimOMNetCoordinatesTranslator.java +@@ -1,4 +1,4 @@ +-package cz.cuni.mff.d3s.deeco.simulation.matsim; ++package cz.cuni.mff.d3s.jdeeco.matsim.old.simulation; + + import org.matsim.api.core.v01.Coord; + import org.matsim.api.core.v01.network.Network; +diff --git a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimOutput.java b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimOutput.java +similarity index 88% 
+rename from jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimOutput.java +rename to jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimOutput.java +index bbdc67ae5..db4169079 100644 +--- a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimOutput.java ++++ b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimOutput.java +@@ -1,4 +1,4 @@ +-package cz.cuni.mff.d3s.deeco.simulation.matsim; ++package cz.cuni.mff.d3s.jdeeco.matsim.old.simulation; + + import java.util.List; + +diff --git a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimPopulationAgentSource.java b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimPopulationAgentSource.java +similarity index 98% +rename from jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimPopulationAgentSource.java +rename to jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimPopulationAgentSource.java +index 2f0bca14d..a79186398 100644 +--- a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimPopulationAgentSource.java ++++ b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimPopulationAgentSource.java +@@ -1,4 +1,4 @@ +-package cz.cuni.mff.d3s.deeco.simulation.matsim; ++package cz.cuni.mff.d3s.jdeeco.matsim.old.simulation; + + import java.util.Collection; + import java.util.HashMap; +diff --git a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimPreloadingControler.java b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimPreloadingControler.java +similarity index 93% +rename from jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimPreloadingControler.java +rename to jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimPreloadingControler.java +index c01dcfcd5..00be66f28 100644 +--- a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimPreloadingControler.java ++++ b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimPreloadingControler.java +@@ -1,4 +1,4 @@ +-package cz.cuni.mff.d3s.deeco.simulation.matsim; ++package cz.cuni.mff.d3s.jdeeco.matsim.old.simulation; + + import org.matsim.core.controler.Controler; + import org.matsim.core.scenario.ScenarioImpl; +diff --git a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimRouter.java b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimRouter.java +similarity index 99% +rename from jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimRouter.java +rename to jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimRouter.java +index 2cee0d5e3..d49692255 100644 +--- a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimRouter.java ++++ b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimRouter.java +@@ -1,4 +1,4 @@ +-package cz.cuni.mff.d3s.deeco.simulation.matsim; ++package cz.cuni.mff.d3s.jdeeco.matsim.old.simulation; + + import java.util.Collection; + import java.util.LinkedList; +diff --git a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimSimulationStepListener.java b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimSimulationStepListener.java +similarity index 73% +rename from jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimSimulationStepListener.java 
+rename to jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimSimulationStepListener.java +index c00c4412b..076df9048 100644 +--- a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimSimulationStepListener.java ++++ b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimSimulationStepListener.java +@@ -1,4 +1,4 @@ +-package cz.cuni.mff.d3s.deeco.simulation.matsim; ++package cz.cuni.mff.d3s.jdeeco.matsim.old.simulation; + + import org.matsim.core.mobsim.framework.Mobsim; + +diff --git a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimUpdater.java b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimUpdater.java +similarity index 72% +rename from jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimUpdater.java +rename to jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimUpdater.java +index 5c63f650e..7ace9385c 100644 +--- a/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/deeco/simulation/matsim/MATSimUpdater.java ++++ b/jdeeco-matsim-plugin/src/cz/cuni/mff/d3s/jdeeco/matsim/old/simulation/MATSimUpdater.java +@@ -1,4 +1,4 @@ +-package cz.cuni.mff.d3s.deeco.simulation.matsim; ++package cz.cuni.mff.d3s.jdeeco.matsim.old.simulation; + + import java.util.Collection; + +diff --git a/jdeeco-matsim-plugin/test/cz/cuni/mff/d3s/jdeeco/matsim/demo/convoy/Vehicle.java b/jdeeco-matsim-plugin/test/cz/cuni/mff/d3s/jdeeco/matsim/demo/convoy/Vehicle.java +index 7d6bada36..400c20e51 100644 +--- a/jdeeco-matsim-plugin/test/cz/cuni/mff/d3s/jdeeco/matsim/demo/convoy/Vehicle.java ++++ b/jdeeco-matsim-plugin/test/cz/cuni/mff/d3s/jdeeco/matsim/demo/convoy/Vehicle.java +@@ -13,7 +13,6 @@ + import cz.cuni.mff.d3s.deeco.annotations.PeriodicScheduling; + import cz.cuni.mff.d3s.deeco.annotations.Process; + import cz.cuni.mff.d3s.deeco.logging.Log; +-import cz.cuni.mff.d3s.deeco.simulation.matsim.MATSimRouter; + import cz.cuni.mff.d3s.deeco.task.ParamHolder; + import cz.cuni.mff.d3s.deeco.timer.CurrentTimeProvider; + import cz.cuni.mff.d3s.jdeeco.matsim.MATSimVehicle; +@@ -21,6 +20,7 @@ + import cz.cuni.mff.d3s.jdeeco.matsim.old.roadtrains.ActuatorType; + import cz.cuni.mff.d3s.jdeeco.matsim.old.roadtrains.Sensor; + import cz.cuni.mff.d3s.jdeeco.matsim.old.roadtrains.SensorType; ++import cz.cuni.mff.d3s.jdeeco.matsim.old.simulation.MATSimRouter; + + @Component + public class Vehicle {" +6d7a01a1ea85e77f7f323604ea7ba104dc0944f3,ning$compress,"Overload of factory methods and constructors in Encoders and Streams, to allow specifying a concrete BufferRecycler instance, as an alternative to the default ThreadLocal soft-references policy. 
+The change does not break compatibility with existing API, but adds flexibility for resource-eficiency if used from pools (different threads could reuse the same BufferRecycler instances, avoiding the creation of instances on a per Thread basis) and some minor performance gain in preexistent LZFOutputStream constructors (because ThreadLocal is only accessed once).",p,https://github.com/ning/compress,"diff --git a/src/main/java/com/ning/compress/gzip/GZIPUncompressor.java b/src/main/java/com/ning/compress/gzip/GZIPUncompressor.java +index 7f357a4..0338522 100644 +--- a/src/main/java/com/ning/compress/gzip/GZIPUncompressor.java ++++ b/src/main/java/com/ning/compress/gzip/GZIPUncompressor.java +@@ -171,17 +171,22 @@ public class GZIPUncompressor extends Uncompressor + + public GZIPUncompressor(DataHandler h) + { +- this(h, DEFAULT_CHUNK_SIZE); ++ this(h, DEFAULT_CHUNK_SIZE, BufferRecycler.instance(), GZIPRecycler.instance()); + } + + public GZIPUncompressor(DataHandler h, int inputChunkLength) ++ { ++ this(h, inputChunkLength, BufferRecycler.instance(), GZIPRecycler.instance()); ++ } + -+ @Override -+ protected void configureShell(Shell newShell) { -+ super.configureShell(newShell); -+ newShell.setText(""Insert Resource-Bundle-Reference""); -+ } ++ public GZIPUncompressor(DataHandler h, int inputChunkLength, BufferRecycler bufferRecycler, GZIPRecycler gzipRecycler) + { + _inputChunkLength = inputChunkLength; + _handler = h; +- _recycler = BufferRecycler.instance(); +- _decodeBuffer = _recycler.allocDecodeBuffer(DECODE_BUFFER_SIZE); +- _gzipRecycler = GZIPRecycler.instance(); +- _inflater = _gzipRecycler.allocInflater(); ++ _recycler = bufferRecycler; ++ _decodeBuffer = bufferRecycler.allocDecodeBuffer(DECODE_BUFFER_SIZE); ++ _gzipRecycler = gzipRecycler; ++ _inflater = gzipRecycler.allocInflater(); + _crc = new CRC32(); + } + +diff --git a/src/main/java/com/ning/compress/gzip/OptimizedGZIPInputStream.java b/src/main/java/com/ning/compress/gzip/OptimizedGZIPInputStream.java +index 80024dd..bec84e3 100644 +--- a/src/main/java/com/ning/compress/gzip/OptimizedGZIPInputStream.java ++++ b/src/main/java/com/ning/compress/gzip/OptimizedGZIPInputStream.java +@@ -77,15 +77,20 @@ enum State { + */ + + public OptimizedGZIPInputStream(InputStream in) throws IOException ++ { ++ this(in, BufferRecycler.instance(), GZIPRecycler.instance()); ++ } ++ ++ public OptimizedGZIPInputStream(InputStream in, BufferRecycler bufferRecycler, GZIPRecycler gzipRecycler) throws IOException + { + super(); +- _bufferRecycler = BufferRecycler.instance(); +- _gzipRecycler = GZIPRecycler.instance(); ++ _bufferRecycler = bufferRecycler; ++ _gzipRecycler = gzipRecycler; + _rawInput = in; +- _buffer = _bufferRecycler.allocInputBuffer(INPUT_BUFFER_SIZE); ++ _buffer = bufferRecycler.allocInputBuffer(INPUT_BUFFER_SIZE); + + _bufferPtr = _bufferEnd = 0; +- _inflater = _gzipRecycler.allocInflater(); ++ _inflater = gzipRecycler.allocInflater(); + _crc = new CRC32(); + + // And then need to process header... +diff --git a/src/main/java/com/ning/compress/lzf/ChunkEncoder.java b/src/main/java/com/ning/compress/lzf/ChunkEncoder.java +index 799864d..6c5df0a 100644 +--- a/src/main/java/com/ning/compress/lzf/ChunkEncoder.java ++++ b/src/main/java/com/ning/compress/lzf/ChunkEncoder.java +@@ -75,34 +75,56 @@ public abstract class ChunkEncoder + protected byte[] _headerBuffer; + + /** ++ * Uses a ThreadLocal soft-referenced BufferRecycler instance. 
++ * + * @param totalLength Total encoded length; used for calculating size + * of hash table to use + */ + protected ChunkEncoder(int totalLength) ++ { ++ this(totalLength, BufferRecycler.instance()); ++ } ++ ++ /** ++ * @param totalLength Total encoded length; used for calculating size ++ * of hash table to use ++ * @param bufferRecycler Buffer recycler instance, for usages where the ++ * caller manages the recycler instances ++ */ ++ protected ChunkEncoder(int totalLength, BufferRecycler bufferRecycler) + { + // Need room for at most a single full chunk + int largestChunkLen = Math.min(totalLength, LZFChunk.MAX_CHUNK_LEN); + int suggestedHashLen = calcHashLen(largestChunkLen); +- _recycler = BufferRecycler.instance(); +- _hashTable = _recycler.allocEncodingHash(suggestedHashLen); ++ _recycler = bufferRecycler; ++ _hashTable = bufferRecycler.allocEncodingHash(suggestedHashLen); + _hashModulo = _hashTable.length - 1; + // Ok, then, what's the worst case output buffer length? + // length indicator for each 32 literals, so: + // 21-Feb-2013, tatu: Plus we want to prepend chunk header in place: + int bufferLen = largestChunkLen + ((largestChunkLen + 31) >> 5) + LZFChunk.MAX_HEADER_LEN; +- _encodeBuffer = _recycler.allocEncodingBuffer(bufferLen); ++ _encodeBuffer = bufferRecycler.allocEncodingBuffer(bufferLen); + } +- + -+ @Override -+ public void create() { -+ // TODO Auto-generated method stub -+ super.create(); -+ this.setTitle(""Reference a Resource""); -+ this.setMessage(""Please, specify details about the required Resource-Bundle reference""); -+ } + /** + * Alternate constructor used when we want to avoid allocation encoding + * buffer, in cases where caller wants full control over allocations. + */ + protected ChunkEncoder(int totalLength, boolean bogus) ++ { ++ this(totalLength, BufferRecycler.instance(), bogus); ++ } + -+ protected void updatePreviewLabel (String previewText) { -+ txtPreviewText.setText(previewText); ++ /** ++ * Alternate constructor used when we want to avoid allocation encoding ++ * buffer, in cases where caller wants full control over allocations. ++ */ ++ protected ChunkEncoder(int totalLength, BufferRecycler bufferRecycler, boolean bogus) + { + int largestChunkLen = Math.max(totalLength, LZFChunk.MAX_CHUNK_LEN); + int suggestedHashLen = calcHashLen(largestChunkLen); +- _recycler = BufferRecycler.instance(); +- _hashTable = _recycler.allocEncodingHash(suggestedHashLen); ++ _recycler = bufferRecycler; ++ _hashTable = bufferRecycler.allocEncodingHash(suggestedHashLen); + _hashModulo = _hashTable.length - 1; + _encodeBuffer = null; + } +@@ -297,6 +319,10 @@ public boolean encodeAndWriteChunkIfCompresses(byte[] data, int offset, int inpu + return false; + } + ++ public BufferRecycler getBufferRecycler() { ++ return _recycler; + } -+ -+ protected void validate () { -+ // Check Resource-Bundle ids -+ boolean rbValid = false; -+ boolean localeValid = false; -+ boolean keyValid = false; -+ -+ for (String rbId : this.availableBundles) { -+ if (rbId.equals(selectedRB)) { -+ rbValid = true; -+ break; -+ } -+ } -+ -+ if (selectedLocale != null) -+ localeValid = true; -+ -+ if (manager.isResourceExisting(selectedRB, selectedKey)) -+ keyValid = true; -+ -+ // print Validation summary -+ String errorMessage = null; -+ if (! rbValid) -+ errorMessage = ""The specified Resource-Bundle does not exist""; -+// else if (! localeValid) -+// errorMessage = ""The specified Locale does not exist for the selecte Resource-Bundle""; -+ else if (! 
keyValid) -+ errorMessage = ""No resource selected""; -+ else { -+ if (okButton != null) -+ okButton.setEnabled(true); ++ + /* + /////////////////////////////////////////////////////////////////////// + // Abstract methods for sub-classes +diff --git a/src/main/java/com/ning/compress/lzf/LZFCompressingInputStream.java b/src/main/java/com/ning/compress/lzf/LZFCompressingInputStream.java +index 6c49924..e46b8b2 100644 +--- a/src/main/java/com/ning/compress/lzf/LZFCompressingInputStream.java ++++ b/src/main/java/com/ning/compress/lzf/LZFCompressingInputStream.java +@@ -76,16 +76,24 @@ public class LZFCompressingInputStream extends InputStream + + public LZFCompressingInputStream(InputStream in) + { +- this(null, in); ++ this(null, in, BufferRecycler.instance()); + } + + public LZFCompressingInputStream(final ChunkEncoder encoder, InputStream in) ++ { ++ this(encoder, in, null); ++ } ++ ++ public LZFCompressingInputStream(final ChunkEncoder encoder, InputStream in, BufferRecycler bufferRecycler) + { + // may be passed by caller, or could be null + _encoder = encoder; + _inputStream = in; +- _recycler = BufferRecycler.instance(); +- _inputBuffer = _recycler.allocInputBuffer(LZFChunk.MAX_CHUNK_LEN); ++ if (bufferRecycler==null) { ++ bufferRecycler = (encoder!=null) ? _encoder._recycler : BufferRecycler.instance(); + } ++ _recycler = bufferRecycler; ++ _inputBuffer = bufferRecycler.allocInputBuffer(LZFChunk.MAX_CHUNK_LEN); + // let's not yet allocate encoding buffer; don't know optimal size + } + +@@ -259,7 +267,7 @@ protected boolean readyBuffer() throws IOException + if (_encoder == null) { + // need 7 byte header, plus regular max buffer size: + int bufferLen = chunkLength + ((chunkLength + 31) >> 5) + 7; +- _encoder = ChunkEncoderFactory.optimalNonAllocatingInstance(bufferLen); ++ _encoder = ChunkEncoderFactory.optimalNonAllocatingInstance(bufferLen, _recycler); + } + if (_encodedBytes == null) { + int bufferLen = chunkLength + ((chunkLength + 31) >> 5) + 7; +diff --git a/src/main/java/com/ning/compress/lzf/LZFEncoder.java b/src/main/java/com/ning/compress/lzf/LZFEncoder.java +index b147071..9835e2d 100644 +--- a/src/main/java/com/ning/compress/lzf/LZFEncoder.java ++++ b/src/main/java/com/ning/compress/lzf/LZFEncoder.java +@@ -11,6 +11,7 @@ + + package com.ning.compress.lzf; + ++import com.ning.compress.BufferRecycler; + import com.ning.compress.lzf.util.ChunkEncoderFactory; + + /** +@@ -121,6 +122,36 @@ public static byte[] safeEncode(byte[] data, int offset, int length) + return result; + } + ++ /** ++ * Method for compressing given input data using LZF encoding and ++ * block structure (compatible with lzf command line utility). ++ * Result consists of a sequence of chunks. ++ *

++ * Note that {@link ChunkEncoder} instance used is one produced by ++ * {@link ChunkEncoderFactory#optimalInstance}, which typically ++ * is ""unsafe"" instance if one can be used on current JVM. ++ */ ++ public static byte[] encode(byte[] data, int offset, int length, BufferRecycler bufferRecycler) ++ { ++ ChunkEncoder enc = ChunkEncoderFactory.optimalInstance(length, bufferRecycler); ++ byte[] result = encode(enc, data, offset, length); ++ enc.close(); // important for buffer reuse! ++ return result; ++ } + -+ setErrorMessage(errorMessage); -+ if (okButton != null && errorMessage != null) -+ okButton.setEnabled(false); -+ } ++ /** ++ * Method that will use ""safe"" {@link ChunkEncoder}, as produced by ++ * {@link ChunkEncoderFactory#safeInstance}, for encoding. Safe here ++ * means that it does not use any non-compliant features beyond core JDK. ++ */ ++ public static byte[] safeEncode(byte[] data, int offset, int length, BufferRecycler bufferRecycler) ++ { ++ ChunkEncoder enc = ChunkEncoderFactory.safeInstance(length, bufferRecycler); ++ byte[] result = encode(enc, data, offset, length); ++ enc.close(); ++ return result; ++ } + -+ @Override -+ protected void createButtonsForButtonBar(Composite parent) { -+ okButton = createButton (parent, OK, ""Ok"", true); -+ okButton.addSelectionListener (new SelectionAdapter() { -+ public void widgetSelected(SelectionEvent e) { -+ // Set return code -+ setReturnCode(OK); -+ close(); -+ } -+ }); -+ -+ cancelButton = createButton (parent, CANCEL, ""Cancel"", false); -+ cancelButton.addSelectionListener (new SelectionAdapter() { -+ public void widgetSelected(SelectionEvent e) { -+ setReturnCode (CANCEL); -+ close(); -+ } -+ }); -+ -+ okButton.setEnabled(false); -+ cancelButton.setEnabled(true); -+ } -+ -+ public String getSelectedResourceBundle () { -+ return selectedRB; -+ } -+ -+ public String getSelectedResource () { -+ return selectedKey; -+ } -+ -+ public Locale getSelectedLocale () { -+ return selectedLocale; -+ } -+} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/dialogs/RemoveLanguageDialoge.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/dialogs/RemoveLanguageDialoge.java -new file mode 100644 -index 00000000..8046819c ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/dialogs/RemoveLanguageDialoge.java -@@ -0,0 +1,112 @@ -+package org.eclipselabs.tapiji.tools.core.ui.dialogs; + /** + * Compression method that uses specified {@link ChunkEncoder} for actual + * encoding. +@@ -207,6 +238,36 @@ public static int safeAppendEncoded(byte[] input, int inputPtr, int inputLength, + + /** + * Alternate version that accepts pre-allocated output buffer. ++ *

++ * Note that {@link ChunkEncoder} instance used is one produced by ++ * {@link ChunkEncoderFactory#optimalNonAllocatingInstance}, which typically ++ * is ""unsafe"" instance if one can be used on current JVM. ++ */ ++ public static int appendEncoded(byte[] input, int inputPtr, int inputLength, ++ byte[] outputBuffer, int outputPtr, BufferRecycler bufferRecycler) { ++ ChunkEncoder enc = ChunkEncoderFactory.optimalNonAllocatingInstance(inputLength, bufferRecycler); ++ int len = appendEncoded(enc, input, inputPtr, inputLength, outputBuffer, outputPtr); ++ enc.close(); ++ return len; ++ } + -+import java.util.List; -+import java.util.Locale; -+import java.util.Set; ++ /** ++ * Alternate version that accepts pre-allocated output buffer. ++ *

++ * Method that will use ""safe"" {@link ChunkEncoder}, as produced by ++ * {@link ChunkEncoderFactory#safeInstance}, for encoding. Safe here ++ * means that it does not use any non-compliant features beyond core JDK. ++ */ ++ public static int safeAppendEncoded(byte[] input, int inputPtr, int inputLength, ++ byte[] outputBuffer, int outputPtr, BufferRecycler bufferRecycler) { ++ ChunkEncoder enc = ChunkEncoderFactory.safeNonAllocatingInstance(inputLength, bufferRecycler); ++ int len = appendEncoded(enc, input, inputPtr, inputLength, outputBuffer, outputPtr); ++ enc.close(); ++ return len; ++ } + -+import org.eclipse.core.resources.IProject; -+import org.eclipse.jface.viewers.ILabelProvider; -+import org.eclipse.jface.viewers.ILabelProviderListener; -+import org.eclipse.jface.viewers.IStructuredContentProvider; -+import org.eclipse.jface.viewers.Viewer; -+import org.eclipse.swt.graphics.Image; -+import org.eclipse.swt.widgets.Shell; -+import org.eclipse.ui.dialogs.ListDialog; -+import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; -+import org.eclipselabs.tapiji.tools.core.util.ImageUtils; ++ /** ++ * Alternate version that accepts pre-allocated output buffer. + */ + public static int appendEncoded(ChunkEncoder enc, byte[] input, int inputPtr, int inputLength, + byte[] outputBuffer, int outputPtr) +diff --git a/src/main/java/com/ning/compress/lzf/LZFInputStream.java b/src/main/java/com/ning/compress/lzf/LZFInputStream.java +index cd0c280..6626089 100644 +--- a/src/main/java/com/ning/compress/lzf/LZFInputStream.java ++++ b/src/main/java/com/ning/compress/lzf/LZFInputStream.java +@@ -83,7 +83,7 @@ public LZFInputStream(final InputStream inputStream) throws IOException + public LZFInputStream(final ChunkDecoder decoder, final InputStream in) + throws IOException + { +- this(decoder, in, false); ++ this(decoder, in, BufferRecycler.instance(), false); + } + + /** +@@ -94,21 +94,45 @@ public LZFInputStream(final ChunkDecoder decoder, final InputStream in) + */ + public LZFInputStream(final InputStream in, boolean fullReads) throws IOException + { +- this(ChunkDecoderFactory.optimalInstance(), in, fullReads); ++ this(ChunkDecoderFactory.optimalInstance(), in, BufferRecycler.instance(), fullReads); + } + + public LZFInputStream(final ChunkDecoder decoder, final InputStream in, boolean fullReads) + throws IOException ++ { ++ this(decoder, in, BufferRecycler.instance(), fullReads); ++ } + ++ public LZFInputStream(final InputStream inputStream, final BufferRecycler bufferRecycler) throws IOException ++ { ++ this(inputStream, bufferRecycler, false); ++ } + -+public class RemoveLanguageDialoge extends ListDialog{ -+ private IProject project; ++ /** ++ * @param in Underlying input stream to use ++ * @param fullReads Whether {@link #read(byte[])} should try to read exactly ++ * as many bytes as requested (true); or just however many happen to be ++ * available (false) ++ * @param bufferRecycler Buffer recycler instance, for usages where the ++ * caller manages the recycler instances ++ */ ++ public LZFInputStream(final InputStream in, final BufferRecycler bufferRecycler, boolean fullReads) throws IOException ++ { ++ this(ChunkDecoderFactory.optimalInstance(), in, bufferRecycler, fullReads); ++ } + -+ -+ public RemoveLanguageDialoge(IProject project, Shell shell) { -+ super(shell); -+ this.project=project; -+ -+ initDialog(); -+ } ++ public LZFInputStream(final ChunkDecoder decoder, final InputStream in, final BufferRecycler bufferRecycler, boolean fullReads) ++ throws IOException 
+ { + super(); + _decoder = decoder; +- _recycler = BufferRecycler.instance(); ++ _recycler = bufferRecycler; + _inputStream = in; + _inputStreamClosed = false; + _cfgFullReads = fullReads; + +- _inputBuffer = _recycler.allocInputBuffer(LZFChunk.MAX_CHUNK_LEN); +- _decodedBytes = _recycler.allocDecodeBuffer(LZFChunk.MAX_CHUNK_LEN); ++ _inputBuffer = bufferRecycler.allocInputBuffer(LZFChunk.MAX_CHUNK_LEN); ++ _decodedBytes = bufferRecycler.allocDecodeBuffer(LZFChunk.MAX_CHUNK_LEN); + } + + /** +diff --git a/src/main/java/com/ning/compress/lzf/LZFOutputStream.java b/src/main/java/com/ning/compress/lzf/LZFOutputStream.java +index fe7bdab..c55a8b9 100644 +--- a/src/main/java/com/ning/compress/lzf/LZFOutputStream.java ++++ b/src/main/java/com/ning/compress/lzf/LZFOutputStream.java +@@ -28,7 +28,7 @@ + */ + public class LZFOutputStream extends FilterOutputStream implements WritableByteChannel + { +- private static final int OUTPUT_BUFFER_SIZE = LZFChunk.MAX_CHUNK_LEN; ++ private static final int DEFAULT_OUTPUT_BUFFER_SIZE = LZFChunk.MAX_CHUNK_LEN; + + private final ChunkEncoder _encoder; + private final BufferRecycler _recycler; +@@ -58,15 +58,34 @@ public class LZFOutputStream extends FilterOutputStream implements WritableByteC + + public LZFOutputStream(final OutputStream outputStream) + { +- this(ChunkEncoderFactory.optimalInstance(OUTPUT_BUFFER_SIZE), outputStream); ++ this(ChunkEncoderFactory.optimalInstance(DEFAULT_OUTPUT_BUFFER_SIZE), outputStream); + } + + public LZFOutputStream(final ChunkEncoder encoder, final OutputStream outputStream) ++ { ++ this(encoder, outputStream, DEFAULT_OUTPUT_BUFFER_SIZE, encoder._recycler); ++ } + -+ protected void initDialog () { -+ this.setAddCancelButton(true); -+ this.setMessage(""Select one of the following languages to delete:""); -+ this.setTitle(""Language Selector""); -+ this.setContentProvider(new RBContentProvider()); -+ this.setLabelProvider(new RBLabelProvider()); -+ -+ this.setInput(ResourceBundleManager.getManager(project).getProjectProvidedLocales()); -+ } -+ -+ public Locale getSelectedLanguage() { -+ Object[] selection = this.getResult(); -+ if (selection != null && selection.length > 0) -+ return (Locale) selection[0]; -+ return null; -+ } -+ -+ -+ //private classes------------------------------------------------------------------------------------- -+ class RBContentProvider implements IStructuredContentProvider { ++ public LZFOutputStream(final OutputStream outputStream, final BufferRecycler bufferRecycler) ++ { ++ this(ChunkEncoderFactory.optimalInstance(bufferRecycler), outputStream, bufferRecycler); ++ } + -+ @Override -+ public Object[] getElements(Object inputElement) { -+ Set resources = (Set) inputElement; -+ return resources.toArray(); -+ } ++ public LZFOutputStream(final ChunkEncoder encoder, final OutputStream outputStream, final BufferRecycler bufferRecycler) ++ { ++ this(encoder, outputStream, DEFAULT_OUTPUT_BUFFER_SIZE, bufferRecycler); ++ } + -+ @Override -+ public void dispose() { -+ // TODO Auto-generated method stub -+ ++ public LZFOutputStream(final ChunkEncoder encoder, final OutputStream outputStream, ++ final int bufferSize, BufferRecycler bufferRecycler) + { + super(outputStream); + _encoder = encoder; +- _recycler = BufferRecycler.instance(); +- _outputBuffer = _recycler.allocOutputBuffer(OUTPUT_BUFFER_SIZE); ++ if (bufferRecycler==null) { ++ bufferRecycler = _encoder._recycler; + } ++ _recycler = bufferRecycler; ++ _outputBuffer = bufferRecycler.allocOutputBuffer(bufferSize); + _outputStreamClosed = false; + } + 
+diff --git a/src/main/java/com/ning/compress/lzf/LZFUncompressor.java b/src/main/java/com/ning/compress/lzf/LZFUncompressor.java +index e8e756f..0cd6a44 100644 +--- a/src/main/java/com/ning/compress/lzf/LZFUncompressor.java ++++ b/src/main/java/com/ning/compress/lzf/LZFUncompressor.java +@@ -109,14 +109,23 @@ public class LZFUncompressor extends Uncompressor + */ + + public LZFUncompressor(DataHandler handler) { +- this(handler, ChunkDecoderFactory.optimalInstance()); ++ this(handler, ChunkDecoderFactory.optimalInstance(), BufferRecycler.instance()); ++ } ++ ++ public LZFUncompressor(DataHandler handler, BufferRecycler bufferRecycler) { ++ this(handler, ChunkDecoderFactory.optimalInstance(), bufferRecycler); + } + + public LZFUncompressor(DataHandler handler, ChunkDecoder dec) ++ { ++ this(handler, dec, BufferRecycler.instance()); ++ } + -+ @Override -+ public void inputChanged(Viewer viewer, Object oldInput, Object newInput) { -+ // TODO Auto-generated method stub -+ -+ } -+ -+ } -+ -+ class RBLabelProvider implements ILabelProvider { ++ public LZFUncompressor(DataHandler handler, ChunkDecoder dec, BufferRecycler bufferRecycler) + { + _handler = handler; + _decoder = dec; +- _recycler = BufferRecycler.instance(); ++ _recycler = bufferRecycler; + } + + /* +diff --git a/src/main/java/com/ning/compress/lzf/impl/UnsafeChunkEncoder.java b/src/main/java/com/ning/compress/lzf/impl/UnsafeChunkEncoder.java +index 3a7648e..6dab20e 100644 +--- a/src/main/java/com/ning/compress/lzf/impl/UnsafeChunkEncoder.java ++++ b/src/main/java/com/ning/compress/lzf/impl/UnsafeChunkEncoder.java +@@ -1,5 +1,6 @@ + package com.ning.compress.lzf.impl; + ++import com.ning.compress.BufferRecycler; + import java.lang.reflect.Field; + + import sun.misc.Unsafe; +@@ -44,6 +45,14 @@ public UnsafeChunkEncoder(int totalLength, boolean bogus) { + super(totalLength, bogus); + } + ++ public UnsafeChunkEncoder(int totalLength, BufferRecycler bufferRecycler) { ++ super(totalLength, bufferRecycler); ++ } + -+ @Override -+ public Image getImage(Object element) { -+ return ImageUtils.getImage(ImageUtils.IMAGE_RESOURCE_BUNDLE); -+ } ++ public UnsafeChunkEncoder(int totalLength, BufferRecycler bufferRecycler, boolean bogus) { ++ super(totalLength, bufferRecycler, bogus); ++ } + -+ @Override -+ public String getText(Object element) { -+ Locale l = ((Locale) element); -+ String text = l.getDisplayName(); -+ if (text==null || text.equals("""")) text=""default""; -+ else text += "" - ""+l.getLanguage()+"" ""+l.getCountry()+"" ""+l.getVariant(); -+ return text; -+ } + /* + /////////////////////////////////////////////////////////////////////// + // Shared helper methods +diff --git a/src/main/java/com/ning/compress/lzf/impl/UnsafeChunkEncoderBE.java b/src/main/java/com/ning/compress/lzf/impl/UnsafeChunkEncoderBE.java +index 72dcedb..bba0139 100644 +--- a/src/main/java/com/ning/compress/lzf/impl/UnsafeChunkEncoderBE.java ++++ b/src/main/java/com/ning/compress/lzf/impl/UnsafeChunkEncoderBE.java +@@ -1,5 +1,6 @@ + package com.ning.compress.lzf.impl; + ++import com.ning.compress.BufferRecycler; + import com.ning.compress.lzf.LZFChunk; + + /** +@@ -16,6 +17,14 @@ public UnsafeChunkEncoderBE(int totalLength) { + public UnsafeChunkEncoderBE(int totalLength, boolean bogus) { + super(totalLength, bogus); + } ++ public UnsafeChunkEncoderBE(int totalLength, BufferRecycler bufferRecycler) { ++ super(totalLength, bufferRecycler); ++ } + -+ @Override -+ public void addListener(ILabelProviderListener listener) { -+ // TODO Auto-generated method stub -+ 
-+ } ++ public UnsafeChunkEncoderBE(int totalLength, BufferRecycler bufferRecycler, boolean bogus) { ++ super(totalLength, bufferRecycler, bogus); ++ } + -+ @Override -+ public void dispose() { -+ // TODO Auto-generated method stub -+ -+ } + + @Override + protected int tryCompress(byte[] in, int inPos, int inEnd, byte[] out, int outPos) +diff --git a/src/main/java/com/ning/compress/lzf/impl/UnsafeChunkEncoderLE.java b/src/main/java/com/ning/compress/lzf/impl/UnsafeChunkEncoderLE.java +index 21350de..33d1e7d 100644 +--- a/src/main/java/com/ning/compress/lzf/impl/UnsafeChunkEncoderLE.java ++++ b/src/main/java/com/ning/compress/lzf/impl/UnsafeChunkEncoderLE.java +@@ -1,5 +1,6 @@ + package com.ning.compress.lzf.impl; + ++import com.ning.compress.BufferRecycler; + import com.ning.compress.lzf.LZFChunk; + + /** +@@ -17,7 +18,15 @@ public UnsafeChunkEncoderLE(int totalLength, boolean bogus) { + super(totalLength, bogus); + } + +- @Override ++ public UnsafeChunkEncoderLE(int totalLength, BufferRecycler bufferRecycler) { ++ super(totalLength, bufferRecycler); ++ } + -+ @Override -+ public boolean isLabelProperty(Object element, String property) { -+ // TODO Auto-generated method stub -+ return false; -+ } ++ public UnsafeChunkEncoderLE(int totalLength, BufferRecycler bufferRecycler, boolean bogus) { ++ super(totalLength, bufferRecycler, bogus); ++ } + -+ @Override -+ public void removeListener(ILabelProviderListener listener) { -+ // TODO Auto-generated method stub -+ -+ } -+ -+ } -+} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/dialogs/ResourceBundleEntrySelectionDialog.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/dialogs/ResourceBundleEntrySelectionDialog.java -new file mode 100644 -index 00000000..99ef6c33 ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/dialogs/ResourceBundleEntrySelectionDialog.java -@@ -0,0 +1,423 @@ -+package org.eclipselabs.tapiji.tools.core.ui.dialogs; ++ @Override + protected int tryCompress(byte[] in, int inPos, int inEnd, byte[] out, int outPos) + { + final int[] hashTable = _hashTable; +diff --git a/src/main/java/com/ning/compress/lzf/impl/UnsafeChunkEncoders.java b/src/main/java/com/ning/compress/lzf/impl/UnsafeChunkEncoders.java +index 2ae6777..36489e8 100644 +--- a/src/main/java/com/ning/compress/lzf/impl/UnsafeChunkEncoders.java ++++ b/src/main/java/com/ning/compress/lzf/impl/UnsafeChunkEncoders.java +@@ -11,6 +11,7 @@ + + package com.ning.compress.lzf.impl; + ++import com.ning.compress.BufferRecycler; + import java.nio.ByteOrder; + + +@@ -39,4 +40,18 @@ public static UnsafeChunkEncoder createNonAllocatingEncoder(int totalLength) { + } + return new UnsafeChunkEncoderBE(totalLength, false); + } + -+import java.util.Collection; -+import java.util.Iterator; -+import java.util.Locale; -+import java.util.Set; ++ public static UnsafeChunkEncoder createEncoder(int totalLength, BufferRecycler bufferRecycler) { ++ if (LITTLE_ENDIAN) { ++ return new UnsafeChunkEncoderLE(totalLength, bufferRecycler); ++ } ++ return new UnsafeChunkEncoderBE(totalLength, bufferRecycler); ++ } + -+import org.eclipse.jface.dialogs.TitleAreaDialog; -+import org.eclipse.swt.SWT; -+import org.eclipse.swt.events.ModifyEvent; -+import org.eclipse.swt.events.ModifyListener; -+import org.eclipse.swt.events.SelectionAdapter; -+import org.eclipse.swt.events.SelectionEvent; -+import org.eclipse.swt.events.SelectionListener; -+import org.eclipse.swt.layout.GridData; 
-+import org.eclipse.swt.layout.GridLayout; -+import org.eclipse.swt.widgets.Button; -+import org.eclipse.swt.widgets.Combo; -+import org.eclipse.swt.widgets.Composite; -+import org.eclipse.swt.widgets.Control; -+import org.eclipse.swt.widgets.Group; -+import org.eclipse.swt.widgets.Label; -+import org.eclipse.swt.widgets.Shell; -+import org.eclipse.swt.widgets.Text; -+import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; -+import org.eclipselabs.tapiji.tools.core.ui.widgets.ResourceSelector; -+import org.eclipselabs.tapiji.tools.core.ui.widgets.event.ResourceSelectionEvent; -+import org.eclipselabs.tapiji.tools.core.ui.widgets.listener.IResourceSelectionListener; ++ public static UnsafeChunkEncoder createNonAllocatingEncoder(int totalLength, BufferRecycler bufferRecycler) { ++ if (LITTLE_ENDIAN) { ++ return new UnsafeChunkEncoderLE(totalLength, bufferRecycler, false); ++ } ++ return new UnsafeChunkEncoderBE(totalLength, bufferRecycler, false); ++ } + } +diff --git a/src/main/java/com/ning/compress/lzf/impl/VanillaChunkEncoder.java b/src/main/java/com/ning/compress/lzf/impl/VanillaChunkEncoder.java +index 9e68ac6..74a52d5 100644 +--- a/src/main/java/com/ning/compress/lzf/impl/VanillaChunkEncoder.java ++++ b/src/main/java/com/ning/compress/lzf/impl/VanillaChunkEncoder.java +@@ -1,5 +1,6 @@ + package com.ning.compress.lzf.impl; + ++import com.ning.compress.BufferRecycler; + import com.ning.compress.lzf.ChunkEncoder; + import com.ning.compress.lzf.LZFChunk; + +@@ -22,10 +23,31 @@ protected VanillaChunkEncoder(int totalLength, boolean bogus) { + super(totalLength, bogus); + } + ++ /** ++ * @param totalLength Total encoded length; used for calculating size ++ * of hash table to use ++ * @param bufferRecycler The BufferRecycler instance ++ */ ++ public VanillaChunkEncoder(int totalLength, BufferRecycler bufferRecycler) { ++ super(totalLength, bufferRecycler); ++ } + ++ /** ++ * Alternate constructor used when we want to avoid allocation encoding ++ * buffer, in cases where caller wants full control over allocations. 
++ */ ++ protected VanillaChunkEncoder(int totalLength, BufferRecycler bufferRecycler, boolean bogus) { ++ super(totalLength, bufferRecycler, bogus); ++ } + -+public class ResourceBundleEntrySelectionDialog extends TitleAreaDialog { + public static VanillaChunkEncoder nonAllocatingEncoder(int totalLength) { + return new VanillaChunkEncoder(totalLength, true); + } + ++ public static VanillaChunkEncoder nonAllocatingEncoder(int totalLength, BufferRecycler bufferRecycler) { ++ return new VanillaChunkEncoder(totalLength, bufferRecycler, true); ++ } ++ + /* + /////////////////////////////////////////////////////////////////////// + // Abstract method implementations +diff --git a/src/main/java/com/ning/compress/lzf/parallel/PLZFOutputStream.java b/src/main/java/com/ning/compress/lzf/parallel/PLZFOutputStream.java +index fe52761..0236dec 100644 +--- a/src/main/java/com/ning/compress/lzf/parallel/PLZFOutputStream.java ++++ b/src/main/java/com/ning/compress/lzf/parallel/PLZFOutputStream.java +@@ -41,7 +41,7 @@ + */ + public class PLZFOutputStream extends FilterOutputStream implements WritableByteChannel + { +- private static final int OUTPUT_BUFFER_SIZE = LZFChunk.MAX_CHUNK_LEN; ++ private static final int DEFAULT_OUTPUT_BUFFER_SIZE = LZFChunk.MAX_CHUNK_LEN; + + protected byte[] _outputBuffer; + protected int _position = 0; +@@ -65,16 +65,20 @@ public class PLZFOutputStream extends FilterOutputStream implements WritableByte + */ + + public PLZFOutputStream(final OutputStream outputStream) { +- this(outputStream, getNThreads()); ++ this(outputStream, DEFAULT_OUTPUT_BUFFER_SIZE, getNThreads()); + } + + protected PLZFOutputStream(final OutputStream outputStream, int nThreads) { ++ this(outputStream, DEFAULT_OUTPUT_BUFFER_SIZE, nThreads); ++ } + -+ private static int WIDTH_LEFT_COLUMN = 100; -+ private static int SEARCH_FULLTEXT = 0; -+ private static int SEARCH_KEY = 1; -+ -+ private ResourceBundleManager manager; -+ private Collection availableBundles; -+ private int searchOption = SEARCH_FULLTEXT; -+ private String resourceBundle = """"; -+ -+ private Combo cmbRB; -+ -+ private Button btSearchText; -+ private Button btSearchKey; -+ private Combo cmbLanguage; -+ private ResourceSelector resourceSelector; -+ private Text txtPreviewText; -+ -+ private Button okButton; -+ private Button cancelButton; -+ -+ /*** DIALOG MODEL ***/ -+ private String selectedRB = """"; -+ private String preselectedRB = """"; -+ private Locale selectedLocale = null; -+ private String selectedKey = """"; -+ ++ protected PLZFOutputStream(final OutputStream outputStream, final int bufferSize, int nThreads) { + super(outputStream); + _outputStreamClosed = false; + compressExecutor = new ThreadPoolExecutor(nThreads, nThreads, 60L, TimeUnit.SECONDS, new LinkedBlockingQueue()); // unbounded + ((ThreadPoolExecutor)compressExecutor).allowCoreThreadTimeOut(true); + writeExecutor = Executors.newSingleThreadExecutor(); // unbounded +- blockManager = new BlockManager(nThreads * 2, OUTPUT_BUFFER_SIZE); // this is where the bounds will be enforced! ++ blockManager = new BlockManager(nThreads * 2, bufferSize); // this is where the bounds will be enforced! 
+ _outputBuffer = blockManager.getBlockFromPool(); + } + +diff --git a/src/main/java/com/ning/compress/lzf/util/ChunkEncoderFactory.java b/src/main/java/com/ning/compress/lzf/util/ChunkEncoderFactory.java +index af2134a..f61379f 100644 +--- a/src/main/java/com/ning/compress/lzf/util/ChunkEncoderFactory.java ++++ b/src/main/java/com/ning/compress/lzf/util/ChunkEncoderFactory.java +@@ -1,5 +1,6 @@ + package com.ning.compress.lzf.util; + ++import com.ning.compress.BufferRecycler; + import com.ning.compress.lzf.ChunkEncoder; + import com.ning.compress.lzf.LZFChunk; + import com.ning.compress.lzf.impl.UnsafeChunkEncoders; +@@ -35,6 +36,8 @@ public static ChunkEncoder optimalInstance() { + * non-standard platforms it may be necessary to either directly load + * instances, or use {@link #safeInstance}. + * ++ *

Uses a ThreadLocal soft-referenced BufferRecycler instance. ++ * + * @param totalLength Expected total length of content to compress; only matters + * for content that is smaller than maximum chunk size (64k), to optimize + * encoding hash tables +@@ -50,6 +53,8 @@ public static ChunkEncoder optimalInstance(int totalLength) { + /** + * Factory method for constructing encoder that is always passed buffer + * externally, so that it will not (nor need) allocate encoding buffer. ++ * ++ *

Uses a ThreadLocal soft-referenced BufferRecycler instance. + */ + public static ChunkEncoder optimalNonAllocatingInstance(int totalLength) { + try { +@@ -68,9 +73,12 @@ public static ChunkEncoder optimalNonAllocatingInstance(int totalLength) { + public static ChunkEncoder safeInstance() { + return safeInstance(LZFChunk.MAX_CHUNK_LEN); + } + -+ public ResourceBundleEntrySelectionDialog(Shell parentShell, ResourceBundleManager manager, String bundleName) { -+ super(parentShell); -+ this.manager = manager; -+ // init available resource bundles -+ this.availableBundles = manager.getResourceBundleNames(); -+ this.preselectedRB = bundleName; -+ } + /** + * Method that can be used to ensure that a ""safe"" compressor instance is loaded. + * Safe here means that it should work on any and all Java platforms. ++ * ++ *

Uses a ThreadLocal soft-referenced BufferRecycler instance. + * + * @param totalLength Expected total length of content to compress; only matters + * for content that is smaller than maximum chunk size (64k), to optimize +@@ -83,8 +91,82 @@ public static ChunkEncoder safeInstance(int totalLength) { + /** + * Factory method for constructing encoder that is always passed buffer + * externally, so that it will not (nor need) allocate encoding buffer. ++ * ++ *

Uses a ThreadLocal soft-referenced BufferRecycler instance. + */ + public static ChunkEncoder safeNonAllocatingInstance(int totalLength) { + return VanillaChunkEncoder.nonAllocatingEncoder(totalLength); + } + -+ @Override -+ protected Control createDialogArea(Composite parent) { -+ Composite dialogArea = (Composite) super.createDialogArea(parent); -+ initLayout (dialogArea); -+ constructSearchSection (dialogArea); -+ initContent (); -+ return dialogArea; -+ } -+ -+ protected void initContent() { -+ // init available resource bundles -+ cmbRB.removeAll(); -+ int i = 0; -+ for (String bundle : availableBundles) { -+ cmbRB.add(bundle); -+ if (bundle.equals(preselectedRB)) { -+ cmbRB.select(i); -+ cmbRB.setEnabled(false); -+ } -+ i++; -+ } -+ -+ if (availableBundles.size() > 0) { -+ if (preselectedRB.trim().length() == 0) { -+ cmbRB.select(0); -+ cmbRB.setEnabled(true); -+ } ++ /** ++ * Convenience method, equivalent to: ++ * ++ * return optimalInstance(LZFChunk.MAX_CHUNK_LEN, bufferRecycler); ++ * ++ */ ++ public static ChunkEncoder optimalInstance(BufferRecycler bufferRecycler) { ++ return optimalInstance(LZFChunk.MAX_CHUNK_LEN, bufferRecycler); ++ } ++ ++ /** ++ * Method to use for getting compressor instance that uses the most optimal ++ * available methods for underlying data access. It should be safe to call ++ * this method as implementations are dynamically loaded; however, on some ++ * non-standard platforms it may be necessary to either directly load ++ * instances, or use {@link #safeInstance}. ++ * ++ * @param totalLength Expected total length of content to compress; only matters ++ * for content that is smaller than maximum chunk size (64k), to optimize ++ * encoding hash tables ++ * @param bufferRecycler The BufferRecycler instance ++ */ ++ public static ChunkEncoder optimalInstance(int totalLength, BufferRecycler bufferRecycler) { ++ try { ++ return UnsafeChunkEncoders.createEncoder(totalLength, bufferRecycler); ++ } catch (Exception e) { ++ return safeInstance(totalLength, bufferRecycler); ++ } ++ } ++ ++ /** ++ * Factory method for constructing encoder that is always passed buffer ++ * externally, so that it will not (nor need) allocate encoding buffer. ++ */ ++ public static ChunkEncoder optimalNonAllocatingInstance(int totalLength, BufferRecycler bufferRecycler) { ++ try { ++ return UnsafeChunkEncoders.createNonAllocatingEncoder(totalLength, bufferRecycler); ++ } catch (Exception e) { ++ return safeNonAllocatingInstance(totalLength, bufferRecycler); ++ } ++ } ++ ++ /** ++ * Convenience method, equivalent to: ++ * ++ * return safeInstance(LZFChunk.MAX_CHUNK_LEN, bufferRecycler); ++ * ++ */ ++ public static ChunkEncoder safeInstance(BufferRecycler bufferRecycler) { ++ return safeInstance(LZFChunk.MAX_CHUNK_LEN, bufferRecycler); ++ } ++ /** ++ * Method that can be used to ensure that a ""safe"" compressor instance is loaded. ++ * Safe here means that it should work on any and all Java platforms. ++ * ++ * @param totalLength Expected total length of content to compress; only matters ++ * for content that is smaller than maximum chunk size (64k), to optimize ++ * encoding hash tables ++ * @param bufferRecycler The BufferRecycler instance ++ */ ++ public static ChunkEncoder safeInstance(int totalLength, BufferRecycler bufferRecycler) { ++ return new VanillaChunkEncoder(totalLength, bufferRecycler); ++ } ++ ++ /** ++ * Factory method for constructing encoder that is always passed buffer ++ * externally, so that it will not (nor need) allocate encoding buffer. 
++ */ ++ public static ChunkEncoder safeNonAllocatingInstance(int totalLength, BufferRecycler bufferRecycler) { ++ return VanillaChunkEncoder.nonAllocatingEncoder(totalLength, bufferRecycler); ++ } + } +diff --git a/src/main/java/com/ning/compress/lzf/util/LZFFileInputStream.java b/src/main/java/com/ning/compress/lzf/util/LZFFileInputStream.java +index 27e5d25..19f7caa 100644 +--- a/src/main/java/com/ning/compress/lzf/util/LZFFileInputStream.java ++++ b/src/main/java/com/ning/compress/lzf/util/LZFFileInputStream.java +@@ -77,47 +77,62 @@ public class LZFFileInputStream + */ + + public LZFFileInputStream(File file) throws FileNotFoundException { +- this(file, ChunkDecoderFactory.optimalInstance()); ++ this(file, ChunkDecoderFactory.optimalInstance(), BufferRecycler.instance()); + } + + public LZFFileInputStream(FileDescriptor fdObj) { +- this(fdObj, ChunkDecoderFactory.optimalInstance()); ++ this(fdObj, ChunkDecoderFactory.optimalInstance(), BufferRecycler.instance()); + } + + public LZFFileInputStream(String name) throws FileNotFoundException { +- this(name, ChunkDecoderFactory.optimalInstance()); ++ this(name, ChunkDecoderFactory.optimalInstance(), BufferRecycler.instance()); + } + + public LZFFileInputStream(File file, ChunkDecoder decompressor) throws FileNotFoundException ++ { ++ this(file, decompressor, BufferRecycler.instance()); ++ } ++ ++ public LZFFileInputStream(FileDescriptor fdObj, ChunkDecoder decompressor) ++ { ++ this(fdObj, decompressor, BufferRecycler.instance()); ++ } ++ ++ public LZFFileInputStream(String name, ChunkDecoder decompressor) throws FileNotFoundException ++ { ++ this(name, decompressor, BufferRecycler.instance()); ++ } ++ ++ public LZFFileInputStream(File file, ChunkDecoder decompressor, BufferRecycler bufferRecycler) throws FileNotFoundException + { + super(file); + _decompressor = decompressor; +- _recycler = BufferRecycler.instance(); ++ _recycler = bufferRecycler; + _inputStreamClosed = false; +- _inputBuffer = _recycler.allocInputBuffer(LZFChunk.MAX_CHUNK_LEN); +- _decodedBytes = _recycler.allocDecodeBuffer(LZFChunk.MAX_CHUNK_LEN); ++ _inputBuffer = bufferRecycler.allocInputBuffer(LZFChunk.MAX_CHUNK_LEN); ++ _decodedBytes = bufferRecycler.allocDecodeBuffer(LZFChunk.MAX_CHUNK_LEN); + _wrapper = new Wrapper(); + } + +- public LZFFileInputStream(FileDescriptor fdObj, ChunkDecoder decompressor) ++ public LZFFileInputStream(FileDescriptor fdObj, ChunkDecoder decompressor, BufferRecycler bufferRecycler) + { + super(fdObj); + _decompressor = decompressor; +- _recycler = BufferRecycler.instance(); ++ _recycler = bufferRecycler; + _inputStreamClosed = false; +- _inputBuffer = _recycler.allocInputBuffer(LZFChunk.MAX_CHUNK_LEN); +- _decodedBytes = _recycler.allocDecodeBuffer(LZFChunk.MAX_CHUNK_LEN); ++ _inputBuffer = bufferRecycler.allocInputBuffer(LZFChunk.MAX_CHUNK_LEN); ++ _decodedBytes = bufferRecycler.allocDecodeBuffer(LZFChunk.MAX_CHUNK_LEN); + _wrapper = new Wrapper(); + } + +- public LZFFileInputStream(String name, ChunkDecoder decompressor) throws FileNotFoundException ++ public LZFFileInputStream(String name, ChunkDecoder decompressor, BufferRecycler bufferRecycler) throws FileNotFoundException + { + super(name); + _decompressor = decompressor; +- _recycler = BufferRecycler.instance(); ++ _recycler = bufferRecycler; + _inputStreamClosed = false; +- _inputBuffer = _recycler.allocInputBuffer(LZFChunk.MAX_CHUNK_LEN); +- _decodedBytes = _recycler.allocDecodeBuffer(LZFChunk.MAX_CHUNK_LEN); ++ _inputBuffer = 
bufferRecycler.allocInputBuffer(LZFChunk.MAX_CHUNK_LEN); ++ _decodedBytes = bufferRecycler.allocDecodeBuffer(LZFChunk.MAX_CHUNK_LEN); + _wrapper = new Wrapper(); + } + +diff --git a/src/main/java/com/ning/compress/lzf/util/LZFFileOutputStream.java b/src/main/java/com/ning/compress/lzf/util/LZFFileOutputStream.java +index b20aa5d..3a5f457 100644 +--- a/src/main/java/com/ning/compress/lzf/util/LZFFileOutputStream.java ++++ b/src/main/java/com/ning/compress/lzf/util/LZFFileOutputStream.java +@@ -86,42 +86,65 @@ public LZFFileOutputStream(String name, boolean append) throws FileNotFoundExcep + } + + public LZFFileOutputStream(ChunkEncoder encoder, File file) throws FileNotFoundException { ++ this(encoder, file, encoder.getBufferRecycler()); ++ } ++ ++ public LZFFileOutputStream(ChunkEncoder encoder, File file, boolean append) throws FileNotFoundException { ++ this(encoder, file, append, encoder.getBufferRecycler()); ++ } ++ ++ public LZFFileOutputStream(ChunkEncoder encoder, FileDescriptor fdObj) { ++ this(encoder, fdObj, encoder.getBufferRecycler()); ++ } ++ ++ public LZFFileOutputStream(ChunkEncoder encoder, String name) throws FileNotFoundException { ++ this(encoder, name, encoder.getBufferRecycler()); ++ } ++ ++ public LZFFileOutputStream(ChunkEncoder encoder, String name, boolean append) throws FileNotFoundException { ++ this(encoder, name, append, encoder.getBufferRecycler()); ++ } ++ ++ public LZFFileOutputStream(ChunkEncoder encoder, File file, BufferRecycler bufferRecycler) throws FileNotFoundException { + super(file); + _encoder = encoder; +- _recycler = BufferRecycler.instance(); +- _outputBuffer = _recycler.allocOutputBuffer(OUTPUT_BUFFER_SIZE); ++ if (bufferRecycler==null) { ++ bufferRecycler = encoder.getBufferRecycler(); + } -+ -+ cmbRB.addSelectionListener(new SelectionListener() { -+ -+ @Override -+ public void widgetSelected(SelectionEvent e) { -+ //updateAvailableLanguages(); -+ updateResourceSelector (); -+ } -+ -+ @Override -+ public void widgetDefaultSelected(SelectionEvent e) { -+ //updateAvailableLanguages(); -+ updateResourceSelector (); -+ } -+ }); -+ -+ // init available translations -+ //updateAvailableLanguages(); -+ -+ // init resource selector -+ updateResourceSelector(); -+ -+ // update search options -+ updateSearchOptions(); -+ } -+ -+ protected void updateResourceSelector () { -+ resourceBundle = cmbRB.getText(); -+ resourceSelector.setResourceBundle(resourceBundle); -+ } -+ -+ protected void updateSearchOptions () { -+ searchOption = (btSearchKey.getSelection() ? SEARCH_KEY : SEARCH_FULLTEXT); -+// cmbLanguage.setEnabled(searchOption == SEARCH_FULLTEXT); -+// lblLanguage.setEnabled(cmbLanguage.getEnabled()); -+ -+ // update ResourceSelector -+ resourceSelector.setDisplayMode(searchOption == SEARCH_FULLTEXT ? 
ResourceSelector.DISPLAY_TEXT : ResourceSelector.DISPLAY_KEYS); -+ } -+ -+ protected void updateAvailableLanguages () { -+ cmbLanguage.removeAll(); -+ String selectedBundle = cmbRB.getText(); -+ -+ if (selectedBundle.trim().equals("""")) -+ return; -+ -+ // Retrieve available locales for the selected resource-bundle -+ Set locales = manager.getProvidedLocales(selectedBundle); -+ for (Locale l : locales) { -+ String displayName = l.getDisplayName(); -+ if (displayName.equals("""")) -+ displayName = ResourceBundleManager.defaultLocaleTag; -+ cmbLanguage.add(displayName); -+ } -+ -+// if (locales.size() > 0) { -+// cmbLanguage.select(0); -+ updateSelectedLocale(); -+// } -+ } -+ -+ protected void updateSelectedLocale () { -+ String selectedBundle = cmbRB.getText(); -+ -+ if (selectedBundle.trim().equals("""")) -+ return; -+ -+ Set locales = manager.getProvidedLocales(selectedBundle); -+ Iterator it = locales.iterator(); -+ String selectedLocale = cmbLanguage.getText(); -+ while (it.hasNext()) { -+ Locale l = it.next(); -+ if (l.getDisplayName().equals(selectedLocale)) { -+ resourceSelector.setDisplayLocale(l); -+ break; -+ } -+ } -+ } -+ -+ protected void initLayout(Composite parent) { -+ final GridLayout layout = new GridLayout(1, true); -+ parent.setLayout(layout); -+ } -+ -+ protected void constructSearchSection (Composite parent) { -+ final Group group = new Group (parent, SWT.NONE); -+ group.setLayoutData(new GridData(GridData.FILL, GridData.CENTER, true, false, 1, 1)); -+ group.setText(""Resource selection""); -+ -+ // define grid data for this group -+ GridData gridData = new GridData(); -+ gridData.horizontalAlignment = SWT.FILL; -+ gridData.grabExcessHorizontalSpace = true; -+ group.setLayoutData(gridData); -+ group.setLayout(new GridLayout(2, false)); -+ // TODO export as help text -+ -+ final Label spacer = new Label (group, SWT.NONE | SWT.LEFT); -+ spacer.setLayoutData(new GridData(GridData.BEGINNING, GridData.CENTER, false, false, 1, 1)); -+ -+ final Label infoLabel = new Label (group, SWT.NONE | SWT.LEFT); -+ GridData infoGrid = new GridData(GridData.BEGINNING, GridData.BEGINNING, false, false, 1, 1); -+ infoGrid.heightHint = 70; -+ infoLabel.setLayoutData(infoGrid); -+ infoLabel.setText(""Select the resource that needs to be refrenced. This is accomplished in two\n"" + -+ ""steps. First select the Resource-Bundle in which the resource is located. 
\n"" + -+ ""In a last step you need to choose a particular resource.""); -+ -+ // Resource-Bundle -+ final Label lblRB = new Label (group, SWT.NONE); -+ lblRB.setLayoutData(new GridData(GridData.END, GridData.CENTER, false, false, 1, 1)); -+ lblRB.setText(""Resource-Bundle:""); -+ -+ cmbRB = new Combo (group, SWT.DROP_DOWN | SWT.SIMPLE); -+ cmbRB.setLayoutData(new GridData(GridData.FILL, GridData.CENTER, true, false, 1, 1)); -+ cmbRB.addModifyListener(new ModifyListener() { -+ @Override -+ public void modifyText(ModifyEvent e) { -+ selectedRB = cmbRB.getText(); -+ validate(); -+ } -+ }); -+ -+ // Search-Options -+ final Label spacer2 = new Label (group, SWT.NONE | SWT.LEFT); -+ spacer2.setLayoutData(new GridData(GridData.BEGINNING, GridData.CENTER, false, false, 1, 1)); -+ -+ Composite searchOptions = new Composite(group, SWT.NONE); -+ searchOptions.setLayout(new GridLayout (2, true)); -+ -+ btSearchText = new Button (searchOptions, SWT.RADIO); -+ btSearchText.setText(""Flat""); -+ btSearchText.setSelection(searchOption == SEARCH_FULLTEXT); -+ btSearchText.addSelectionListener(new SelectionListener() { -+ -+ @Override -+ public void widgetSelected(SelectionEvent e) { -+ updateSearchOptions(); -+ } -+ -+ @Override -+ public void widgetDefaultSelected(SelectionEvent e) { -+ updateSearchOptions(); -+ } -+ }); -+ -+ btSearchKey = new Button (searchOptions, SWT.RADIO); -+ btSearchKey.setText(""Hierarchical""); -+ btSearchKey.setSelection(searchOption == SEARCH_KEY); -+ btSearchKey.addSelectionListener(new SelectionListener() { -+ -+ @Override -+ public void widgetSelected(SelectionEvent e) { -+ updateSearchOptions(); -+ } -+ -+ @Override -+ public void widgetDefaultSelected(SelectionEvent e) { -+ updateSearchOptions(); -+ } -+ }); -+ -+ // Sprache -+// lblLanguage = new Label (group, SWT.NONE); -+// lblLanguage.setLayoutData(new GridData(GridData.END, GridData.CENTER, false, false, 1, 1)); -+// lblLanguage.setText(""Language (Country):""); -+// -+// cmbLanguage = new Combo (group, SWT.DROP_DOWN | SWT.SIMPLE); -+// cmbLanguage.setLayoutData(new GridData(GridData.FILL, GridData.CENTER, true, false, 1, 1)); -+// cmbLanguage.addSelectionListener(new SelectionListener () { -+// -+// @Override -+// public void widgetDefaultSelected(SelectionEvent e) { -+// updateSelectedLocale(); -+// } -+// -+// @Override -+// public void widgetSelected(SelectionEvent e) { -+// updateSelectedLocale(); -+// } -+// -+// }); -+// cmbLanguage.addModifyListener(new ModifyListener() { -+// @Override -+// public void modifyText(ModifyEvent e) { -+// selectedLocale = LocaleUtils.getLocaleByDisplayName(manager.getProvidedLocales(selectedRB), cmbLanguage.getText()); -+// validate(); -+// } -+// }); -+ -+ // Filter -+// final Label lblKey = new Label (group, SWT.NONE | SWT.RIGHT); -+// GridData lblKeyGrid = new GridData(GridData.END, GridData.CENTER, false, false, 1, 1); -+// lblKeyGrid.widthHint = WIDTH_LEFT_COLUMN; -+// lblKey.setLayoutData(lblKeyGrid); -+// lblKey.setText(""Filter:""); -+// -+// txtKey = new Text (group, SWT.BORDER); -+// txtKey.setLayoutData(new GridData(GridData.FILL, GridData.CENTER, true, false, 1, 1)); -+ -+ // Add selector for property keys -+ final Label lblKeys = new Label (group, SWT.NONE); -+ lblKeys.setLayoutData(new GridData(GridData.END, GridData.BEGINNING, false, false, 1, 1)); -+ lblKeys.setText(""Resource:""); -+ -+ resourceSelector = new ResourceSelector (group, SWT.NONE, manager, cmbRB.getText(), searchOption, null, true); -+ GridData resourceSelectionData = new GridData(GridData.FILL, 
GridData.CENTER, true, false, 1, 1); -+ resourceSelectionData.heightHint = 150; -+ resourceSelectionData.widthHint = 400; -+ resourceSelector.setLayoutData(resourceSelectionData); -+ resourceSelector.addSelectionChangedListener(new IResourceSelectionListener() { -+ -+ @Override -+ public void selectionChanged(ResourceSelectionEvent e) { -+ selectedKey = e.getSelectedKey(); -+ updatePreviewLabel(e.getSelectionSummary()); -+ validate(); -+ } -+ }); -+ -+// final Label spacer = new Label (group, SWT.SEPARATOR | SWT.HORIZONTAL); -+// spacer.setLayoutData(new GridData(GridData.BEGINNING, GridData.CENTER, true, false, 2, 1)); -+ -+ // Preview -+ final Label lblText = new Label (group, SWT.NONE | SWT.RIGHT); -+ GridData lblTextGrid = new GridData(GridData.END, GridData.CENTER, false, false, 1, 1); -+ lblTextGrid.heightHint = 120; -+ lblTextGrid.widthHint = 100; -+ lblText.setLayoutData(lblTextGrid); -+ lblText.setText(""Preview:""); -+ -+ txtPreviewText = new Text (group, SWT.BORDER | SWT.MULTI | SWT.V_SCROLL); -+ txtPreviewText.setEditable(false); -+ GridData lblTextGrid2 = new GridData(GridData.FILL, GridData.FILL, true, true, 1, 1); -+ txtPreviewText.setLayoutData(lblTextGrid2); -+ } -+ -+ @Override -+ protected void configureShell(Shell newShell) { -+ super.configureShell(newShell); -+ newShell.setText(""Select Resource-Bundle entry""); -+ } -+ -+ @Override -+ public void create() { -+ // TODO Auto-generated method stub -+ super.create(); -+ this.setTitle(""Select a Resource-Bundle entry""); -+ this.setMessage(""Please, select a resource of a particular Resource-Bundle""); -+ } ++ _recycler = bufferRecycler; ++ _outputBuffer = bufferRecycler.allocOutputBuffer(OUTPUT_BUFFER_SIZE); + _wrapper = new Wrapper(); + } + +- public LZFFileOutputStream(ChunkEncoder encoder, File file, boolean append) throws FileNotFoundException { ++ public LZFFileOutputStream(ChunkEncoder encoder, File file, boolean append, BufferRecycler bufferRecycler) throws FileNotFoundException { + super(file, append); + _encoder = encoder; +- _recycler = BufferRecycler.instance(); +- _outputBuffer = _recycler.allocOutputBuffer(OUTPUT_BUFFER_SIZE); ++ _recycler = bufferRecycler; ++ _outputBuffer = bufferRecycler.allocOutputBuffer(OUTPUT_BUFFER_SIZE); + _wrapper = new Wrapper(); + } + +- public LZFFileOutputStream(ChunkEncoder encoder, FileDescriptor fdObj) { ++ public LZFFileOutputStream(ChunkEncoder encoder, FileDescriptor fdObj, BufferRecycler bufferRecycler) { + super(fdObj); + _encoder = encoder; +- _recycler = BufferRecycler.instance(); +- _outputBuffer = _recycler.allocOutputBuffer(OUTPUT_BUFFER_SIZE); ++ _recycler = bufferRecycler; ++ _outputBuffer = bufferRecycler.allocOutputBuffer(OUTPUT_BUFFER_SIZE); + _wrapper = new Wrapper(); + } + +- public LZFFileOutputStream(ChunkEncoder encoder, String name) throws FileNotFoundException { ++ public LZFFileOutputStream(ChunkEncoder encoder, String name, BufferRecycler bufferRecycler) throws FileNotFoundException { + super(name); + _encoder = encoder; +- _recycler = BufferRecycler.instance(); +- _outputBuffer = _recycler.allocOutputBuffer(OUTPUT_BUFFER_SIZE); ++ _recycler = bufferRecycler; ++ _outputBuffer = bufferRecycler.allocOutputBuffer(OUTPUT_BUFFER_SIZE); + _wrapper = new Wrapper(); + } + +- public LZFFileOutputStream(ChunkEncoder encoder, String name, boolean append) throws FileNotFoundException { ++ public LZFFileOutputStream(ChunkEncoder encoder, String name, boolean append, BufferRecycler bufferRecycler) throws FileNotFoundException { + super(name, append); + _encoder = 
encoder; +- _recycler = BufferRecycler.instance(); +- _outputBuffer = _recycler.allocOutputBuffer(OUTPUT_BUFFER_SIZE); ++ _recycler = bufferRecycler; ++ _outputBuffer = bufferRecycler.allocOutputBuffer(OUTPUT_BUFFER_SIZE); + _wrapper = new Wrapper(); + }" +d3ec4cab768b7062fa7625d623402f25f349028c,Mylyn Reviews,"380843: index commit messages of changesets + +Created custom filter for changesets, which works better with task +numbers like used in jira (Prefix-Number). + +Task-Url: https://bugs.eclipse.org/bugs/show_bug.cgi?id=380843 +Change-Id: Ice1cf8e3a558c36d9e832312a0a6259e7f6e7e86 +",a,https://github.com/eclipse-mylyn/org.eclipse.mylyn.reviews,"diff --git a/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/internal/BracketFilter.java b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/internal/BracketFilter.java +new file mode 100644 +index 00000000..df4575c3 +--- /dev/null ++++ b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/internal/BracketFilter.java +@@ -0,0 +1,51 @@ ++/******************************************************************************* ++ * Copyright (c) 2012 Research Group for Industrial Software (INSO), Vienna University of Technology. ++ * All rights reserved. This program and the accompanying materials ++ * are made available under the terms of the Eclipse Public License v1.0 ++ * which accompanies this distribution, and is available at ++ * http://www.eclipse.org/legal/epl-v10.html ++ * ++ * Contributors: ++ * Research Group for Industrial Software (INSO), Vienna University of Technology - initial API and implementation ++ *******************************************************************************/ ++package org.eclipse.mylyn.versions.tasks.mapper.internal; + -+ protected void updatePreviewLabel (String previewText) { -+ txtPreviewText.setText(previewText); -+ } -+ -+ protected void validate () { -+ // Check Resource-Bundle ids -+ boolean rbValid = false; -+ boolean localeValid = false; -+ boolean keyValid = false; -+ -+ for (String rbId : this.availableBundles) { -+ if (rbId.equals(selectedRB)) { -+ rbValid = true; -+ break; -+ } -+ } -+ -+ if (selectedLocale != null) -+ localeValid = true; -+ -+ if (manager.isResourceExisting(selectedRB, selectedKey)) -+ keyValid = true; -+ -+ // print Validation summary -+ String errorMessage = null; -+ if (! rbValid) -+ errorMessage = ""The specified Resource-Bundle does not exist""; -+// else if (! localeValid) -+// errorMessage = ""The specified Locale does not exist for the selecte Resource-Bundle""; -+ else if (! 
keyValid) -+ errorMessage = ""No resource selected""; -+ else { -+ if (okButton != null) -+ okButton.setEnabled(true); -+ } ++import java.util.Arrays; + -+ setErrorMessage(errorMessage); -+ if (okButton != null && errorMessage != null) -+ okButton.setEnabled(false); -+ } ++import org.apache.lucene.analysis.TokenFilter; ++import org.apache.lucene.analysis.TokenStream; ++import org.apache.lucene.analysis.tokenattributes.TermAttribute; + -+ @Override -+ protected void createButtonsForButtonBar(Composite parent) { -+ okButton = createButton (parent, OK, ""Ok"", true); -+ okButton.addSelectionListener (new SelectionAdapter() { -+ public void widgetSelected(SelectionEvent e) { -+ // Set return code -+ setReturnCode(OK); -+ close(); -+ } -+ }); -+ -+ cancelButton = createButton (parent, CANCEL, ""Cancel"", false); -+ cancelButton.addSelectionListener (new SelectionAdapter() { -+ public void widgetSelected(SelectionEvent e) { -+ setReturnCode (CANCEL); -+ close(); -+ } -+ }); -+ -+ okButton.setEnabled(false); -+ cancelButton.setEnabled(true); -+ } -+ -+ public String getSelectedResourceBundle () { -+ return selectedRB; -+ } -+ -+ public String getSelectedResource () { -+ return selectedKey; -+ } -+ -+ public Locale getSelectedLocale () { -+ return selectedLocale; ++/** ++ * ++ * @author Kilian Matt ++ * ++ */ ++public class BracketFilter extends TokenFilter { ++ private TermAttribute attribute; ++ private char[] illegalChars= {'(',')','[',']','{','}'}; ++ public BracketFilter(TokenStream in) { ++ super(in); ++ attribute = (TermAttribute) addAttribute(TermAttribute.class); ++ Arrays.sort(illegalChars); ++ } ++ ++ /** Returns the next token in the stream, or null at EOS. ++ *

Strips the bracket characters (), [] and {} from the current token, as configured in illegalChars above. ++ * Removes 's from the end of words. ++ *
Removes dots from acronyms. ++ */ ++ public final boolean incrementToken() throws java.io.IOException { ++ if(!input.incrementToken()) return false; ++ ++ char[] buffer = attribute.termBuffer(); ++ char[] target = new char[buffer.length]; ++ int targetPos=0; ++ for(int i =0; i < buffer.length; i++) { ++ if(Arrays.binarySearch(illegalChars,buffer[i])<0) { ++ target[targetPos++] = buffer[i]; ++ } ++ } ++ attribute.setTermBuffer(target,0,targetPos); ++ return true; + } +} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/dialogs/ResourceBundleSelectionDialog.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/dialogs/ResourceBundleSelectionDialog.java +diff --git a/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/internal/ChangeSetAnalyzer.java b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/internal/ChangeSetAnalyzer.java new file mode 100644 -index 00000000..5d73db2e +index 00000000..ae61a595 --- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/dialogs/ResourceBundleSelectionDialog.java -@@ -0,0 +1,110 @@ -+package org.eclipselabs.tapiji.tools.core.ui.dialogs; -+ -+import java.util.List; -+ -+import org.eclipse.core.resources.IProject; -+import org.eclipse.jface.viewers.ILabelProvider; -+import org.eclipse.jface.viewers.ILabelProviderListener; -+import org.eclipse.jface.viewers.IStructuredContentProvider; -+import org.eclipse.jface.viewers.Viewer; -+import org.eclipse.swt.graphics.Image; -+import org.eclipse.swt.widgets.Shell; -+import org.eclipse.ui.dialogs.ListDialog; -+import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; -+import org.eclipselabs.tapiji.tools.core.util.ImageUtils; -+ -+ -+public class ResourceBundleSelectionDialog extends ListDialog { -+ -+ private IProject project; -+ -+ public ResourceBundleSelectionDialog(Shell parent, IProject project) { -+ super(parent); -+ this.project = project; -+ -+ initDialog (); -+ } -+ -+ protected void initDialog () { -+ this.setAddCancelButton(true); -+ this.setMessage(""Select one of the following Resource-Bundle to open:""); -+ this.setTitle(""Resource-Bundle Selector""); -+ this.setContentProvider(new RBContentProvider()); -+ this.setLabelProvider(new RBLabelProvider()); -+ this.setBlockOnOpen(true); -+ -+ if (project != null) -+ this.setInput(ResourceBundleManager.getManager(project).getResourceBundleNames()); -+ else -+ this.setInput(ResourceBundleManager.getAllResourceBundleNames()); -+ } -+ -+ public String getSelectedBundleId () { -+ Object[] selection = this.getResult(); -+ if (selection != null && selection.length > 0) -+ return (String) selection[0]; -+ return null; -+ } -+ -+ class RBContentProvider implements IStructuredContentProvider { ++++ b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/internal/ChangeSetAnalyzer.java +@@ -0,0 +1,43 @@ ++/******************************************************************************* ++ * Copyright (c) 2012 Research Group for Industrial Software (INSO), Vienna University of Technology. ++ * All rights reserved. 
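The BracketFilter recorded above strips (), [] and {} from each Lucene token so that a commit message containing "(SPR-9030)" can still be matched by a prefix query on the task key "SPR-9030", which is what this 380843 change is after. Below is a minimal, self-contained sketch of that stripping step only, in plain Java rather than the Lucene 2.x TokenFilter/TermAttribute plumbing; the class and method names are illustrative and not part of the Mylyn code.

import java.util.Arrays;

// Standalone sketch of the character-stripping step used by BracketFilter above.
// It is not the Mylyn class itself, just the same idea applied to a plain String.
public class BracketStripSketch {

    // Kept sorted so Arrays.binarySearch can be used, as in the original filter.
    private static final char[] ILLEGAL_CHARS = {'(', ')', '[', ']', '{', '}'};
    static {
        Arrays.sort(ILLEGAL_CHARS);
    }

    static String strip(String token) {
        char[] buffer = token.toCharArray();
        char[] target = new char[buffer.length];
        int targetPos = 0;
        for (char c : buffer) {
            // Keep only characters that are not in the illegal set.
            if (Arrays.binarySearch(ILLEGAL_CHARS, c) < 0) {
                target[targetPos++] = c;
            }
        }
        return new String(target, 0, targetPos);
    }

    public static void main(String[] args) {
        // "(SPR-9030)" in a commit message should still match the task key "SPR-9030".
        System.out.println(strip("(SPR-9030)")); // prints SPR-9030
    }
}

Using whitespace tokenization plus this filter (as ChangeSetAnalyzer does for the commit-message field) keeps Jira-style Prefix-Number keys intact instead of splitting them the way a standard analyzer would.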
This program and the accompanying materials ++ * are made available under the terms of the Eclipse Public License v1.0 ++ * which accompanies this distribution, and is available at ++ * http://www.eclipse.org/legal/epl-v10.html ++ * ++ * Contributors: ++ * Research Group for Industrial Software (INSO), Vienna University of Technology - initial API and implementation ++ *******************************************************************************/ ++package org.eclipse.mylyn.versions.tasks.mapper.internal; + -+ @Override -+ public Object[] getElements(Object inputElement) { -+ List resources = (List) inputElement; -+ return resources.toArray(); -+ } ++import java.io.Reader; + -+ @Override -+ public void dispose() { -+ // TODO Auto-generated method stub -+ -+ } ++import org.apache.lucene.analysis.Analyzer; ++import org.apache.lucene.analysis.KeywordAnalyzer; ++import org.apache.lucene.analysis.PerFieldAnalyzerWrapper; ++import org.apache.lucene.analysis.TokenStream; ++import org.apache.lucene.analysis.WhitespaceAnalyzer; ++import org.apache.lucene.analysis.standard.StandardAnalyzer; ++import org.apache.lucene.util.Version; + -+ @Override -+ public void inputChanged(Viewer viewer, Object oldInput, Object newInput) { -+ // TODO Auto-generated method stub -+ ++/** ++ * ++ * @author Kilian Matt ++ * ++ */ ++public class ChangeSetAnalyzer { ++ public static Analyzer get() { ++ PerFieldAnalyzerWrapper analyzer = new PerFieldAnalyzerWrapper(new StandardAnalyzer(Version.LUCENE_CURRENT)); ++ analyzer.addAnalyzer(IndexedFields.REPOSITORY.getIndexKey(), new KeywordAnalyzer()); ++ analyzer.addAnalyzer(IndexedFields.COMMIT_MESSAGE.getIndexKey(), new Analyzer() { ++ @Override ++ public TokenStream tokenStream(String fieldName, Reader reader) { ++ WhitespaceAnalyzer delegate =new WhitespaceAnalyzer(); ++ TokenStream tokenStream = delegate.tokenStream(fieldName, reader); ++ BracketFilter filteredStream = new BracketFilter(tokenStream); ++ return filteredStream; ++ } ++ }); ++ return analyzer; + } -+ + } -+ -+ class RBLabelProvider implements ILabelProvider { -+ -+ @Override -+ public Image getImage(Object element) { -+ // TODO Auto-generated method stub -+ return ImageUtils.getImage(ImageUtils.IMAGE_RESOURCE_BUNDLE); -+ } +\ No newline at end of file +diff --git a/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/internal/ChangeSetIndexer.java b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/internal/ChangeSetIndexer.java +index d7460e44..08a0058b 100644 +--- a/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/internal/ChangeSetIndexer.java ++++ b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/internal/ChangeSetIndexer.java +@@ -9,14 +9,11 @@ + * Research Group for Industrial Software (INSO), Vienna University of Technology - initial API and implementation + *******************************************************************************/ + package org.eclipse.mylyn.versions.tasks.mapper.internal; + -+ @Override -+ public String getText(Object element) { -+ // TODO Auto-generated method stub -+ return ((String) element); -+ } + import java.io.File; + import java.io.FileNotFoundException; + import java.io.IOException; + +-import org.apache.lucene.LucenePackage; +-import org.apache.lucene.analysis.KeywordAnalyzer; +-import org.apache.lucene.analysis.PerFieldAnalyzerWrapper; +-import 
org.apache.lucene.analysis.standard.StandardAnalyzer; + import org.apache.lucene.document.Document; + import org.apache.lucene.document.Field; + import org.apache.lucene.document.Field.Store; +@@ -25,22 +22,15 @@ + import org.apache.lucene.index.IndexWriter; + import org.apache.lucene.index.IndexWriter.MaxFieldLength; + import org.apache.lucene.index.Term; ++import org.apache.lucene.search.BooleanClause.Occur; + import org.apache.lucene.search.BooleanQuery; +-import org.apache.lucene.search.DisjunctionMaxQuery; + import org.apache.lucene.search.IndexSearcher; +-import org.apache.lucene.search.PhraseQuery; + import org.apache.lucene.search.PrefixQuery; + import org.apache.lucene.search.Query; + import org.apache.lucene.search.ScoreDoc; + import org.apache.lucene.search.TermQuery; + import org.apache.lucene.search.TopDocs; +-import org.apache.lucene.search.BooleanClause.Occur; +-import org.apache.lucene.search.spans.SpanOrQuery; +-import org.apache.lucene.search.spans.SpanQuery; +-import org.apache.lucene.search.spans.SpanTermQuery; + import org.apache.lucene.store.NIOFSDirectory; +-import org.apache.lucene.util.Version; +-import org.eclipse.core.resources.IFile; + import org.eclipse.core.runtime.CoreException; + import org.eclipse.core.runtime.IProgressMonitor; + import org.eclipse.mylyn.tasks.core.ITask; +@@ -51,7 +41,7 @@ + import org.eclipse.mylyn.versions.tasks.mapper.generic.IChangeSetSource; + + /** +- * ++ * + * @author Kilian Matt + */ + public class ChangeSetIndexer implements IChangeSetIndexSearcher { +@@ -67,12 +57,10 @@ public ChangeSetIndexer(File directory, IChangeSetSource source) { + this.source = source; + } + + -+ @Override -+ public void addListener(ILabelProviderListener listener) { -+ // TODO Auto-generated method stub -+ -+ } + public void reindex(IProgressMonitor monitor) { + try { +- PerFieldAnalyzerWrapper analyzer = new PerFieldAnalyzerWrapper(new StandardAnalyzer(Version.LUCENE_CURRENT)); +- analyzer.addAnalyzer(IndexedFields.REPOSITORY.getIndexKey(), new KeywordAnalyzer()); +- indexWriter = new IndexWriter(new NIOFSDirectory(indexDirectory), +- analyzer, true, MaxFieldLength.UNLIMITED); ++ indexWriter = new IndexWriter(new NIOFSDirectory(indexDirectory),ChangeSetAnalyzer.get(), true, MaxFieldLength.UNLIMITED); + + IChangeSetIndexer indexer = new IChangeSetIndexer() { + +@@ -96,29 +84,29 @@ public void index(ChangeSet changeset) { + } + } + +- public int search(ITask task, String scmRepositoryUrl, int resultsLimit, IChangeSetCollector collector) throws CoreException { ++ public int search(ITask task, String scmRepositoryUrl, int resultsLimit,IChangeSetCollector collector) throws CoreException { + int count = 0; + IndexReader indexReader = getIndexReader(); + if (indexReader != null) { + IndexSearcher indexSearcher = new IndexSearcher(indexReader); + try { +- Query query = createQuery(task,scmRepositoryUrl); ++ Query query = createQuery(task, scmRepositoryUrl); + TopDocs results = indexSearcher.search(query, resultsLimit); + for (ScoreDoc scoreDoc : results.scoreDocs) { + Document document = indexReader.document(scoreDoc.doc); + count++; +- if(count > resultsLimit) ++ if (count > resultsLimit) + break; +- +- + -+ @Override -+ public void dispose() { -+ // TODO Auto-generated method stub -+ -+ } + String revision = document.getField(IndexedFields.REVISION.getIndexKey()).stringValue(); +- String repositoryUrl =document.getField(IndexedFields.REPOSITORY.getIndexKey()).stringValue(); +- ++ String repositoryUrl = 
document.getField(IndexedFields.REPOSITORY.getIndexKey()).stringValue(); + -+ @Override -+ public boolean isLabelProperty(Object element, String property) { -+ // TODO Auto-generated method stub -+ return false; -+ } + collector.collect(revision, repositoryUrl); + } + } catch (IOException e) { +-// StatusHandler.log(new Status(IStatus.ERROR, org.eclipse.mylyn.versions.tasks.ui.internal.TaPLUGIN_ID, +-//""Unexpected failure within task list index"", e)); //$NON-NLS-1$ ++ // StatusHandler.log(new Status(IStatus.ERROR, ++ // org.eclipse.mylyn.versions.tasks.ui.internal.TaPLUGIN_ID, ++ //""Unexpected failure within task list index"", e)); //$NON-NLS-1$ + } finally { + try { + indexSearcher.close(); +@@ -132,20 +120,22 @@ public int search(ITask task, String scmRepositoryUrl, int resultsLimit, IChange + } + + private Query createQuery(ITask task, String repositoryUrl) { +- BooleanQuery query =new BooleanQuery(); ++ BooleanQuery query = new BooleanQuery(); + query.setMinimumNumberShouldMatch(1); +- query.add(new TermQuery(new Term(IndexedFields.REPOSITORY.getIndexKey(),repositoryUrl)),Occur.MUST); +- query.add(new PrefixQuery(new Term(IndexedFields.COMMIT_MESSAGE.getIndexKey(),task.getUrl())),Occur.SHOULD); +- query.add(new PrefixQuery(new Term(IndexedFields.COMMIT_MESSAGE.getIndexKey(),task.getTaskId())),Occur.SHOULD); ++ query.add(new TermQuery(new Term(IndexedFields.REPOSITORY.getIndexKey(), repositoryUrl)), Occur.MUST); ++ query.add(new PrefixQuery(new Term(IndexedFields.COMMIT_MESSAGE.getIndexKey(), task.getUrl())), Occur.SHOULD); ++ query.add(new PrefixQuery(new Term(IndexedFields.COMMIT_MESSAGE.getIndexKey(), task.getTaskId())), Occur.SHOULD); ++ query.add(new PrefixQuery(new Term(IndexedFields.COMMIT_MESSAGE.getIndexKey(), task.getTaskKey())), Occur.SHOULD); ++ + return query; + } + +- + private IndexReader getIndexReader() { + try { + synchronized (this) { + if (indexReader == null) { +- indexReader = IndexReader.open(new NIOFSDirectory(indexDirectory), true); ++ indexReader = IndexReader.open(new NIOFSDirectory( ++ indexDirectory), true); + } + return indexReader; + } +diff --git a/tbr/org.eclipse.mylyn.versions.tasks.mapper.tests/src/org/eclipse/mylyn/versions/tasks/mapper/internal/ChangeSetIndexerTest.java b/tbr/org.eclipse.mylyn.versions.tasks.mapper.tests/src/org/eclipse/mylyn/versions/tasks/mapper/internal/ChangeSetIndexerTest.java +index f909b736..f7aa8c42 100644 +--- a/tbr/org.eclipse.mylyn.versions.tasks.mapper.tests/src/org/eclipse/mylyn/versions/tasks/mapper/internal/ChangeSetIndexerTest.java ++++ b/tbr/org.eclipse.mylyn.versions.tasks.mapper.tests/src/org/eclipse/mylyn/versions/tasks/mapper/internal/ChangeSetIndexerTest.java +@@ -36,15 +36,16 @@ + import org.junit.Test; + + /** +- * ++ * + * @author Kilian Matt + * + */ ++@SuppressWarnings(""restriction"") + public class ChangeSetIndexerTest { + + protected static final String REPO_URL = ""http://git.eclipse.org/c/mylyn/org.eclipse.mylyn.versions.git""; + private ChangeSetIndexer indexer; +- + -+ @Override -+ public void removeListener(ILabelProviderListener listener) { -+ // TODO Auto-generated method stub -+ -+ } -+ + @Before + public void prepareIndex() { + File dir = createTempDirectoryForIndex(); +@@ -61,7 +62,7 @@ public void testSingleResult() throws CoreException{ + collectors.expect(""1"", REPO_URL); + assertEquals(1, indexer.search(task,REPO_URL, 5,collectors)); + collectors.verifyAllExpectations(); +- } + } -+} -diff --git 
a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/filters/PropertiesFileFilter.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/filters/PropertiesFileFilter.java -new file mode 100644 -index 00000000..cf92e76f ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/filters/PropertiesFileFilter.java -@@ -0,0 +1,31 @@ -+package org.eclipselabs.tapiji.tools.core.ui.filters; -+ -+import org.eclipse.core.resources.IFile; -+import org.eclipse.jface.viewers.Viewer; -+import org.eclipse.jface.viewers.ViewerFilter; -+ -+public class PropertiesFileFilter extends ViewerFilter { -+ -+ private boolean debugEnabled = true; -+ -+ public PropertiesFileFilter() { -+ + @Test + public void testMultipleResults() throws CoreException{ + ITask task = new MockTask(REPO_URL,""2""); +@@ -73,13 +74,33 @@ public void testMultipleResults() throws CoreException{ + collectors.verifyAllExpectations(); + } + ++ @Test ++ public void testFindByTaskUrl() throws CoreException{ ++ ITask task = new MockTask(REPO_URL,""4""); ++ task.setUrl(REPO_URL+""/4""); ++ ExpectingChangeSetCollector collectors= new ExpectingChangeSetCollector(); ++ collectors.expect(""4"", REPO_URL); ++ assertEquals(1, indexer.search(task,REPO_URL, 5,collectors)); ++ collectors.verifyAllExpectations(); + } ++ @Test ++ public void testComplexTaskKeys() throws CoreException{ ++ ITask task = new MockTask(REPO_URL,""2131""); ++ task.setTaskKey(""SPR-9030""); ++ task.setUrl(REPO_URL+""/1""); ++ ExpectingChangeSetCollector collectors= new ExpectingChangeSetCollector(); ++ collectors.expect(""5"", REPO_URL); ++ collectors.expect(""6"", REPO_URL); ++ assertEquals(2, indexer.search(task,REPO_URL, 5,collectors)); ++ collectors.verifyAllExpectations(); ++ } + + static class ExpectingChangeSetCollector implements IChangeSetCollector{ + private List expected=new LinkedList(); + void expect(String revision, String repositoryUrl){ + this.expected.add(new Pair(revision,repositoryUrl)); + } +- + -+ @Override -+ public boolean select(Viewer viewer, Object parentElement, Object element) { -+ if (debugEnabled) -+ return true; -+ -+ if (element.getClass().getSimpleName().equals(""CompilationUnit"")) -+ return false; -+ -+ if (!(element instanceof IFile)) -+ return true; -+ -+ IFile file = (IFile) element; -+ -+ return file.getFileExtension().equalsIgnoreCase(""properties""); -+ } + public void verifyAllExpectations() { + if(expected.size()>0){ + fail( expected.size() + "" expected changesets not collected""); +@@ -108,16 +129,20 @@ public void collect(String revision, String repositoryUrl) + } + private ListChangeSetSource createIndexerSource() { + ScmRepository repository=new ScmRepository(null, """", REPO_URL); +- ScmRepository otherRepo=new ScmRepository(null, """", ""http://git.eclipse.org/c/mylyn/org.eclipse.mylyn.reviews.git""); ++ ScmRepository otherRepo=new ScmRepository(null, """", ""http://git.eclipse.org/c/mylyn/org.eclipse.mylyn.reviews.git""); + ListChangeSetSource source = new ListChangeSetSource(Arrays.asList( + new ChangeSet(new ScmUser(""test"", ""Name"", ""test@eclipse.org""), new Date(), ""1"", ""commit message 1"", repository, new ArrayList()), + new ChangeSet(new ScmUser(""test"", ""Name"", ""test@eclipse.org""), new Date(), ""1"", ""commit message 1"", otherRepo, new ArrayList()), + new ChangeSet(new ScmUser(""test"", ""Name"", ""test@eclipse.org""), new Date(), ""2"", ""commit message 2"", repository, new ArrayList()), +- new ChangeSet(new 
ScmUser(""test"", ""Name"", ""test@eclipse.org""), new Date(), ""3"", ""commit message 2"", repository, new ArrayList()) ++ new ChangeSet(new ScmUser(""test"", ""Name"", ""test@eclipse.org""), new Date(), ""3"", ""commit message 2"", repository, new ArrayList()), ++ new ChangeSet(new ScmUser(""test"", ""Name"", ""test@eclipse.org""), new Date(), ""4"", ""another commit message with url http://git.eclipse.org/c/mylyn/org.eclipse.mylyn.versions.git/4 "", repository, new ArrayList()), ++ ++ new ChangeSet(new ScmUser(""test"", ""Name"", ""test@eclipse.org""), new Date(), ""5"", ""SPR-9030: Test"", repository, new ArrayList()), ++ new ChangeSet(new ScmUser(""test"", ""Name"", ""test@eclipse.org""), new Date(), ""6"", ""Fixed Bug (SPR-9030)"", repository, new ArrayList()) + )); + return source; + } +- + -+} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/markers/StringLiterals.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/markers/StringLiterals.java -new file mode 100644 -index 00000000..8d6e57cd ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/markers/StringLiterals.java -@@ -0,0 +1,5 @@ -+package org.eclipselabs.tapiji.tools.core.ui.markers; + class ListChangeSetSource implements IChangeSetSource { + private List changesets; + public ListChangeSetSource(List changesets){ +@@ -131,7 +156,7 @@ public void fetchAllChangesets(IProgressMonitor monitor, + } + } + } +- + -+public class StringLiterals { + private File createTempDirectoryForIndex() { + File dir = null; + try { +@@ -144,7 +169,7 @@ private File createTempDirectoryForIndex() { + } + return dir; + } +- +- +- + -+} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/menus/InternationalizationMenu.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/menus/InternationalizationMenu.java -new file mode 100644 -index 00000000..3f6493fd ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/menus/InternationalizationMenu.java -@@ -0,0 +1,373 @@ -+package org.eclipselabs.tapiji.tools.core.ui.menus; + -+import java.util.Collection; -+import java.util.HashSet; -+import java.util.Iterator; -+import java.util.List; -+import java.util.Locale; + -+import org.eclipse.core.resources.IProject; -+import org.eclipse.core.resources.IResource; -+import org.eclipse.core.runtime.IAdaptable; -+import org.eclipse.core.runtime.IProgressMonitor; -+import org.eclipse.jdt.core.IJavaElement; -+import org.eclipse.jdt.core.IPackageFragment; -+import org.eclipse.jface.action.ContributionItem; -+import org.eclipse.jface.dialogs.InputDialog; -+import org.eclipse.jface.dialogs.MessageDialog; -+import org.eclipse.jface.operation.IRunnableWithProgress; -+import org.eclipse.jface.viewers.ISelection; -+import org.eclipse.jface.viewers.IStructuredSelection; -+import org.eclipse.swt.SWT; -+import org.eclipse.swt.custom.BusyIndicator; -+import org.eclipse.swt.events.MenuAdapter; -+import org.eclipse.swt.events.MenuEvent; -+import org.eclipse.swt.events.SelectionAdapter; -+import org.eclipse.swt.events.SelectionEvent; -+import org.eclipse.swt.widgets.Display; -+import org.eclipse.swt.widgets.Menu; -+import org.eclipse.swt.widgets.MenuItem; -+import org.eclipse.swt.widgets.Shell; -+import org.eclipse.ui.IWorkbench; -+import org.eclipse.ui.IWorkbenchWindow; -+import org.eclipse.ui.PlatformUI; -+import 
org.eclipse.ui.progress.IProgressService; + }" +36cfc1272adb7973aeb7437ac689ff2168c9777d,Vala,"glib-2.0: Add more GMarkup bindings + +Add g_markup_collect_attributes, g_markup_parser_context_push, +g_markup_parser_context_pop, and GMarkupCollectType bindings, +based on patch by Yu Feng, fixes bug 564704. +",a,https://github.com/GNOME/vala/,"diff --git a/vapi/glib-2.0.vapi b/vapi/glib-2.0.vapi +index d1d075f0de..ff9474eb3f 100644 +--- a/vapi/glib-2.0.vapi ++++ b/vapi/glib-2.0.vapi +@@ -2684,7 +2684,8 @@ namespace GLib { + PARSE, + UNKNOWN_ELEMENT, + UNKNOWN_ATTRIBUTE, +- INVALID_CONTENT ++ INVALID_CONTENT, ++ MISSING_ATTRIBUTE + } + + [CCode (cprefix = ""G_MARKUP_"", has_type_id = false)] +@@ -2701,6 +2702,8 @@ namespace GLib { + public weak string get_element (); + public weak SList get_element_stack (); + public void get_position (out int line_number, out int char_number); ++ public void push (MarkupParser parser, void* user_data); ++ public void* pop (MarkupParser parser); + } + + public delegate void MarkupParserStartElementFunc (MarkupParseContext context, string element_name, [CCode (array_length = false, array_null_terminated = true)] string[] attribute_names, [CCode (array_length = false, array_null_terminated = true)] string[] attribute_values) throws MarkupError; +@@ -2722,9 +2725,21 @@ namespace GLib { + } + + namespace Markup { ++ [CCode (cprefix = ""G_MARKUP_COLLECT_"", has_type_id = false)] ++ public enum CollectType { ++ INVALID, ++ STRING, ++ STRDUP, ++ BOOLEAN, ++ TRISTATE, ++ OPTIONAL ++ } ++ + public static string escape_text (string text, long length = -1); + [PrintfFormat] + public static string printf_escaped (string format, ...); ++ [CCode (sentinel = ""G_MARKUP_COLLECT_INVALID"")] ++ public static bool collect_attributes (string element_name, string[] attribute_names, string[] attribute_values, ...) 
throws MarkupError; + } + + /* Key-value file parser */" +fef434b11eb3abf88fca6ac3073a5025447a646d,orientdb,Fixed issue -1521 about JSON management of- embedded lists with different types--,c,https://github.com/orientechnologies/orientdb,"diff --git a/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerSchemaAware2CSV.java b/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerSchemaAware2CSV.java +index f4490b73182..7df87652f56 100755 +--- a/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerSchemaAware2CSV.java ++++ b/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerSchemaAware2CSV.java +@@ -212,13 +212,9 @@ else if (firstValue instanceof Enum) + else { + linkedType = OType.getTypeByClass(firstValue.getClass()); + +- if (linkedType != OType.LINK) { +- // EMBEDDED FOR SURE SINCE IT CONTAINS JAVA TYPES +- if (linkedType == null) { +- linkedType = OType.EMBEDDED; +- // linkedClass = new OClass(firstValue.getClass()); +- } +- } ++ if (linkedType != OType.LINK) ++ // EMBEDDED FOR SURE DON'T USE THE LINKED TYPE ++ linkedType = null; + } + + if (type == null) +diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/JSONTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/JSONTest.java +index 411fe57fa61..e702f9d9fc1 100755 +--- a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/JSONTest.java ++++ b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/JSONTest.java +@@ -15,7 +15,12 @@ + */ + package com.orientechnologies.orient.test.database.auto; + +-import java.util.*; ++import java.util.ArrayList; ++import java.util.Collection; ++import java.util.Date; ++import java.util.HashMap; ++import java.util.List; ++import java.util.Map; + + import org.testng.Assert; + import org.testng.annotations.Parameters; +@@ -37,6 +42,11 @@ + public class JSONTest { + private String url; + ++// public static final void main(String[] args) throws Exception { ++// JSONTest test = new JSONTest(""memory:test""); ++// test.testList(); ++// } ++ + @Parameters(value = ""url"") + public JSONTest(final String iURL) { + url = iURL; +@@ -687,4 +697,17 @@ public void nestedJsonTest() { + + db.close(); + } ++ ++ @Test ++ public void testList() throws Exception { ++ ODocument documentSource = new ODocument(); ++ documentSource.fromJSON(""{\""list\"" : [\""string\"", 42]}""); ++ ++ ODocument documentTarget = new ODocument(); ++ documentTarget.fromStream(documentSource.toStream()); ++ ++ OTrackedList list = documentTarget.field(""list"", OType.EMBEDDEDLIST); ++ Assert.assertEquals(list.get(0), ""string""); ++ Assert.assertEquals(list.get(1), 42); ++ } + }" +ba424c200bb5b1321f2a8872dfbb1b6f95e2eab0,tapiji,"Adapts the namespace of all TapiJI plug-ins to org.eclipselabs.tapiji.*. 
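The OrientDB change recorded above (commit fef434b) stops forcing an embedded linked type onto collections, so an embedded list that mixes a string and a number now survives a serialize/parse round trip; the new JSONTest.testList covers exactly that case. The following is a hedged sketch of the same round trip as a standalone program; the import locations are assumed from OrientDB's usual package layout and are not shown in the record itself.

import com.orientechnologies.orient.core.db.record.OTrackedList;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.impl.ODocument;

// Sketch of the round trip exercised by JSONTest.testList above.
public class MixedListRoundTrip {
    public static void main(String[] args) {
        ODocument source = new ODocument();
        source.fromJSON("{\"list\" : [\"string\", 42]}");

        // Serialize and parse back, as the test does with toStream()/fromStream().
        ODocument target = new ODocument();
        target.fromStream(source.toStream());

        OTrackedList<Object> list = target.field("list", OType.EMBEDDEDLIST);
        System.out.println(list.get(0)); // "string"
        System.out.println(list.get(1)); // 42
    }
}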
+",p,https://github.com/tapiji/tapiji,"diff --git a/at.ac.tuwien.inso.eclipse.i18n.java/.project b/at.ac.tuwien.inso.eclipse.i18n.java/.project +index fc4b668e..fe4d5b8a 100644 +--- a/at.ac.tuwien.inso.eclipse.i18n.java/.project ++++ b/at.ac.tuwien.inso.eclipse.i18n.java/.project +@@ -1,6 +1,6 @@ + + +- at.ac.tuwien.inso.eclipse.i18n.java ++ org.eclipselabs.tapiji.tools.java + + + +diff --git a/at.ac.tuwien.inso.eclipse.i18n.java/META-INF/MANIFEST.MF b/at.ac.tuwien.inso.eclipse.i18n.java/META-INF/MANIFEST.MF +index d28b965f..979bf42a 100644 +--- a/at.ac.tuwien.inso.eclipse.i18n.java/META-INF/MANIFEST.MF ++++ b/at.ac.tuwien.inso.eclipse.i18n.java/META-INF/MANIFEST.MF +@@ -1,17 +1,17 @@ + Manifest-Version: 1.0 + Bundle-ManifestVersion: 2 + Bundle-Name: JavaBuilderExtension +-Bundle-SymbolicName: at.ac.tuwien.inso.eclipse.i18n.java;singleton:=true ++Bundle-SymbolicName: org.eclipselabs.tapiji.tools.java;singleton:=true + Bundle-Version: 0.0.1.qualifier + Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +-Require-Bundle: at.ac.tuwien.inso.eclipse.i18n;bundle-version=""0.0.1"", +- org.eclipse.core.resources;bundle-version=""3.6.0"", ++Require-Bundle: org.eclipse.core.resources;bundle-version=""3.6.0"", + org.eclipse.jdt.core;bundle-version=""3.6.0"", + org.eclipse.core.runtime;bundle-version=""3.6.0"", + org.eclipse.jdt.ui;bundle-version=""3.6.0"", +- org.eclipse.jface +-Import-Package: at.ac.tuwien.inso.eclipse.rbe.model.bundle, +- at.ac.tuwien.inso.eclipse.rbe.ui.wizards, ++ org.eclipse.jface, ++ org.eclipselabs.tapiji.tools.core;bundle-version=""0.0.1"" ++Import-Package: org.eclipselabs.tapiji.translator.rbe.model.bundle, ++ org.eclipselabs.tapiji.translator.rbe.ui.wizards, + org.eclipse.core.filebuffers, + org.eclipse.jface.dialogs, + org.eclipse.jface.text, +diff --git a/at.ac.tuwien.inso.eclipse.i18n.java/plugin.xml b/at.ac.tuwien.inso.eclipse.i18n.java/plugin.xml +index abdbec6b..33585177 100644 +--- a/at.ac.tuwien.inso.eclipse.i18n.java/plugin.xml ++++ b/at.ac.tuwien.inso.eclipse.i18n.java/plugin.xml +@@ -2,7 +2,7 @@ + + + ++ point=""org.eclipselabs.tapiji.tools.core.builderExtension""> + + +@@ -13,13 +13,13 @@ + activate=""true"" + class=""ui.ConstantStringHover"" + description=""hovers constant strings"" +- id=""at.ac.tuwien.inso.eclipse.i18n.ui.ConstantStringHover"" ++ id=""org.eclipselabs.tapiji.tools.java.ui.ConstantStringHover"" + label=""Constant Strings""> + + + + getMarkerResolutions(IMarker marker, +- int cause) { ++ public List getMarkerResolutions(IMarker marker) { + List resolutions = new ArrayList(); +- ++ int cause = marker.getAttribute(""cause"", -1); ++ + switch (marker.getAttribute(""cause"", -1)) { + case IMarkerConstants.CAUSE_CONSTANT_LITERAL: + resolutions.add(new ExportToResourceBundleResolution()); +diff --git a/at.ac.tuwien.inso.eclipse.i18n.java/src/auditor/ResourceAuditVisitor.java b/at.ac.tuwien.inso.eclipse.i18n.java/src/auditor/ResourceAuditVisitor.java +index 4a22f0a5..74708c6b 100644 +--- a/at.ac.tuwien.inso.eclipse.i18n.java/src/auditor/ResourceAuditVisitor.java ++++ b/at.ac.tuwien.inso.eclipse.i18n.java/src/auditor/ResourceAuditVisitor.java +@@ -24,9 +24,9 @@ + import org.eclipse.jdt.core.dom.VariableDeclarationStatement; + import org.eclipse.jface.text.IRegion; + import org.eclipse.jface.text.Region; ++import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; + + import util.ASTutils; +-import at.ac.tuwien.inso.eclipse.i18n.model.manager.ResourceBundleManager; + import auditor.model.SLLocation; + + /** +diff --git 
a/at.ac.tuwien.inso.eclipse.i18n.java/src/auditor/model/SLLocation.java b/at.ac.tuwien.inso.eclipse.i18n.java/src/auditor/model/SLLocation.java +index 97269552..3bfd8d28 100644 +--- a/at.ac.tuwien.inso.eclipse.i18n.java/src/auditor/model/SLLocation.java ++++ b/at.ac.tuwien.inso.eclipse.i18n.java/src/auditor/model/SLLocation.java +@@ -3,8 +3,8 @@ + import java.io.Serializable; + + import org.eclipse.core.resources.IFile; ++import org.eclipselabs.tapiji.tools.core.extensions.ILocation; + +-import at.ac.tuwien.inso.eclipse.i18n.extensions.ILocation; + + public class SLLocation implements Serializable, ILocation { + +@@ -13,7 +13,7 @@ public class SLLocation implements Serializable, ILocation { + private int startPos = -1; + private int endPos = -1; + private String literal; +- private Object data; ++ private Serializable data; + + public SLLocation(IFile file, int startPos, int endPos, String literal) { + super(); +@@ -43,10 +43,10 @@ public void setEndPos(int endPos) { + public String getLiteral() { + return literal; + } +- public Object getData () { ++ public Serializable getData () { + return data; + } +- public void setData (Object data) { ++ public void setData (Serializable data) { + this.data = data; + } + +diff --git a/at.ac.tuwien.inso.eclipse.i18n.java/src/quickfix/ExportToResourceBundleResolution.java b/at.ac.tuwien.inso.eclipse.i18n.java/src/quickfix/ExportToResourceBundleResolution.java +index af0afc60..8757c86c 100644 +--- a/at.ac.tuwien.inso.eclipse.i18n.java/src/quickfix/ExportToResourceBundleResolution.java ++++ b/at.ac.tuwien.inso.eclipse.i18n.java/src/quickfix/ExportToResourceBundleResolution.java +@@ -12,10 +12,10 @@ + import org.eclipse.swt.graphics.Image; + import org.eclipse.swt.widgets.Display; + import org.eclipse.ui.IMarkerResolution2; ++import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; ++import org.eclipselabs.tapiji.tools.core.ui.dialogs.CreateResourceBundleEntryDialog; + + import util.ASTutils; +-import at.ac.tuwien.inso.eclipse.i18n.model.manager.ResourceBundleManager; +-import at.ac.tuwien.inso.eclipse.i18n.ui.dialogs.CreateResourceBundleEntryDialog; + + public class ExportToResourceBundleResolution implements IMarkerResolution2 { + +diff --git a/at.ac.tuwien.inso.eclipse.i18n.java/src/quickfix/ReplaceResourceBundleDefReference.java b/at.ac.tuwien.inso.eclipse.i18n.java/src/quickfix/ReplaceResourceBundleDefReference.java +index 58fefe76..8352326b 100644 +--- a/at.ac.tuwien.inso.eclipse.i18n.java/src/quickfix/ReplaceResourceBundleDefReference.java ++++ b/at.ac.tuwien.inso.eclipse.i18n.java/src/quickfix/ReplaceResourceBundleDefReference.java +@@ -14,10 +14,10 @@ + import org.eclipse.swt.graphics.Image; + import org.eclipse.swt.widgets.Display; + import org.eclipse.ui.IMarkerResolution2; ++import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; ++import org.eclipselabs.tapiji.tools.core.ui.dialogs.InsertResourceBundleReferenceDialog; ++import org.eclipselabs.tapiji.tools.core.ui.dialogs.ResourceBundleSelectionDialog; + +-import at.ac.tuwien.inso.eclipse.i18n.model.manager.ResourceBundleManager; +-import at.ac.tuwien.inso.eclipse.i18n.ui.dialogs.InsertResourceBundleReferenceDialog; +-import at.ac.tuwien.inso.eclipse.i18n.ui.dialogs.ResourceBundleSelectionDialog; + + public class ReplaceResourceBundleDefReference implements IMarkerResolution2 { + +diff --git a/at.ac.tuwien.inso.eclipse.i18n.java/src/quickfix/ReplaceResourceBundleReference.java 
b/at.ac.tuwien.inso.eclipse.i18n.java/src/quickfix/ReplaceResourceBundleReference.java +index 7b18a4cb..3e92be00 100644 +--- a/at.ac.tuwien.inso.eclipse.i18n.java/src/quickfix/ReplaceResourceBundleReference.java ++++ b/at.ac.tuwien.inso.eclipse.i18n.java/src/quickfix/ReplaceResourceBundleReference.java +@@ -14,9 +14,9 @@ + import org.eclipse.swt.graphics.Image; + import org.eclipse.swt.widgets.Display; + import org.eclipse.ui.IMarkerResolution2; ++import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; ++import org.eclipselabs.tapiji.tools.core.ui.dialogs.InsertResourceBundleReferenceDialog; + +-import at.ac.tuwien.inso.eclipse.i18n.model.manager.ResourceBundleManager; +-import at.ac.tuwien.inso.eclipse.i18n.ui.dialogs.InsertResourceBundleReferenceDialog; + + public class ReplaceResourceBundleReference implements IMarkerResolution2 { + +diff --git a/at.ac.tuwien.inso.eclipse.i18n.java/src/ui/ConstantStringHover.java b/at.ac.tuwien.inso.eclipse.i18n.java/src/ui/ConstantStringHover.java +index face8493..a0e12a53 100644 +--- a/at.ac.tuwien.inso.eclipse.i18n.java/src/ui/ConstantStringHover.java ++++ b/at.ac.tuwien.inso.eclipse.i18n.java/src/ui/ConstantStringHover.java +@@ -9,8 +9,8 @@ + import org.eclipse.jface.text.IRegion; + import org.eclipse.jface.text.ITextViewer; + import org.eclipse.ui.IEditorPart; ++import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; + +-import at.ac.tuwien.inso.eclipse.i18n.model.manager.ResourceBundleManager; + import auditor.ResourceAuditVisitor; + + public class ConstantStringHover implements IJavaEditorTextHover { +diff --git a/at.ac.tuwien.inso.eclipse.i18n.java/src/ui/MessageCompletionProposalComputer.java b/at.ac.tuwien.inso.eclipse.i18n.java/src/ui/MessageCompletionProposalComputer.java +index 739cce7e..f6b77524 100644 +--- a/at.ac.tuwien.inso.eclipse.i18n.java/src/ui/MessageCompletionProposalComputer.java ++++ b/at.ac.tuwien.inso.eclipse.i18n.java/src/ui/MessageCompletionProposalComputer.java +@@ -18,15 +18,15 @@ + import org.eclipse.jdt.ui.text.java.JavaContentAssistInvocationContext; + import org.eclipse.jface.text.IRegion; + import org.eclipse.jface.text.contentassist.ICompletionProposal; +import org.eclipselabs.tapiji.tools.core.builder.InternationalizationNature; ++import org.eclipselabs.tapiji.tools.core.builder.quickfix.CreateResourceBundle; +import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; -+import org.eclipselabs.tapiji.tools.core.ui.dialogs.AddLanguageDialoge; -+import org.eclipselabs.tapiji.tools.core.ui.dialogs.FragmentProjectSelectionDialog; -+import org.eclipselabs.tapiji.tools.core.ui.dialogs.GenerateBundleAccessorDialog; -+import org.eclipselabs.tapiji.tools.core.ui.dialogs.RemoveLanguageDialoge; -+import org.eclipselabs.tapiji.tools.core.util.FragmentProjectUtils; -+import org.eclipselabs.tapiji.tools.core.util.LanguageUtils; -+ -+ -+public class InternationalizationMenu extends ContributionItem { -+ private boolean excludeMode = true; -+ private boolean internationalizationEnabled = false; -+ -+ private MenuItem mnuToggleInt; -+ private MenuItem excludeResource; -+ private MenuItem addLanguage; -+ private MenuItem removeLanguage; -+ -+ public InternationalizationMenu() {} -+ -+ public InternationalizationMenu(String id) { -+ super(id); -+ } -+ -+ @Override -+ public void fill(Menu menu, int index) { -+ if (getSelectedProjects().size() == 0 || -+ !projectsSupported()) -+ return; -+ -+ // Toggle Internatinalization -+ mnuToggleInt = new MenuItem (menu, SWT.PUSH); -+ 
mnuToggleInt.addSelectionListener(new SelectionAdapter () { -+ -+ @Override -+ public void widgetSelected(SelectionEvent e) { -+ runToggleInt(); -+ } -+ -+ }); -+ -+ // Exclude Resource -+ excludeResource = new MenuItem (menu, SWT.PUSH); -+ excludeResource.addSelectionListener(new SelectionAdapter () { ++import org.eclipselabs.tapiji.translator.rbe.model.bundle.IBundleGroup; + + import ui.autocompletion.InsertResourceBundleReferenceProposal; + import ui.autocompletion.MessageCompletionProposal; + import ui.autocompletion.NewResourceBundleEntryProposal; + import ui.autocompletion.NoActionProposal; +-import at.ac.tuwien.inso.eclipse.i18n.builder.InternationalizationNature; +-import at.ac.tuwien.inso.eclipse.i18n.builder.quickfix.CreateResourceBundle; +-import at.ac.tuwien.inso.eclipse.i18n.model.manager.ResourceBundleManager; +-import at.ac.tuwien.inso.eclipse.rbe.model.bundle.IBundleGroup; + import auditor.ResourceAuditVisitor; + + public class MessageCompletionProposalComputer implements +diff --git a/at.ac.tuwien.inso.eclipse.i18n.java/src/ui/autocompletion/InsertResourceBundleReferenceProposal.java b/at.ac.tuwien.inso.eclipse.i18n.java/src/ui/autocompletion/InsertResourceBundleReferenceProposal.java +index 9193c764..0382ba19 100644 +--- a/at.ac.tuwien.inso.eclipse.i18n.java/src/ui/autocompletion/InsertResourceBundleReferenceProposal.java ++++ b/at.ac.tuwien.inso.eclipse.i18n.java/src/ui/autocompletion/InsertResourceBundleReferenceProposal.java +@@ -13,10 +13,10 @@ + import org.eclipse.swt.widgets.Display; + import org.eclipse.ui.ISharedImages; + import org.eclipse.ui.PlatformUI; ++import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; ++import org.eclipselabs.tapiji.tools.core.ui.dialogs.InsertResourceBundleReferenceDialog; + + import util.ASTutils; +-import at.ac.tuwien.inso.eclipse.i18n.model.manager.ResourceBundleManager; +-import at.ac.tuwien.inso.eclipse.i18n.ui.dialogs.InsertResourceBundleReferenceDialog; + + public class InsertResourceBundleReferenceProposal implements IJavaCompletionProposal { + +diff --git a/at.ac.tuwien.inso.eclipse.i18n.java/src/ui/autocompletion/MessageCompletionProposal.java b/at.ac.tuwien.inso.eclipse.i18n.java/src/ui/autocompletion/MessageCompletionProposal.java +index d034358d..7c43856d 100644 +--- a/at.ac.tuwien.inso.eclipse.i18n.java/src/ui/autocompletion/MessageCompletionProposal.java ++++ b/at.ac.tuwien.inso.eclipse.i18n.java/src/ui/autocompletion/MessageCompletionProposal.java +@@ -5,8 +5,8 @@ + import org.eclipse.jface.text.contentassist.IContextInformation; + import org.eclipse.swt.graphics.Image; + import org.eclipse.swt.graphics.Point; ++import org.eclipselabs.tapiji.tools.core.util.ImageUtils; + +-import at.ac.tuwien.inso.eclipse.i18n.util.ImageUtils; + + public class MessageCompletionProposal implements IJavaCompletionProposal { + +diff --git a/at.ac.tuwien.inso.eclipse.i18n.java/src/ui/autocompletion/NewResourceBundleEntryProposal.java b/at.ac.tuwien.inso.eclipse.i18n.java/src/ui/autocompletion/NewResourceBundleEntryProposal.java +index 37261d98..7477d2d1 100644 +--- a/at.ac.tuwien.inso.eclipse.i18n.java/src/ui/autocompletion/NewResourceBundleEntryProposal.java ++++ b/at.ac.tuwien.inso.eclipse.i18n.java/src/ui/autocompletion/NewResourceBundleEntryProposal.java +@@ -10,10 +10,10 @@ + import org.eclipse.swt.widgets.Display; + import org.eclipse.ui.ISharedImages; + import org.eclipse.ui.PlatformUI; ++import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; ++import 
org.eclipselabs.tapiji.tools.core.ui.dialogs.CreateResourceBundleEntryDialog; + + import util.ASTutils; +-import at.ac.tuwien.inso.eclipse.i18n.model.manager.ResourceBundleManager; +-import at.ac.tuwien.inso.eclipse.i18n.ui.dialogs.CreateResourceBundleEntryDialog; + + public class NewResourceBundleEntryProposal implements IJavaCompletionProposal {" +e56f26140787fbe76b3c155c0248558287370e2c,Delta Spike,"DELTASPIKE-208 explicitely enable global alternatives + +This now works on Weld, OWB with BDA enabled, etc +",c,https://github.com/apache/deltaspike,"diff --git a/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/exclude/extension/ExcludeExtension.java b/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/exclude/extension/ExcludeExtension.java +index 88a59724e..22dd33cb1 100644 +--- a/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/exclude/extension/ExcludeExtension.java ++++ b/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/exclude/extension/ExcludeExtension.java +@@ -44,8 +44,10 @@ + import java.net.URL; + import java.util.ArrayList; + import java.util.Arrays; ++import java.util.HashMap; + import java.util.HashSet; + import java.util.List; ++import java.util.Map; + import java.util.Set; + import java.util.jar.Attributes; + import java.util.jar.Manifest; +@@ -60,27 +62,56 @@ + */ + public class ExcludeExtension implements Extension, Deactivatable + { +- private static final Logger LOG = Logger.getLogger(ExcludeExtension.class.getName()); ++ private static final String GLOBAL_ALTERNATIVES = ""globalAlternatives.""; + +- private static Boolean isWeld1Detected = false; ++ private static final Logger LOG = Logger.getLogger(ExcludeExtension.class.getName()); + + private boolean isActivated = true; + private boolean isGlobalAlternativeActivated = true; + private boolean isCustomProjectStageBeanFilterActivated = true; + ++ /** ++ * Contains the globalAlternatives which should get used ++ * KEY=Interface class name ++ * VALUE=Implementation class name ++ */ ++ private Map globalAlternatives = new HashMap(); + -+ @Override -+ public void widgetSelected(SelectionEvent e) { -+ runExclude(); -+ } + -+ }); -+ -+ new MenuItem(menu, SWT.SEPARATOR); -+ -+ // Add Language -+ addLanguage = new MenuItem(menu, SWT.PUSH); -+ addLanguage.addSelectionListener(new SelectionAdapter() { -+ -+ @Override -+ public void widgetSelected(SelectionEvent e) { -+ runAddLanguage(); -+ } -+ -+ }); -+ -+ // Remove Language -+ removeLanguage = new MenuItem(menu, SWT.PUSH); -+ removeLanguage.addSelectionListener(new SelectionAdapter() { -+ -+ @Override -+ public void widgetSelected(SelectionEvent e) { -+ runRemoveLanguage(); -+ } -+ -+ }); -+ -+ menu.addMenuListener(new MenuAdapter () { -+ public void menuShown (MenuEvent e) { -+ updateStateToggleInt (mnuToggleInt); -+ //updateStateGenRBAccessor (generateAccessor); -+ updateStateExclude (excludeResource); -+ updateStateAddLanguage (addLanguage); -+ updateStateRemoveLanguage (removeLanguage); -+ } -+ }); -+ } + @SuppressWarnings(""UnusedDeclaration"") + protected void init(@Observes BeforeBeanDiscovery beforeBeanDiscovery, BeanManager beanManager) + { + isActivated = + ClassDeactivationUtils.isActivated(getClass()); + +- isGlobalAlternativeActivated = +- ClassDeactivationUtils.isActivated(GlobalAlternative.class); +- + isCustomProjectStageBeanFilterActivated = + ClassDeactivationUtils.isActivated(CustomProjectStageBeanFilter.class); + +- isWeld1Detected = isWeld1(beanManager); ++ isGlobalAlternativeActivated = 
++ ClassDeactivationUtils.isActivated(GlobalAlternative.class); ++ if (isGlobalAlternativeActivated) ++ { ++ Map allProperties = ConfigResolver.getAllProperties(); ++ for (Map.Entry property : allProperties.entrySet()) ++ { ++ if (property.getKey().startsWith(GLOBAL_ALTERNATIVES)) ++ { ++ String interfaceName = property.getKey().substring(GLOBAL_ALTERNATIVES.length()); ++ String implementation = property.getValue(); ++ if (LOG.isLoggable(Level.FINE)) ++ { ++ LOG.fine(""Enabling global alternative for interface "" + interfaceName + "": "" + implementation); ++ } + -+ protected void runGenRBAccessor () { -+ GenerateBundleAccessorDialog dlg = new GenerateBundleAccessorDialog(Display.getDefault().getActiveShell()); -+ if (dlg.open() != InputDialog.OK) -+ return; -+ } -+ -+ protected void updateStateGenRBAccessor (MenuItem menuItem) { -+ Collection frags = getSelectedPackageFragments(); -+ menuItem.setEnabled(frags.size() > 0); -+ } -+ -+ protected void updateStateToggleInt (MenuItem menuItem) { -+ Collection projects = getSelectedProjects(); -+ boolean enabled = projects.size() > 0; -+ menuItem.setEnabled(enabled); -+ setVisible(enabled); -+ internationalizationEnabled = InternationalizationNature.hasNature(projects.iterator().next()); -+ //menuItem.setSelection(enabled && internationalizationEnabled); -+ -+ if (internationalizationEnabled) -+ menuItem.setText(""Disable Internationalization""); -+ else -+ menuItem.setText(""Enable Internationalization""); -+ } ++ globalAlternatives.put(interfaceName, implementation); ++ } ++ } + -+ private Collection getSelectedPackageFragments () { -+ Collection frags = new HashSet (); -+ IWorkbenchWindow window = -+ PlatformUI.getWorkbench().getActiveWorkbenchWindow(); -+ ISelection selection = window.getActivePage().getSelection (); -+ if (selection instanceof IStructuredSelection) { -+ for (Iterator iter = ((IStructuredSelection)selection).iterator(); iter.hasNext();) { -+ Object elem = iter.next(); -+ if (elem instanceof IPackageFragment) { -+ IPackageFragment frag = (IPackageFragment) elem; -+ if (!frag.isReadOnly()) -+ frags.add (frag); -+ } -+ } -+ } -+ return frags; -+ } -+ -+ private Collection getSelectedProjects () { -+ Collection projects = new HashSet (); -+ IWorkbenchWindow window = -+ PlatformUI.getWorkbench().getActiveWorkbenchWindow(); -+ ISelection selection = window.getActivePage().getSelection (); -+ if (selection instanceof IStructuredSelection) { -+ for (Iterator iter = ((IStructuredSelection)selection).iterator(); iter.hasNext();) { -+ Object elem = iter.next(); -+ if (!(elem instanceof IResource)) { -+ if (!(elem instanceof IAdaptable)) -+ continue; -+ elem = ((IAdaptable) elem).getAdapter (IResource.class); -+ if (!(elem instanceof IResource)) -+ continue; -+ } -+ if (!(elem instanceof IProject)) { -+ elem = ((IResource) elem).getProject(); -+ if (!(elem instanceof IProject)) -+ continue; -+ } -+ if (((IProject)elem).isAccessible()) -+ projects.add ((IProject)elem); -+ -+ } -+ } -+ return projects; -+ } -+ -+ protected boolean projectsSupported() { -+ Collection projects = getSelectedProjects (); -+ for (IProject project : projects) { -+ if (!InternationalizationNature.supportsNature(project)) -+ return false; -+ } -+ -+ return true; -+ } -+ -+ protected void runToggleInt () { -+ Collection projects = getSelectedProjects (); -+ for (IProject project : projects) { -+ toggleNature (project); -+ } -+ } -+ -+ private void toggleNature (IProject project) { -+ if (InternationalizationNature.hasNature (project)) { -+ 
InternationalizationNature.removeNature (project); -+ } else { -+ InternationalizationNature.addNature (project); -+ } -+ } -+ protected void updateStateExclude (MenuItem menuItem) { -+ Collection resources = getSelectedResources(); -+ menuItem.setEnabled(resources.size() > 0 && internationalizationEnabled); -+ ResourceBundleManager manager = null; -+ excludeMode = false; -+ -+ for (IResource res : resources) { -+ if (manager == null || (manager.getProject() != res.getProject())) -+ manager = ResourceBundleManager.getManager(res.getProject()); -+ try { -+ if (!ResourceBundleManager.isResourceExcluded(res)) { -+ excludeMode = true; -+ } -+ } catch (Exception e) { } -+ } -+ -+ if (!excludeMode) -+ menuItem.setText(""Include Resource""); -+ else -+ menuItem.setText(""Exclude Resource""); -+ } -+ -+ private Collection getSelectedResources () { -+ Collection resources = new HashSet (); -+ IWorkbenchWindow window = -+ PlatformUI.getWorkbench().getActiveWorkbenchWindow(); -+ ISelection selection = window.getActivePage().getSelection (); -+ if (selection instanceof IStructuredSelection) { -+ for (Iterator iter = ((IStructuredSelection)selection).iterator(); iter.hasNext();) { -+ Object elem = iter.next(); -+ if (elem instanceof IProject) -+ continue; -+ -+ if (elem instanceof IResource) { -+ resources.add ((IResource)elem); -+ } else if (elem instanceof IJavaElement) { -+ resources.add (((IJavaElement)elem).getResource()); -+ } -+ } -+ } -+ return resources; -+ } -+ -+ protected void runExclude () { -+ final Collection selectedResources = getSelectedResources (); -+ -+ IWorkbench wb = PlatformUI.getWorkbench(); -+ IProgressService ps = wb.getProgressService(); -+ try { -+ ps.busyCursorWhile(new IRunnableWithProgress() { -+ public void run(IProgressMonitor pm) { -+ -+ ResourceBundleManager manager = null; -+ pm.beginTask(""Including resources to Internationalization"", selectedResources.size()); -+ -+ for (IResource res : selectedResources) { -+ if (manager == null || (manager.getProject() != res.getProject())) -+ manager = ResourceBundleManager.getManager(res.getProject()); -+ if (excludeMode) -+ manager.excludeResource(res, pm); -+ else -+ manager.includeResource(res, pm); -+ pm.worked(1); -+ } -+ pm.done(); -+ } -+ }); -+ } catch (Exception e) {} -+ } ++ if (globalAlternatives.isEmpty()) ++ { ++ isGlobalAlternativeActivated = false; ++ } ++ } + } + + /** +@@ -101,9 +132,9 @@ protected void initProjectStage(@Observes AfterDeploymentValidation afterDeploym + protected void vetoBeans(@Observes ProcessAnnotatedType processAnnotatedType, BeanManager beanManager) + { + //we need to do it before the exclude logic to keep the @Exclude support for global alternatives +- if (isGlobalAlternativeActivated && isWeld1Detected) ++ if (isGlobalAlternativeActivated) + { +- activateGlobalAlternativesWeld1(processAnnotatedType, beanManager); ++ activateGlobalAlternatives(processAnnotatedType, beanManager); + } + + if (isCustomProjectStageBeanFilterActivated) +@@ -158,8 +189,8 @@ protected void vetoCustomProjectStageBeans(ProcessAnnotatedType processAnnotated + + + +- private void activateGlobalAlternativesWeld1(ProcessAnnotatedType processAnnotatedType, +- BeanManager beanManager) ++ private void activateGlobalAlternatives(ProcessAnnotatedType processAnnotatedType, ++ BeanManager beanManager) + { + Class currentBean = processAnnotatedType.getAnnotatedType().getJavaClass(); + +@@ -184,7 +215,7 @@ private void activateGlobalAlternativesWeld1(ProcessAnnotatedType processAnnotat + { + alternativeBeanAnnotations = new 
HashSet(); + +- configuredBeanName = ConfigResolver.getPropertyValue(currentType.getName()); ++ configuredBeanName = globalAlternatives.get(currentType.getName()); + if (configuredBeanName != null && configuredBeanName.length() > 0) + { + alternativeBeanClass = ClassUtils.tryToLoadClassForName(configuredBeanName); +@@ -442,26 +473,6 @@ private void veto(ProcessAnnotatedType processAnnotatedType, String vetoType) + processAnnotatedType.getAnnotatedType().getJavaClass()); + } + +- private boolean isWeld1(BeanManager beanManager) +- { +- if (beanManager.getClass().getName().startsWith(""org.apache"")) +- { +- return false; +- } +- +- if (beanManager.getClass().getName().startsWith(""org.jboss.weld"")) +- { +- String version = getJarVersion(beanManager.getClass()); +- +- if (version != null && version.startsWith(""1."")) +- { +- return true; +- } +- } +- +- return false; +- } +- + private static String getJarVersion(Class targetClass) + { + String manifestFileLocation = getManifestFileLocationOfClass(targetClass); +diff --git a/deltaspike/core/impl/src/test/resources/META-INF/apache-deltaspike.properties b/deltaspike/core/impl/src/test/resources/META-INF/apache-deltaspike.properties +index b935ffcb8..ba2908684 100644 +--- a/deltaspike/core/impl/src/test/resources/META-INF/apache-deltaspike.properties ++++ b/deltaspike/core/impl/src/test/resources/META-INF/apache-deltaspike.properties +@@ -20,10 +20,10 @@ org.apache.deltaspike.core.spi.activation.ClassDeactivator=org.apache.deltaspike + testProperty02=test_value_02 + db=prodDB + +-org.apache.deltaspike.test.core.api.alternative.global.BaseBean1=org.apache.deltaspike.test.core.api.alternative.global.SubBaseBean2 +-org.apache.deltaspike.test.core.api.alternative.global.BaseInterface1=org.apache.deltaspike.test.core.api.alternative.global.BaseInterface1AlternativeImplementation ++globalAlternatives.org.apache.deltaspike.test.core.api.alternative.global.BaseBean1=org.apache.deltaspike.test.core.api.alternative.global.SubBaseBean2 ++globalAlternatives.org.apache.deltaspike.test.core.api.alternative.global.BaseInterface1=org.apache.deltaspike.test.core.api.alternative.global.BaseInterface1AlternativeImplementation + +-org.apache.deltaspike.test.core.api.alternative.global.qualifier.BaseInterface=org.apache.deltaspike.test.core.api.alternative.global.qualifier.AlternativeBaseBeanB ++globalAlternatives.org.apache.deltaspike.test.core.api.alternative.global.qualifier.BaseInterface=org.apache.deltaspike.test.core.api.alternative.global.qualifier.AlternativeBaseBeanB + + configProperty1=14 + configProperty2=7" +c251c539b5e9aaf8b1e8f957254917c0b18e850d,apache$maven-plugins,"[MCHANGES-168]: Fix non-Latin-script character handling. +The actual repair here was the change to maven-reporting-impl version 2.1. However, I then went off and set up a +way to test JIRA functionality without actually talking to JIRA. That involved a bit of refactoring and mocking. +git-svn-id: https://svn.apache.org/repos/asf/maven/plugins/trunk@1131489 13f79535-47bb-0310-9956-ffa450edef68 +",p,https://github.com/apache/maven-plugins,"diff --git a/maven-changes-plugin/pom.xml b/maven-changes-plugin/pom.xml +index ed3fb521c4..08bec40dc7 100644 +--- a/maven-changes-plugin/pom.xml ++++ b/maven-changes-plugin/pom.xml +@@ -122,7 +122,7 @@ under the License. 
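The DeltaSpike change recorded above replaces the old "interface name as property key" lookup with an explicit globalAlternatives. prefix in apache-deltaspike.properties (globalAlternatives.<interface>=<implementation>), collected once in ExcludeExtension.init(). The sketch below mirrors only that prefix scan over an ordinary Map, with made-up org.example class names; it is not the DeltaSpike code itself and leaves out all CDI wiring.

import java.util.HashMap;
import java.util.Map;

// Sketch of the "globalAlternatives." convention shown above:
// globalAlternatives.<interface name>=<implementation class name>
public class GlobalAlternativeScan {
    private static final String PREFIX = "globalAlternatives.";

    static Map<String, String> scan(Map<String, String> allProperties) {
        Map<String, String> alternatives = new HashMap<>();
        for (Map.Entry<String, String> property : allProperties.entrySet()) {
            if (property.getKey().startsWith(PREFIX)) {
                String interfaceName = property.getKey().substring(PREFIX.length());
                alternatives.put(interfaceName, property.getValue());
            }
        }
        return alternatives;
    }

    public static void main(String[] args) {
        Map<String, String> props = new HashMap<>();
        props.put("globalAlternatives.org.example.BaseInterface1", "org.example.AlternativeImpl");
        props.put("db", "prodDB"); // unrelated entries are ignored
        System.out.println(scan(props)); // {org.example.BaseInterface1=org.example.AlternativeImpl}
    }
}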
+ + org.apache.maven.reporting + maven-reporting-impl +- 2.0.5 ++ 2.1 + + + org.apache.maven.shared +diff --git a/maven-changes-plugin/src/main/java/org/apache/maven/plugin/changes/AbstractChangesReport.java b/maven-changes-plugin/src/main/java/org/apache/maven/plugin/changes/AbstractChangesReport.java +index dfb64e2e32..234ebb8b66 100644 +--- a/maven-changes-plugin/src/main/java/org/apache/maven/plugin/changes/AbstractChangesReport.java ++++ b/maven-changes-plugin/src/main/java/org/apache/maven/plugin/changes/AbstractChangesReport.java +@@ -174,7 +174,7 @@ public void execute() + { + DecorationModel model = new DecorationModel(); + model.setBody( new Body() ); +- Map attributes = new HashMap(); ++ Map attributes = new HashMap(); + attributes.put( ""outputEncoding"", getOutputEncoding() ); + Locale locale = Locale.getDefault(); + SiteRenderingContext siteContext = siteRenderer.createContextForSkin( getSkinArtifactFile(), attributes, +diff --git a/maven-changes-plugin/src/main/java/org/apache/maven/plugin/jira/AbstractJiraDownloader.java b/maven-changes-plugin/src/main/java/org/apache/maven/plugin/jira/AbstractJiraDownloader.java +index 777c3e430c..1063044f6a 100644 +--- a/maven-changes-plugin/src/main/java/org/apache/maven/plugin/jira/AbstractJiraDownloader.java ++++ b/maven-changes-plugin/src/main/java/org/apache/maven/plugin/jira/AbstractJiraDownloader.java +@@ -69,7 +69,7 @@ public abstract class AbstractJiraDownloader + private static final String UTF_8 = ""UTF-8""; + + /** Log for debug output. */ +- private Log log; ++ protected Log log; + /** Output file for xml document. */ + private File output; + /** The maximum number of entries to show. */ +@@ -111,7 +111,7 @@ public abstract class AbstractJiraDownloader + /** Mapping containing all allowed JIRA type values. */ + protected final Map typeMap = new HashMap( 8 ); + /** The pattern used to parse dates from the JIRA xml file. */ +- private String jiraDatePattern; ++ protected String jiraDatePattern; + + /** + * Creates a filter given the parameters and some defaults. +@@ -421,7 +421,12 @@ public void doExecute() + } + catch ( Exception e ) + { +- getLog().error( ""Error accessing "" + project.getIssueManagement().getUrl(), e ); ++ if ( project.getIssueManagement() != null) ++ { ++ getLog().error( ""Error accessing "" + project.getIssueManagement().getUrl(), e ); ++ } else { ++ getLog().error( ""Error accessing mock project issues"", e ); ++ } + } + } + +diff --git a/maven-changes-plugin/src/main/java/org/apache/maven/plugin/jira/JiraMojo.java b/maven-changes-plugin/src/main/java/org/apache/maven/plugin/jira/JiraMojo.java +index 4e31627fdd..3b677b415d 100644 +--- a/maven-changes-plugin/src/main/java/org/apache/maven/plugin/jira/JiraMojo.java ++++ b/maven-changes-plugin/src/main/java/org/apache/maven/plugin/jira/JiraMojo.java +@@ -295,6 +295,11 @@ public class JiraMojo + * @parameter default-value="""" + */ + private String webUser; ++ ++ /* ++ * Used for tests. 
++ */ ++ private AbstractJiraDownloader mockDownloader; + + /* --------------------------------------------------------------------- */ + /* Public methods */ +@@ -305,6 +310,10 @@ public class JiraMojo + */ + public boolean canGenerateReport() + { ++ if ( mockDownloader != null) ++ { ++ return true; ++ } + return ProjectUtils.validateIfIssueManagementComplete( project, ""JIRA"", ""JIRA Report"", getLog() ); + } + +@@ -323,7 +332,13 @@ public void executeReport( Locale locale ) + try + { + // Download issues +- JiraDownloader issueDownloader = new JiraDownloader(); ++ AbstractJiraDownloader issueDownloader; ++ if ( mockDownloader != null) ++ { ++ issueDownloader = mockDownloader; ++ } else { ++ issueDownloader = new JiraDownloader(); ++ } + configureIssueDownloader( issueDownloader ); + issueDownloader.doExecute(); + +@@ -386,7 +401,7 @@ private ResourceBundle getBundle( Locale locale ) + return ResourceBundle.getBundle( ""jira-report"", locale, this.getClass().getClassLoader() ); + } + +- private void configureIssueDownloader( JiraDownloader issueDownloader ) ++ private void configureIssueDownloader( AbstractJiraDownloader issueDownloader ) + { + issueDownloader.setLog( getLog() ); + +@@ -424,4 +439,14 @@ private void configureIssueDownloader( JiraDownloader issueDownloader ) + + issueDownloader.setSettings( settings ); + } + -+ protected void updateStateAddLanguage(MenuItem menuItem){ -+ Collection projects = getSelectedProjects(); -+ boolean hasResourceBundles=false; -+ for (IProject p : projects){ -+ ResourceBundleManager rbmanager = ResourceBundleManager.getManager(p); -+ hasResourceBundles = rbmanager.getResourceBundleIdentifiers().size() > 0 ? true : false; -+ } -+ -+ menuItem.setText(""Add Language To Project""); -+ menuItem.setEnabled(projects.size() > 0 && hasResourceBundles); -+ } -+ -+ protected void runAddLanguage() { -+ AddLanguageDialoge dialog = new AddLanguageDialoge(new Shell(Display.getCurrent())); -+ if (dialog.open() == InputDialog.OK) { -+ final Locale locale = dialog.getSelectedLanguage(); -+ -+ Collection selectedProjects = getSelectedProjects(); -+ for (IProject project : selectedProjects){ -+ //check if project is fragmentproject and continue working with the hostproject, if host not member of selectedProjects -+ if (FragmentProjectUtils.isFragment(project)){ -+ IProject host = FragmentProjectUtils.getFragmentHost(project); -+ if (!selectedProjects.contains(host)) -+ project = host; -+ else -+ continue; -+ } -+ -+ List fragments = FragmentProjectUtils.getFragments(project); -+ -+ if (!fragments.isEmpty()) { -+ FragmentProjectSelectionDialog fragmentDialog = new FragmentProjectSelectionDialog( -+ Display.getCurrent().getActiveShell(), project, -+ fragments); -+ -+ if (fragmentDialog.open() == InputDialog.OK) -+ project = fragmentDialog.getSelectedProject(); -+ } -+ -+ final IProject selectedProject = project; -+ BusyIndicator.showWhile(Display.getCurrent(), new Runnable() { -+ @Override -+ public void run() { -+ LanguageUtils.addLanguageToProject(selectedProject, locale); -+ } -+ -+ }); -+ -+ } -+ } -+ } -+ -+ -+ protected void updateStateRemoveLanguage(MenuItem menuItem) { -+ Collection projects = getSelectedProjects(); -+ boolean hasResourceBundles=false; -+ if (projects.size() == 1){ -+ IProject project = projects.iterator().next(); -+ ResourceBundleManager rbmanager = ResourceBundleManager.getManager(project); -+ hasResourceBundles = rbmanager.getResourceBundleIdentifiers().size() > 0 ? 
true : false; -+ } -+ menuItem.setText(""Remove Language From Project""); -+ menuItem.setEnabled(projects.size() == 1 && hasResourceBundles/*&& more than one common languages contained*/); -+ } ++ public void setMockDownloader( AbstractJiraDownloader mockDownloader ) ++ { ++ this.mockDownloader = mockDownloader; ++ } + -+ protected void runRemoveLanguage() { -+ final IProject project = getSelectedProjects().iterator().next(); -+ RemoveLanguageDialoge dialog = new RemoveLanguageDialoge(project, new Shell(Display.getCurrent())); -+ -+ -+ if (dialog.open() == InputDialog.OK) { -+ final Locale locale = dialog.getSelectedLanguage(); -+ if (locale != null) { -+ if (MessageDialog.openConfirm(Display.getCurrent().getActiveShell(), ""Confirm"", ""Do you really want remove all properties-files for ""+locale.getDisplayName()+""?"")) -+ BusyIndicator.showWhile(Display.getCurrent(), new Runnable() { -+ @Override -+ public void run() { -+ LanguageUtils.removeLanguageFromProject(project, locale); -+ } -+ }); -+ -+ } -+ } -+ } -+ -+} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/prefrences/BuilderPreferencePage.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/prefrences/BuilderPreferencePage.java ++ public AbstractJiraDownloader getMockDownloader() ++ { ++ return mockDownloader; ++ } + } +diff --git a/maven-changes-plugin/src/main/java/org/apache/maven/plugin/jira/JiraXML.java b/maven-changes-plugin/src/main/java/org/apache/maven/plugin/jira/JiraXML.java +index 3f03363212..0e4ce55487 100644 +--- a/maven-changes-plugin/src/main/java/org/apache/maven/plugin/jira/JiraXML.java ++++ b/maven-changes-plugin/src/main/java/org/apache/maven/plugin/jira/JiraXML.java +@@ -20,6 +20,8 @@ + */ + + import java.io.File; ++import java.io.FileInputStream; ++import java.io.IOException; + import java.text.ParseException; + import java.text.SimpleDateFormat; + import java.util.ArrayList; +@@ -34,14 +36,14 @@ + import org.apache.maven.plugin.issues.Issue; + import org.apache.maven.plugin.logging.Log; + import org.xml.sax.Attributes; ++import org.xml.sax.InputSource; + import org.xml.sax.SAXException; + import org.xml.sax.helpers.DefaultHandler; + + /** +- * XML parser that extracts Issues from JIRA. This works on an XML +- * file downloaded from JIRA and creates a List of issues that is +- * exposed to the user of the class. +- * ++ * XML parser that extracts Issues from JIRA. This works on an XML file downloaded from JIRA and creates a ++ * List of issues that is exposed to the user of the class. ++ * + * @version $Id$ + */ + public class JiraXML +@@ -64,7 +66,6 @@ public class JiraXML + private SimpleDateFormat sdf = null; + + /** +- * + * @param log not null. + * @param datePattern may be null. + * @since 2.4 +@@ -89,29 +90,49 @@ public JiraXML( Log log, String datePattern ) + + /** + * Parse the given xml file. The list of issues can then be retrieved with {@link #getIssueList()}. +- * ++ * + * @param xmlPath the file to pares. 
+- * @throws MojoExecutionException +- * ++ * @throws MojoExecutionException + * @since 2.4 + */ +- public void parseXML( File xmlPath ) throws MojoExecutionException ++ public void parseXML( File xmlPath ) ++ throws MojoExecutionException + { +- parse( xmlPath ); ++ FileInputStream xmlStream = null; ++ try ++ { ++ InputSource inputSource = new InputSource( xmlStream ); ++ parse( inputSource ); ++ } ++ finally ++ { ++ if ( xmlStream != null ) ++ { ++ try ++ { ++ xmlStream.close(); ++ } ++ catch ( IOException e ) ++ { ++ // ++ } ++ } ++ } + } + +- private void parse( File xmlPath ) throws MojoExecutionException ++ void parse( InputSource xmlSource ) ++ throws MojoExecutionException + { + try + { + SAXParserFactory factory = SAXParserFactory.newInstance(); + SAXParser saxParser = factory.newSAXParser(); + +- saxParser.parse( xmlPath, this ); ++ saxParser.parse( xmlSource, this ); + } + catch ( Throwable t ) + { +- throw new MojoExecutionException ( ""Failed to parse JIRA XML."", t ); ++ throw new MojoExecutionException( ""Failed to parse JIRA XML."", t ); + } + } + +diff --git a/maven-changes-plugin/src/test/java/org/apache/maven/plugin/jira/JiraUnicodeTestCase.java b/maven-changes-plugin/src/test/java/org/apache/maven/plugin/jira/JiraUnicodeTestCase.java new file mode 100644 -index 00000000..e43aef21 +index 0000000000..113b1af698 --- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/prefrences/BuilderPreferencePage.java -@@ -0,0 +1,129 @@ -+package org.eclipselabs.tapiji.tools.core.ui.prefrences; -+ -+import org.eclipse.jface.preference.IPreferenceStore; -+import org.eclipse.jface.preference.PreferencePage; -+import org.eclipse.swt.SWT; -+import org.eclipse.swt.events.SelectionAdapter; -+import org.eclipse.swt.events.SelectionEvent; -+import org.eclipse.swt.layout.GridData; -+import org.eclipse.swt.layout.GridLayout; -+import org.eclipse.swt.widgets.Button; -+import org.eclipse.swt.widgets.Composite; -+import org.eclipse.swt.widgets.Control; -+import org.eclipse.swt.widgets.Label; -+import org.eclipse.ui.IWorkbench; -+import org.eclipse.ui.IWorkbenchPreferencePage; -+import org.eclipselabs.tapiji.tools.core.Activator; -+import org.eclipselabs.tapiji.tools.core.model.preferences.TapiJIPreferences; -+ -+public class BuilderPreferencePage extends PreferencePage implements -+ IWorkbenchPreferencePage { -+ private static final int INDENT = 20; -+ -+ private Button checkSameValueButton; -+ private Button checkMissingValueButton; -+ private Button checkMissingLanguageButton; ++++ b/maven-changes-plugin/src/test/java/org/apache/maven/plugin/jira/JiraUnicodeTestCase.java +@@ -0,0 +1,61 @@ ++/* ++ * Licensed to the Apache Software Foundation (ASF) under one ++ * or more contributor license agreements. See the NOTICE file ++ * distributed with this work for additional information ++ * regarding copyright ownership. The ASF licenses this file ++ * to you under the Apache License, Version 2.0 (the ++ * ""License""); you may not use this file except in compliance ++ * with the License. You may obtain a copy of the License at ++ * ++ * http://www.apache.org/licenses/LICENSE-2.0 ++ * ++ * Unless required by applicable law or agreed to in writing, ++ * software distributed under the License is distributed on an ++ * ""AS IS"" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY ++ * KIND, either express or implied. See the License for the ++ * specific language governing permissions and limitations ++ * under the License. 
++ */ + -+ private Button rbAuditButton; ++package org.apache.maven.plugin.jira; + -+ private Button sourceAuditButton; -+ -+ -+ @Override -+ public void init(IWorkbench workbench) { -+ setPreferenceStore(Activator.getDefault().getPreferenceStore()); -+ } ++import java.io.File; ++import java.io.InputStream; + -+ @Override -+ protected Control createContents(Composite parent) { -+ IPreferenceStore prefs = getPreferenceStore(); -+ Composite composite = new Composite(parent, SWT.SHADOW_OUT); ++import org.apache.commons.io.FileUtils; ++import org.apache.commons.io.IOUtils; ++import org.apache.maven.plugin.testing.AbstractMojoTestCase; + -+ composite.setLayout(new GridLayout(1,false)); -+ composite.setLayoutData(new GridData(SWT.FILL, SWT.TOP, true, true)); -+ -+ Composite field = createComposite(parent, 0, 10); -+ Label descriptionLabel = new Label(composite, SWT.NONE); -+ descriptionLabel.setText(""Select types of reported problems:""); -+ -+ field = createComposite(composite, 0, 0); -+ sourceAuditButton = new Button(field, SWT.CHECK); -+ sourceAuditButton.setSelection(prefs.getBoolean(TapiJIPreferences.AUDIT_RESOURCE)); -+ sourceAuditButton.setText(""Check source code for non externalizated Strings""); -+ -+ field = createComposite(composite, 0, 0); -+ rbAuditButton = new Button(field, SWT.CHECK); -+ rbAuditButton.setSelection(prefs.getBoolean(TapiJIPreferences.AUDIT_RB)); -+ rbAuditButton.setText(""Check ResourceBundles on the following problems:""); -+ rbAuditButton.addSelectionListener(new SelectionAdapter() { -+ @Override -+ public void widgetSelected(SelectionEvent event) { -+ setRBAudits(); -+ } -+ }); -+ -+ field = createComposite(composite, INDENT, 0); -+ checkMissingValueButton = new Button(field, SWT.CHECK); -+ checkMissingValueButton.setSelection(prefs.getBoolean(TapiJIPreferences.AUDIT_UNSPEZIFIED_KEY)); -+ checkMissingValueButton.setText(""Missing translation for a key""); -+ -+ field = createComposite(composite, INDENT, 0); -+ checkSameValueButton = new Button(field, SWT.CHECK); -+ checkSameValueButton.setSelection(prefs.getBoolean(TapiJIPreferences.AUDIT_SAME_VALUE)); -+ checkSameValueButton.setText(""Same translations for one key in diffrent languages""); -+ -+ field = createComposite(composite, INDENT, 0); -+ checkMissingLanguageButton = new Button(field, SWT.CHECK); -+ checkMissingLanguageButton.setSelection(prefs.getBoolean(TapiJIPreferences.AUDIT_MISSING_LANGUAGE)); -+ checkMissingLanguageButton.setText(""Missing languages in a ResourceBundle""); -+ -+ setRBAudits(); -+ -+ composite.pack(); -+ -+ return composite; -+ } ++/** ++ * ++ */ ++public class JiraUnicodeTestCase extends AbstractMojoTestCase ++{ ++ /* ++ * Something in Doxia escapes all non-Ascii even when the charset is UTF-8. ++ * This test will fail if that ever changes. 
++ */ ++ private final static String TEST_TURTLES = ""海龟一路下跌。""; ++ public void testUnicodeReport() throws Exception { ++ ++ File pom = new File( getBasedir(), ""/src/test/unit/jira-plugin-config.xml"" ); ++ assertNotNull( pom ); ++ assertTrue( pom.exists() ); ++ ++ JiraMojo mojo = (JiraMojo) lookupMojo( ""jira-report"", pom ); ++ InputStream testJiraXmlStream = JiraUnicodeTestCase.class.getResourceAsStream( ""unicode-jira-results.xml"" ); ++ String jiraXml = IOUtils.toString( testJiraXmlStream, ""utf-8"" ); ++ MockJiraDownloader mockDownloader = new MockJiraDownloader(); ++ mockDownloader.setJiraXml( jiraXml ); ++ mojo.setMockDownloader( mockDownloader ); ++ File outputDir = new File ( ""target/jira-test-output"" ); ++ outputDir.mkdirs(); ++ mojo.setReportOutputDirectory( outputDir ); ++ mojo.execute(); ++ String reportHtml = FileUtils.readFileToString( new File( outputDir, ""jira-report.html"" ), ++ ""utf-8"" ); ++ int turtleIndex = reportHtml.indexOf( TEST_TURTLES ); ++ assertTrue ( turtleIndex >= 0 ); ++ } + -+ @Override -+ protected void performDefaults() { -+ IPreferenceStore prefs = getPreferenceStore(); -+ -+ sourceAuditButton.setSelection(prefs.getDefaultBoolean(TapiJIPreferences.AUDIT_RESOURCE)); -+ rbAuditButton.setSelection(prefs.getDefaultBoolean(TapiJIPreferences.AUDIT_RB)); -+ checkMissingValueButton.setSelection(prefs.getDefaultBoolean(TapiJIPreferences.AUDIT_UNSPEZIFIED_KEY)); -+ checkSameValueButton.setSelection(prefs.getDefaultBoolean(TapiJIPreferences.AUDIT_SAME_VALUE)); -+ checkMissingLanguageButton.setSelection(prefs.getDefaultBoolean(TapiJIPreferences.AUDIT_MISSING_LANGUAGE)); -+ } -+ -+ @Override -+ public boolean performOk() { -+ IPreferenceStore prefs = getPreferenceStore(); -+ -+ prefs.setValue(TapiJIPreferences.AUDIT_RESOURCE, sourceAuditButton.getSelection()); -+ prefs.setValue(TapiJIPreferences.AUDIT_RB, rbAuditButton.getSelection()); -+ prefs.setValue(TapiJIPreferences.AUDIT_UNSPEZIFIED_KEY, checkMissingValueButton.getSelection()); -+ prefs.setValue(TapiJIPreferences.AUDIT_SAME_VALUE, checkSameValueButton.getSelection()); -+ prefs.setValue(TapiJIPreferences.AUDIT_MISSING_LANGUAGE, checkMissingLanguageButton.getSelection()); -+ -+ return super.performOk(); -+ } -+ -+ private Composite createComposite(Composite parent, int marginWidth, int marginHeight) { -+ Composite composite = new Composite(parent, SWT.NONE); -+ -+ GridLayout indentLayout = new GridLayout(1, false); -+ indentLayout.marginWidth = marginWidth; -+ indentLayout.marginHeight = marginHeight; -+ indentLayout.verticalSpacing = 0; -+ composite.setLayout(indentLayout); -+ -+ return composite; -+ } -+ -+ protected void setRBAudits() { -+ boolean selected = rbAuditButton.getSelection(); -+ checkMissingValueButton.setEnabled(selected); -+ checkSameValueButton.setEnabled(selected); -+ checkMissingLanguageButton.setEnabled(selected); -+ } +} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/prefrences/FilePreferencePage.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/prefrences/FilePreferencePage.java +diff --git a/maven-changes-plugin/src/test/java/org/apache/maven/plugin/jira/JiraUnicodeTestProjectStub.java b/maven-changes-plugin/src/test/java/org/apache/maven/plugin/jira/JiraUnicodeTestProjectStub.java new file mode 100644 -index 00000000..ff6c1cbe +index 0000000000..7937bd7ac6 --- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/prefrences/FilePreferencePage.java -@@ -0,0 +1,192 
@@ -+package org.eclipselabs.tapiji.tools.core.ui.prefrences; ++++ b/maven-changes-plugin/src/test/java/org/apache/maven/plugin/jira/JiraUnicodeTestProjectStub.java +@@ -0,0 +1,45 @@ ++/* ++ * Licensed to the Apache Software Foundation (ASF) under one ++ * or more contributor license agreements. See the NOTICE file ++ * distributed with this work for additional information ++ * regarding copyright ownership. The ASF licenses this file ++ * to you under the Apache License, Version 2.0 (the ++ * ""License""); you may not use this file except in compliance ++ * with the License. You may obtain a copy of the License at ++ * ++ * http://www.apache.org/licenses/LICENSE-2.0 ++ * ++ * Unless required by applicable law or agreed to in writing, ++ * software distributed under the License is distributed on an ++ * ""AS IS"" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY ++ * KIND, either express or implied. See the License for the ++ * specific language governing permissions and limitations ++ * under the License. ++ */ ++package org.apache.maven.plugin.jira; + -+import java.util.LinkedList; ++import java.util.Collections; +import java.util.List; + -+import org.eclipse.jface.dialogs.InputDialog; -+import org.eclipse.jface.preference.IPreferenceStore; -+import org.eclipse.jface.preference.PreferencePage; -+import org.eclipse.swt.SWT; -+import org.eclipse.swt.events.MouseEvent; -+import org.eclipse.swt.events.MouseListener; -+import org.eclipse.swt.events.SelectionEvent; -+import org.eclipse.swt.events.SelectionListener; -+import org.eclipse.swt.layout.GridData; -+import org.eclipse.swt.layout.GridLayout; -+import org.eclipse.swt.widgets.Button; -+import org.eclipse.swt.widgets.Composite; -+import org.eclipse.swt.widgets.Control; -+import org.eclipse.swt.widgets.Display; -+import org.eclipse.swt.widgets.Label; -+import org.eclipse.swt.widgets.Table; -+import org.eclipse.swt.widgets.TableItem; -+import org.eclipse.ui.IWorkbench; -+import org.eclipse.ui.IWorkbenchPreferencePage; -+import org.eclipselabs.tapiji.tools.core.Activator; -+import org.eclipselabs.tapiji.tools.core.model.preferences.CheckItem; -+import org.eclipselabs.tapiji.tools.core.model.preferences.TapiJIPreferences; -+import org.eclipselabs.tapiji.tools.core.ui.dialogs.CreatePatternDialoge; -+ -+public class FilePreferencePage extends PreferencePage implements IWorkbenchPreferencePage { -+ -+ private Table table; -+ protected Object dialoge; -+ -+ private Button editPatternButton; -+ private Button removePatternButton; ++import org.apache.maven.artifact.repository.ArtifactRepository; ++import org.apache.maven.artifact.repository.DefaultArtifactRepository; ++import org.apache.maven.artifact.repository.layout.DefaultRepositoryLayout; ++import org.apache.maven.plugin.testing.stubs.MavenProjectStub; + -+ @Override -+ public void init(IWorkbench workbench) { -+ setPreferenceStore(Activator.getDefault().getPreferenceStore()); -+ } ++/** ++ */ ++public class JiraUnicodeTestProjectStub ++ extends MavenProjectStub ++{ + -+ @Override -+ protected Control createContents(Composite parent) { -+ IPreferenceStore prefs = getPreferenceStore(); -+ Composite composite = new Composite(parent, SWT.SHADOW_OUT); ++ /** {@inheritDoc} */ ++ @Override ++ public List getRemoteArtifactRepositories() ++ { ++ ArtifactRepository repository = new DefaultArtifactRepository( ""central"", ""http://repo1.maven.org/maven2"", ++ new DefaultRepositoryLayout() ); + -+ composite.setLayout(new GridLayout(2,false)); -+ composite.setLayoutData(new GridData(SWT.FILL, SWT.TOP, true, 
true)); -+ -+ Label descriptionLabel = new Label(composite, SWT.WRAP); -+ GridData descriptionData = new GridData(SWT.FILL, SWT.TOP, false, false); -+ descriptionData.horizontalSpan=2; -+ descriptionLabel.setLayoutData(descriptionData); -+ descriptionLabel.setText(""Properties-files which match the following pattern, will not be interpreted as ResourceBundle-files""); -+ -+ table = new Table (composite, SWT.SINGLE | SWT.BORDER | SWT.FULL_SELECTION | SWT.CHECK); -+ GridData data = new GridData(SWT.FILL, SWT.FILL, true, true); -+ table.setLayoutData(data); -+ -+ table.addSelectionListener(new SelectionListener() { -+ @Override -+ public void widgetSelected(SelectionEvent e) { -+ TableItem[] selection = table.getSelection(); -+ if (selection.length > 0){ -+ editPatternButton.setEnabled(true); -+ removePatternButton.setEnabled(true); -+ }else{ -+ editPatternButton.setEnabled(false); -+ removePatternButton.setEnabled(false); -+ } -+ } -+ @Override -+ public void widgetDefaultSelected(SelectionEvent e) { -+ // TODO Auto-generated method stub -+ } -+ }); -+ -+ List patternItems = TapiJIPreferences.getNonRbPatternAsList(); -+ for (CheckItem s : patternItems){ -+ s.toTableItem(table); -+ } -+ -+ Composite sitebar = new Composite(composite, SWT.NONE); -+ sitebar.setLayout(new GridLayout(1,false)); -+ sitebar.setLayoutData(new GridData(SWT.LEFT, SWT.TOP, false, true)); -+ -+ Button addPatternButton = new Button(sitebar, SWT.NONE); -+ addPatternButton.setLayoutData(new GridData(SWT.FILL, SWT.TOP, true, true)); -+ addPatternButton.setText(""Add Pattern""); -+ addPatternButton.addMouseListener(new MouseListener() { -+ @Override -+ public void mouseUp(MouseEvent e) { -+ // TODO Auto-generated method stub -+ } -+ @Override -+ public void mouseDown(MouseEvent e) { -+ String pattern = ""^.*/""+""((_[a-z]{2,3})|(_[a-z]{2,3}_[A-Z]{2})|(_[a-z]{2,3}_[A-Z]{2}_\\w*))?""+ ""\\.properties$""; -+ CreatePatternDialoge dialog = new CreatePatternDialoge(Display.getDefault().getActiveShell(),pattern); -+ if (dialog.open() == InputDialog.OK) { -+ pattern = dialog.getPattern(); -+ -+ TableItem item = new TableItem(table, SWT.NONE); -+ item.setText(pattern); -+ item.setChecked(true); -+ } -+ } -+ @Override -+ public void mouseDoubleClick(MouseEvent e) { -+ // TODO Auto-generated method stub -+ } -+ }); -+ -+ editPatternButton = new Button(sitebar, SWT.NONE); -+ editPatternButton.setLayoutData(new GridData(SWT.FILL, SWT.TOP, true, true)); -+ editPatternButton.setText(""Edit""); -+ editPatternButton.addMouseListener(new MouseListener() { -+ @Override -+ public void mouseUp(MouseEvent e) { -+ // TODO Auto-generated method stub -+ } -+ @Override -+ public void mouseDown(MouseEvent e) { -+ TableItem[] selection = table.getSelection(); -+ if (selection.length > 0){ -+ String pattern = selection[0].getText(); -+ -+ CreatePatternDialoge dialog = new CreatePatternDialoge(Display.getDefault().getActiveShell(), pattern); -+ if (dialog.open() == InputDialog.OK) { -+ pattern = dialog.getPattern(); -+ TableItem item = selection[0]; -+ item.setText(pattern); -+ } -+ } -+ } -+ @Override -+ public void mouseDoubleClick(MouseEvent e) { -+ // TODO Auto-generated method stub -+ } -+ }); -+ -+ -+ removePatternButton = new Button(sitebar, SWT.NONE); -+ removePatternButton.setLayoutData(new GridData(SWT.FILL, SWT.TOP, true, true)); -+ removePatternButton.setText(""Remove""); -+ removePatternButton.addMouseListener(new MouseListener() { -+ @Override -+ public void mouseUp(MouseEvent e) { -+ // TODO Auto-generated method stub -+ } -+ @Override -+ 
public void mouseDown(MouseEvent e) { -+ TableItem[] selection = table.getSelection(); -+ if (selection.length > 0) -+ table.remove(table.indexOf(selection[0])); -+ } -+ @Override -+ public void mouseDoubleClick(MouseEvent e) { -+ // TODO Auto-generated method stub -+ } -+ }); -+ -+ composite.pack(); -+ -+ return composite; -+ } ++ return Collections.singletonList( repository ); ++ } + -+ @Override -+ protected void performDefaults() { -+ IPreferenceStore prefs = getPreferenceStore(); -+ -+ table.removeAll(); -+ -+ List patterns = TapiJIPreferences.convertStringToList(prefs.getDefaultString(TapiJIPreferences.NON_RB_PATTERN)); -+ for (CheckItem s : patterns){ -+ s.toTableItem(table); -+ } -+ } -+ -+ @Override -+ public boolean performOk() { -+ IPreferenceStore prefs = getPreferenceStore(); -+ List patterns =new LinkedList(); -+ for (TableItem i : table.getItems()){ -+ patterns.add(new CheckItem(i.getText(), i.getChecked())); -+ } -+ -+ prefs.setValue(TapiJIPreferences.NON_RB_PATTERN, TapiJIPreferences.convertListToString(patterns)); -+ -+ return super.performOk(); -+ } +} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/prefrences/TapiHomePreferencePage.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/prefrences/TapiHomePreferencePage.java +diff --git a/maven-changes-plugin/src/test/java/org/apache/maven/plugin/jira/MockJiraDownloader.java b/maven-changes-plugin/src/test/java/org/apache/maven/plugin/jira/MockJiraDownloader.java new file mode 100644 -index 00000000..42450adf +index 0000000000..02d27d8ae4 --- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/prefrences/TapiHomePreferencePage.java -@@ -0,0 +1,34 @@ -+package org.eclipselabs.tapiji.tools.core.ui.prefrences; ++++ b/maven-changes-plugin/src/test/java/org/apache/maven/plugin/jira/MockJiraDownloader.java +@@ -0,0 +1,64 @@ ++/* ++ * Licensed to the Apache Software Foundation (ASF) under one ++ * or more contributor license agreements. See the NOTICE file ++ * distributed with this work for additional information ++ * regarding copyright ownership. The ASF licenses this file ++ * to you under the Apache License, Version 2.0 (the ++ * ""License""); you may not use this file except in compliance ++ * with the License. You may obtain a copy of the License at ++ * ++ * http://www.apache.org/licenses/LICENSE-2.0 ++ * ++ * Unless required by applicable law or agreed to in writing, ++ * software distributed under the License is distributed on an ++ * ""AS IS"" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY ++ * KIND, either express or implied. See the License for the ++ * specific language governing permissions and limitations ++ * under the License. 
++ */ + -+import org.eclipse.jface.preference.PreferencePage; -+import org.eclipse.swt.SWT; -+import org.eclipse.swt.layout.GridData; -+import org.eclipse.swt.layout.GridLayout; -+import org.eclipse.swt.widgets.Composite; -+import org.eclipse.swt.widgets.Control; -+import org.eclipse.swt.widgets.Label; -+import org.eclipse.ui.IWorkbench; -+import org.eclipse.ui.IWorkbenchPreferencePage; ++package org.apache.maven.plugin.jira; + -+public class TapiHomePreferencePage extends PreferencePage implements -+ IWorkbenchPreferencePage { ++import java.io.StringReader; ++import java.util.List; + -+ @Override -+ public void init(IWorkbench workbench) { -+ // TODO Auto-generated method stub ++import org.apache.maven.plugin.MojoExecutionException; ++import org.apache.maven.plugin.issues.Issue; ++import org.xml.sax.InputSource; + -+ } ++/** ++ * Allow test cases in the jira mojo without actually talking to jira. ++ * ++ */ ++public class MockJiraDownloader extends AbstractJiraDownloader ++{ ++ @Override ++ public void doExecute() ++ throws Exception ++ { ++ // do nothing ++ } + -+ @Override -+ protected Control createContents(Composite parent) { -+ Composite composite = new Composite(parent, SWT.NONE); -+ composite.setLayout(new GridLayout(1,true)); -+ composite.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true)); -+ -+ Label description = new Label(composite, SWT.WRAP); -+ description.setText(""See sub-pages for settings.""); -+ -+ return parent; -+ } ++ private String jiraXml; ++ @Override ++ public List getIssueList() ++ throws MojoExecutionException ++ { ++ JiraXML jira = new JiraXML( log, jiraDatePattern ); ++ InputSource inputSource = new InputSource ( new StringReader( jiraXml )); ++ jira.parse( inputSource ); ++ log.info( ""The JIRA version is '"" + jira.getJiraVersion() + ""'"" ); ++ return jira.getIssueList(); ++ } ++ ++ public void setJiraXml( String jiraXml ) ++ { ++ this.jiraXml = jiraXml; ++ } ++ ++ public String getJiraXml() ++ { ++ return jiraXml; ++ } + +} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/quickfix/CreateResourceBundleEntry.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/quickfix/CreateResourceBundleEntry.java +diff --git a/maven-changes-plugin/src/test/resources/org/apache/maven/plugin/jira/unicode-jira-results.xml b/maven-changes-plugin/src/test/resources/org/apache/maven/plugin/jira/unicode-jira-results.xml new file mode 100644 -index 00000000..aad8e719 +index 0000000000..00b337c77e --- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/quickfix/CreateResourceBundleEntry.java -@@ -0,0 +1,82 @@ -+package org.eclipselabs.tapiji.tools.core.ui.quickfix; ++++ b/maven-changes-plugin/src/test/resources/org/apache/maven/plugin/jira/unicode-jira-results.xml +@@ -0,0 +1,309 @@ ++ ++ ++ ++ ++ Professional Computer Services S.A. JIRA ++ http://pcsjira.slg.gr/secure/IssueNavigator.jspa?reset=true&pid=10101&status=6&resolution=1&sorter/field=created&sorter/order=DESC&sorter/field=priority&sorter/order=DESC ++ An XML representation of a search request ++ en-us ++ 3.13.2 ++ 335 ++ 26-11-2008 ++ Enterprise ++ ++ ++ ++[PCSUNIT-2] 海龟一路下跌。 Απεικόνιση σε EXCEL των data των φορμών. Περίπτωση με πολλά blocks ++http://pcsjira.slg.gr/browse/PCSUNIT-2 ++ ++ ++ ++ PCSUNIT-2 ++ 海龟一路下跌。 Απεικόνιση σε EXCEL των data των φορμών. 
Περίπτωση με πολλά blocks ++ ++ Improvement + -+import org.eclipse.core.filebuffers.FileBuffers; -+import org.eclipse.core.filebuffers.ITextFileBuffer; -+import org.eclipse.core.filebuffers.ITextFileBufferManager; -+import org.eclipse.core.resources.IMarker; -+import org.eclipse.core.resources.IResource; -+import org.eclipse.core.runtime.CoreException; -+import org.eclipse.core.runtime.IPath; -+import org.eclipse.jface.dialogs.InputDialog; -+import org.eclipse.jface.text.IDocument; -+import org.eclipse.swt.graphics.Image; -+import org.eclipse.swt.widgets.Display; -+import org.eclipse.ui.IMarkerResolution2; -+import org.eclipselabs.tapiji.tools.core.Logger; -+import org.eclipselabs.tapiji.tools.core.builder.StringLiteralAuditor; -+import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; -+import org.eclipselabs.tapiji.tools.core.ui.dialogs.CreateResourceBundleEntryDialog; ++ ++ Normal ++ Closed ++ Fixed ++ ++ Internal Issue ++ ++ Nikolaos Stais ++ ++ Nikolaos Stais ++ ++ Wed, 18 Mar 2009 11:04:28 +0200 (EET) ++ Thu, 23 Apr 2009 13:22:19 +0300 (EEST) + ++ ++ ++ ++ ++ ++ 0 ++ ++ + -+public class CreateResourceBundleEntry implements IMarkerResolution2 { ++ ++ Εχει πραγματοποιηθεί μια πρώτη προσέγγιση κ υλοποίηση, χρειάζεται ΤΕΣΤ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ Α/Η ++ ++ 2.0 ++ ++ ++ ++ ++ + -+ private String key; -+ private String bundleId; -+ -+ public CreateResourceBundleEntry (String key, ResourceBundleManager manager, String bundleId) { -+ this.key = key; -+ this.bundleId = bundleId; -+ } -+ -+ @Override -+ public String getDescription() { -+ return ""Creates a new Resource-Bundle entry for the property-key '"" + key + ""'""; -+ } ++ ++[PCSUNIT-1] ΔΗΜΙΟΥΡΓΙΑ ΔΙΑΔΙΚΑΣΙΑΣ ΓΙΑ UNDO CHANGES ++http://pcsjira.slg.gr/browse/PCSUNIT-1 + -+ @Override -+ public Image getImage() { -+ // TODO Auto-generated method stub -+ return null; -+ } ++ ++ ++ PCSUNIT-1 ++ ΔΗΜΙΟΥΡΓΙΑ ΔΙΑΔΙΚΑΣΙΑΣ ΓΙΑ UNDO CHANGES + -+ @Override -+ public String getLabel() { -+ return ""Create Resource-Bundle entry for '"" + key + ""'""; -+ } ++ New Feature + -+ @Override -+ public void run(IMarker marker) { -+ int startPos = marker.getAttribute(IMarker.CHAR_START, 0); -+ int endPos = marker.getAttribute(IMarker.CHAR_END, 0) - startPos; -+ IResource resource = marker.getResource(); -+ -+ ITextFileBufferManager bufferManager = FileBuffers.getTextFileBufferManager(); -+ IPath path = resource.getRawLocation(); -+ try { -+ bufferManager.connect(path, null); -+ ITextFileBuffer textFileBuffer = bufferManager.getTextFileBuffer(path); -+ IDocument document = textFileBuffer.getDocument(); -+ -+ CreateResourceBundleEntryDialog dialog = new CreateResourceBundleEntryDialog( -+ Display.getDefault().getActiveShell(), -+ ResourceBundleManager.getManager(resource.getProject()), -+ key != null ? 
key : """", -+ """", -+ bundleId, -+ """"); -+ if (dialog.open() != InputDialog.OK) -+ return; -+ } catch (Exception e) { -+ Logger.logError(e); -+ } finally { -+ try { -+ (new StringLiteralAuditor()).buildResource(resource, null); -+ bufferManager.disconnect(path, null); -+ } catch (CoreException e) { -+ Logger.logError(e); -+ } -+ } -+ -+ } ++ ++ Minor ++ Closed ++ Fixed ++ ++ Internal Issue ++ ++ Nikolaos Stais ++ ++ Nikolaos Stais ++ ++ Wed, 4 Feb 2009 13:47:25 +0200 (EET) ++ Wed, 13 May 2009 13:32:49 +0300 (EEST) + -+} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/views/messagesview/MessagesView.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/views/messagesview/MessagesView.java -new file mode 100644 -index 00000000..b2d18efa ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/views/messagesview/MessagesView.java -@@ -0,0 +1,513 @@ -+package org.eclipselabs.tapiji.tools.core.ui.views.messagesview; -+ -+import java.util.ArrayList; -+import java.util.List; -+import java.util.Locale; -+import java.util.Set; -+ -+import org.eclipse.core.runtime.IProgressMonitor; -+import org.eclipse.core.runtime.IStatus; -+import org.eclipse.core.runtime.Status; -+import org.eclipse.jface.action.Action; -+import org.eclipse.jface.action.IMenuListener; -+import org.eclipse.jface.action.IMenuManager; -+import org.eclipse.jface.action.IToolBarManager; -+import org.eclipse.jface.action.MenuManager; -+import org.eclipse.jface.action.Separator; -+import org.eclipse.jface.dialogs.InputDialog; -+import org.eclipse.swt.SWT; -+import org.eclipse.swt.events.ModifyEvent; -+import org.eclipse.swt.events.ModifyListener; -+import org.eclipse.swt.layout.GridData; -+import org.eclipse.swt.layout.GridLayout; -+import org.eclipse.swt.widgets.Composite; -+import org.eclipse.swt.widgets.Display; -+import org.eclipse.swt.widgets.Event; -+import org.eclipse.swt.widgets.Label; -+import org.eclipse.swt.widgets.Listener; -+import org.eclipse.swt.widgets.Menu; -+import org.eclipse.swt.widgets.Scale; -+import org.eclipse.swt.widgets.Text; -+import org.eclipse.ui.IActionBars; -+import org.eclipse.ui.IMemento; -+import org.eclipse.ui.IViewSite; -+import org.eclipse.ui.PartInitException; -+import org.eclipse.ui.part.ViewPart; -+import org.eclipse.ui.progress.UIJob; -+import org.eclipselabs.tapiji.tools.core.Activator; -+import org.eclipselabs.tapiji.tools.core.Logger; -+import org.eclipselabs.tapiji.tools.core.model.IResourceBundleChangedListener; -+import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleChangedEvent; -+import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; -+import org.eclipselabs.tapiji.tools.core.model.view.MessagesViewState; -+import org.eclipselabs.tapiji.tools.core.ui.dialogs.ResourceBundleSelectionDialog; -+import org.eclipselabs.tapiji.tools.core.ui.widgets.PropertyKeySelectionTree; -+import org.eclipselabs.tapiji.tools.core.util.ImageUtils; ++ ++ ++ ++ ++ ++ 0 ++ ++ + ++ ++ Έγινε μια πρώτη προσέγγιση και ενημέρωση συναδέλφων. Σε αναμονή δοκιμής από τους ενδιαφερόμενους. ++ &nbsp; ++<br/> + -+public class MessagesView extends ViewPart implements IResourceBundleChangedListener { ++<br/> ++Έχοντας λάβει κάποια requests από πελάτες οι οποίοι επιθυμούν να κάνουν undo σε batch ροές, ++<br/> + -+ /** -+ * The ID of the view as specified by the extension. 
-+ */ -+ public static final String ID = ""org.eclipselabs.tapiji.tools.core.views.MessagesView""; ++<br/> ++θεωρώ ότι υπάρχει ένας εύκολος τρόπος να το πετύχουμε, προκειμένου να αποφύγουμε να κρατάμε ιστορικότητα στις αλλαγές (που έως τώρα θα κάναμε commit). ++<br/> + -+ // View State -+ private IMemento memento; -+ private MessagesViewState viewState; -+ -+ // Search-Bar -+ private Text filter; -+ -+ // Property-Key widget -+ private PropertyKeySelectionTree treeViewer; -+ private Scale fuzzyScaler; -+ private Label lblScale; -+ -+ /*** ACTIONS ***/ -+ private List visibleLocaleActions; -+ private Action selectResourceBundle; -+ private Action enableFuzzyMatching; -+ private Action editable; -+ -+ // Parent component -+ Composite parent; -+ -+ // context-dependent menu actions -+ ResourceBundleEntry contextDependentMenu; -+ -+ /** -+ * The constructor. -+ */ -+ public MessagesView() { -+ } -+ -+ /** -+ * This is a callback that will allow us -+ * to create the viewer and initialize it. -+ */ -+ public void createPartControl(Composite parent) { -+ this.parent = parent; -+ -+ initLayout (parent); -+ initSearchBar (parent); -+ initMessagesTree (parent); -+ makeActions(); -+ hookContextMenu(); -+ contributeToActionBars(); -+ initListener (parent); -+ } -+ -+ protected void initListener (Composite parent) { -+ filter.addModifyListener(new ModifyListener() { -+ -+ @Override -+ public void modifyText(ModifyEvent e) { -+ treeViewer.setSearchString(filter.getText()); -+ } -+ }); -+ } -+ -+ protected void initLayout (Composite parent) { -+ GridLayout mainLayout = new GridLayout (); -+ mainLayout.numColumns = 1; -+ parent.setLayout(mainLayout); -+ -+ } -+ -+ protected void initSearchBar (Composite parent) { -+ // Construct a new parent container -+ Composite parentComp = new Composite(parent, SWT.BORDER); -+ parentComp.setLayout(new GridLayout(4, false)); -+ parentComp.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false)); -+ -+ Label lblSearchText = new Label (parentComp, SWT.NONE); -+ lblSearchText.setText(""Search expression:""); -+ -+ // define the grid data for the layout -+ GridData gridData = new GridData(); -+ gridData.horizontalAlignment = SWT.FILL; -+ gridData.grabExcessHorizontalSpace = false; -+ gridData.horizontalSpan = 1; -+ lblSearchText.setLayoutData(gridData); -+ -+ filter = new Text (parentComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER); -+ if (viewState.getSearchString() != null) { -+ if (viewState.getSearchString().length() > 1 && -+ viewState.getSearchString().startsWith(""*"") && viewState.getSearchString().endsWith(""*"")) -+ filter.setText(viewState.getSearchString().substring(1).substring(0, viewState.getSearchString().length()-2)); -+ else -+ filter.setText(viewState.getSearchString()); -+ -+ } -+ GridData gridDatas = new GridData(); -+ gridDatas.horizontalAlignment = SWT.FILL; -+ gridDatas.grabExcessHorizontalSpace = true; -+ gridDatas.horizontalSpan = 3; -+ filter.setLayoutData(gridDatas); -+ -+ lblScale = new Label (parentComp, SWT.None); -+ lblScale.setText(""\nPrecision:""); -+ GridData gdScaler = new GridData(); -+ gdScaler.verticalAlignment = SWT.CENTER; -+ gdScaler.grabExcessVerticalSpace = true; -+ gdScaler.horizontalSpan = 1; -+// gdScaler.widthHint = 150; -+ lblScale.setLayoutData(gdScaler); -+ -+ // Add a scale for specification of fuzzy Matching precision -+ fuzzyScaler = new Scale (parentComp, SWT.None); -+ fuzzyScaler.setMaximum(100); -+ fuzzyScaler.setMinimum(0); -+ fuzzyScaler.setIncrement(1); -+ fuzzyScaler.setPageIncrement(5); -+ 
fuzzyScaler.setSelection(Math.round((treeViewer != null ? treeViewer.getMatchingPrecision() : viewState.getMatchingPrecision())*100.f)); -+ fuzzyScaler.addListener (SWT.Selection, new Listener() { -+ public void handleEvent (Event event) { -+ float val = 1f-(Float.parseFloat( -+ (fuzzyScaler.getMaximum() - -+ fuzzyScaler.getSelection() + -+ fuzzyScaler.getMinimum()) + """") / 100.f); -+ treeViewer.setMatchingPrecision (val); -+ } -+ }); -+ fuzzyScaler.setSize(100, 10); -+ -+ GridData gdScalers = new GridData(); -+ gdScalers.verticalAlignment = SWT.BEGINNING; -+ gdScalers.horizontalAlignment = SWT.FILL; -+ gdScalers.horizontalSpan = 3; -+ fuzzyScaler.setLayoutData(gdScalers); -+ refreshSearchbarState(); -+ } -+ -+ protected void refreshSearchbarState () { -+ lblScale.setVisible(treeViewer != null ? treeViewer.isFuzzyMatchingEnabled() : viewState.isFuzzyMatchingEnabled()); -+ fuzzyScaler.setVisible(treeViewer != null ? treeViewer.isFuzzyMatchingEnabled() : viewState.isFuzzyMatchingEnabled()); -+ if (treeViewer != null ? treeViewer.isFuzzyMatchingEnabled() : viewState.isFuzzyMatchingEnabled()) { -+ ((GridData)lblScale.getLayoutData()).heightHint = 40; -+ ((GridData)fuzzyScaler.getLayoutData()).heightHint = 40; -+ } else { -+ ((GridData)lblScale.getLayoutData()).heightHint = 0; -+ ((GridData)fuzzyScaler.getLayoutData()).heightHint = 0; -+ } ++<br/> ++&nbsp; ++<br/> + -+ lblScale.getParent().layout(); -+ lblScale.getParent().getParent().layout(); -+ } -+ -+ protected void initMessagesTree(Composite parent) { -+ if (viewState.getSelectedProjectName() != null && viewState.getSelectedProjectName().trim().length() > 0 ) { -+ try { -+ ResourceBundleManager.getManager(viewState.getSelectedProjectName()) -+ .registerResourceBundleChangeListener(viewState.getSelectedBundleId(), this); -+ -+ } catch (Exception e) {} -+ } -+ treeViewer = new PropertyKeySelectionTree(getViewSite(), getSite(), parent, SWT.NONE, -+ viewState.getSelectedProjectName(), viewState.getSelectedBundleId(), -+ viewState.getVisibleLocales()); -+ if (viewState.getSelectedProjectName() != null && viewState.getSelectedProjectName().trim().length() > 0 ) { -+ if (viewState.getVisibleLocales() == null) -+ viewState.setVisibleLocales(treeViewer.getVisibleLocales()); -+ -+ if (viewState.getSortings() != null) -+ treeViewer.setSortInfo(viewState.getSortings()); -+ -+ treeViewer.enableFuzzyMatching(viewState.isFuzzyMatchingEnabled()); -+ treeViewer.setMatchingPrecision(viewState.getMatchingPrecision()); -+ treeViewer.setEditable(viewState.isEditable()); -+ -+ if (viewState.getSearchString() != null) -+ treeViewer.setSearchString(viewState.getSearchString()); -+ } -+ // define the grid data for the layout -+ GridData gridData = new GridData(); -+ gridData.horizontalAlignment = SWT.FILL; -+ gridData.verticalAlignment = SWT.FILL; -+ gridData.grabExcessHorizontalSpace = true; -+ gridData.grabExcessVerticalSpace = true; -+ treeViewer.setLayoutData(gridData); -+ } ++<br/> ++Το συγκεκριμένο θέμα δεν έχει εφαρμοστεί (από όσο ξέρω) με τρόπο που να εισάγουμε δεδομένα και να τα δείχνουμε χωρίς να έχουν γίνει commit. ++<br/> + -+ /** -+ * Passing the focus request to the viewer's control. 
-+ */ -+ public void setFocus() { -+ treeViewer.setFocus(); -+ } -+ -+ protected void redrawTreeViewer () { -+ parent.setRedraw(false); -+ treeViewer.dispose(); -+ try { -+ initMessagesTree(parent); -+ makeActions(); -+ contributeToActionBars(); -+ hookContextMenu(); -+ } catch (Exception e) { -+ Logger.logError(e); -+ } -+ parent.setRedraw(true); -+ parent.layout(true); -+ treeViewer.layout(true); -+ refreshSearchbarState(); -+ } -+ -+ /*** ACTIONS ***/ -+ private void makeVisibleLocalesActions () { -+ if (viewState.getSelectedProjectName() == null) { -+ return; -+ } -+ -+ visibleLocaleActions = new ArrayList(); -+ Set locales = ResourceBundleManager.getManager( -+ viewState.getSelectedProjectName()).getProvidedLocales(viewState.getSelectedBundleId()); -+ List visibleLocales = treeViewer.getVisibleLocales(); -+ for (final Locale locale : locales) { -+ Action langAction = new Action () { ++<br/> ++&nbsp; ++<br/> + -+ @Override -+ public void run() { -+ super.run(); -+ List visibleL = treeViewer.getVisibleLocales(); -+ if (this.isChecked()) { -+ if (!visibleL.contains(locale)) { -+ visibleL.add(locale); -+ } -+ } else { -+ visibleL.remove(locale); -+ } -+ viewState.setVisibleLocales(visibleL); -+ redrawTreeViewer(); -+ } -+ -+ }; -+ if (locale != null && locale.getDisplayName().trim().length() > 0) { -+ langAction.setText(locale.getDisplayName(Locale.US)); -+ } else { -+ langAction.setText(""Default""); -+ } -+ langAction.setChecked(visibleLocales.contains(locale)); -+ visibleLocaleActions.add(langAction); -+ } -+ } -+ -+ private void makeActions() { -+ makeVisibleLocalesActions(); -+ -+ selectResourceBundle = new Action () { ++<br/> ++Τολμώ να κάνω μια πρόταση προς διερεύνηση ... J ++<br/> + -+ @Override -+ public void run() { -+ super.run(); -+ ResourceBundleSelectionDialog sd = new ResourceBundleSelectionDialog (getViewSite().getShell(), null); -+ if (sd.open() == InputDialog.OK) { -+ String resourceBundle = sd.getSelectedBundleId(); -+ -+ if (resourceBundle != null) { -+ int iSep = resourceBundle.indexOf(""/""); -+ viewState.setSelectedProjectName(resourceBundle.substring(0, iSep)); -+ viewState.setSelectedBundleId(resourceBundle.substring(iSep +1)); -+ viewState.setVisibleLocales(null); -+ redrawTreeViewer(); -+ } -+ } -+ } -+ }; -+ -+ selectResourceBundle.setText(""Resource-Bundle ...""); -+ selectResourceBundle.setDescription(""Allows you to select the Resource-Bundle which is used as message-source.""); -+ selectResourceBundle.setImageDescriptor(Activator.getImageDescriptor(ImageUtils.IMAGE_RESOURCE_BUNDLE)); -+ -+ contextDependentMenu = new ResourceBundleEntry(treeViewer, !treeViewer.getViewer().getSelection().isEmpty()); -+ -+ enableFuzzyMatching = new Action () { -+ public void run () { -+ super.run(); -+ treeViewer.enableFuzzyMatching(!treeViewer.isFuzzyMatchingEnabled()); -+ viewState.setFuzzyMatchingEnabled(treeViewer.isFuzzyMatchingEnabled()); -+ refreshSearchbarState(); -+ } -+ }; -+ enableFuzzyMatching.setText(""Fuzzy-Matching""); -+ enableFuzzyMatching.setDescription(""Enables Fuzzy matching for searching Resource-Bundle entries.""); -+ enableFuzzyMatching.setChecked(viewState.isFuzzyMatchingEnabled()); -+ enableFuzzyMatching.setToolTipText(enableFuzzyMatching.getDescription()); -+ -+ editable = new Action () { -+ public void run () { -+ super.run(); -+ treeViewer.setEditable(!treeViewer.isEditable()); -+ viewState.setEditable(treeViewer.isEditable()); -+ } -+ }; -+ editable.setText(""Editable""); -+ editable.setDescription(""Allows you to edit Resource-Bundle entries.""); 
-+ editable.setChecked(viewState.isEditable()); -+ editable.setToolTipText(editable.getDescription()); -+ } -+ -+ private void contributeToActionBars() { -+ IActionBars bars = getViewSite().getActionBars(); -+ fillLocalPullDown(bars.getMenuManager()); -+ fillLocalToolBar(bars.getToolBarManager()); -+ } -+ -+ private void fillLocalPullDown(IMenuManager manager) { -+ manager.removeAll(); -+ manager.add(selectResourceBundle); -+ manager.add(enableFuzzyMatching); -+ manager.add(editable); -+ manager.add(new Separator()); -+ -+ manager.add(contextDependentMenu); -+ manager.add(new Separator()); -+ -+ if (visibleLocaleActions == null) return; -+ -+ for (Action loc : visibleLocaleActions) { -+ manager.add(loc); -+ } -+ } ++<br/> ++&nbsp; ++<br/> + -+ /*** CONTEXT MENU ***/ -+ private void hookContextMenu() { -+ new UIJob(""set PopupMenu""){ -+ @Override -+ public IStatus runInUIThread(IProgressMonitor monitor) { -+ MenuManager menuMgr = new MenuManager(""#PopupMenu""); -+ menuMgr.setRemoveAllWhenShown(true); -+ menuMgr.addMenuListener(new IMenuListener() { -+ public void menuAboutToShow(IMenuManager manager) { -+ fillContextMenu(manager); -+ } -+ }); -+ Menu menu = menuMgr.createContextMenu(treeViewer.getViewer().getControl()); -+ treeViewer.getViewer().getControl().setMenu(menu); -+ getViewSite().registerContextMenu(menuMgr, treeViewer.getViewer()); -+ -+ return Status.OK_STATUS; -+ } -+ }.schedule(); -+ } -+ -+ private void fillContextMenu(IMenuManager manager) { -+ manager.removeAll(); -+ manager.add(selectResourceBundle); -+ manager.add(enableFuzzyMatching); -+ manager.add(editable); -+ manager.add(new Separator()); -+ -+ manager.add(new ResourceBundleEntry(treeViewer, !treeViewer.getViewer().getSelection().isEmpty())); -+ manager.add(new Separator()); -+ -+ for (Action loc : visibleLocaleActions) { -+ manager.add(loc); -+ } -+ // Other plug-ins can contribute there actions here -+ //manager.add(new Separator(IWorkbenchActionConstants.MB_ADDITIONS)); -+ } -+ -+ private void fillLocalToolBar(IToolBarManager manager) { -+ manager.add(selectResourceBundle); -+ } -+ -+ @Override -+ public void saveState (IMemento memento) { -+ super.saveState(memento); -+ try { -+ viewState.setEditable (treeViewer.isEditable()); -+ viewState.setSortings(treeViewer.getSortInfo()); -+ viewState.setSearchString(treeViewer.getSearchString()); -+ viewState.setFuzzyMatchingEnabled(treeViewer.isFuzzyMatchingEnabled()); -+ viewState.setMatchingPrecision (treeViewer.getMatchingPrecision()); -+ viewState.saveState(memento); -+ } catch (Exception e) {} -+ } ++<br/> ++Θεωρητικά θα μπορούμε π.χ. να κάνουμε ενημέρωση με τιμή να βλέπουμε τι μερίδια έχουν κοπεί (καλώντας την inv3) και μετά αν δε πατάμε το οριστικό commit button, βγαίνοντας από την ++<br/> + -+ @Override -+ public void init(IViewSite site, IMemento memento) throws PartInitException { -+ super.init(site, memento); -+ this.memento = memento; -+ -+ // init Viewstate -+ viewState = new MessagesViewState(null, null, false, null); -+ viewState.init(memento); -+ } ++<br/> ++οθόνη θα είναι σαν να μην έχουμε κάνει τίποτα. 
++<br/> + -+ @Override -+ public void resourceBundleChanged(ResourceBundleChangedEvent event) { -+ try { -+ if (!event.getBundle().equals(treeViewer.getResourceBundle())) -+ return; -+ -+ switch (event.getType()) { -+ /*case ResourceBundleChangedEvent.ADDED: -+ if ( viewState.getSelectedProjectName().trim().length() > 0 ) { -+ try { -+ ResourceBundleManager.getManager(viewState.getSelectedProjectName()) -+ .unregisterResourceBundleChangeListener(viewState.getSelectedBundleId(), this); -+ } catch (Exception e) {} -+ } -+ -+ new Thread(new Runnable() { -+ -+ public void run() { -+ try { Thread.sleep(500); } catch (Exception e) { } -+ Display.getDefault().asyncExec(new Runnable() { -+ public void run() { -+ try { -+ redrawTreeViewer(); -+ } catch (Exception e) { e.printStackTrace(); } -+ } -+ }); -+ -+ } -+ }).start(); -+ break; */ -+ case ResourceBundleChangedEvent.ADDED: -+ // update visible locales within the context menu -+ makeVisibleLocalesActions(); -+ hookContextMenu(); -+ break; -+ case ResourceBundleChangedEvent.DELETED: -+ case ResourceBundleChangedEvent.EXCLUDED: -+ if ( viewState.getSelectedProjectName().trim().length() > 0 ) { -+ try { -+ ResourceBundleManager.getManager(viewState.getSelectedProjectName()) -+ .unregisterResourceBundleChangeListener(viewState.getSelectedBundleId(), this); -+ -+ } catch (Exception e) {} -+ } -+ viewState = new MessagesViewState(null, null, false, null); -+ -+ new Thread(new Runnable() { -+ -+ public void run() { -+ try { Thread.sleep(500); } catch (Exception e) { } -+ Display.getDefault().asyncExec(new Runnable() { -+ public void run() { -+ try { -+ redrawTreeViewer(); -+ } catch (Exception e) { Logger.logError(e); } -+ } -+ }); -+ -+ } -+ }).start(); -+ } -+ } catch (Exception e) { -+ Logger.logError(e); -+ } -+ } -+ -+ @Override -+ public void dispose(){ -+ try { -+ super.dispose(); -+ treeViewer.dispose(); -+ ResourceBundleManager.getManager(viewState.getSelectedProjectName()).unregisterResourceBundleChangeListener(viewState.getSelectedBundleId(), this); -+ } catch (Exception e) {} -+ } -+} -\ No newline at end of file -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/views/messagesview/ResourceBundleEntry.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/views/messagesview/ResourceBundleEntry.java -new file mode 100644 -index 00000000..b86f66c2 ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/views/messagesview/ResourceBundleEntry.java -@@ -0,0 +1,111 @@ -+package org.eclipselabs.tapiji.tools.core.ui.views.messagesview; ++<br/> ++&nbsp; ++<br/> + -+import org.eclipse.jface.action.ContributionItem; -+import org.eclipse.jface.viewers.ISelectionChangedListener; -+import org.eclipse.jface.viewers.SelectionChangedEvent; -+import org.eclipse.swt.SWT; -+import org.eclipse.swt.events.SelectionEvent; -+import org.eclipse.swt.events.SelectionListener; -+import org.eclipse.swt.widgets.Menu; -+import org.eclipse.swt.widgets.MenuItem; -+import org.eclipse.ui.ISharedImages; -+import org.eclipse.ui.PlatformUI; -+import org.eclipselabs.tapiji.tools.core.ui.widgets.PropertyKeySelectionTree; ++<br/> ++Η όλη δουλεία γίνεται με αλλαγές στα εξής σημεία. ++<br/> + ++<br/> ++&nbsp; ++<br/> + -+public class ResourceBundleEntry extends ContributionItem implements -+ ISelectionChangedListener { ++<br/> ++Στον ON-COMMIT trigger (form-level). 
++<br/> + -+ private PropertyKeySelectionTree parentView; -+ private boolean legalSelection = false; ++<br/> ++Στον ΚΕΥ-COMMIT trigger (form-level) ++<br/> + -+ // Menu-Items -+ private MenuItem addItem; -+ private MenuItem editItem; -+ private MenuItem removeItem; ++<br/> ++Στον KEY-EXIT trigger (form-level) ++<br/> + -+ public ResourceBundleEntry() { -+ } ++<br/> ++Στον KEY-ENTQRY trigger (form-level) ++<br/> + -+ public ResourceBundleEntry(PropertyKeySelectionTree view, boolean legalSelection) { -+ this.legalSelection = legalSelection; -+ this.parentView = view; -+ parentView.addSelectionChangedListener(this); -+ } ++<br/> ++&nbsp; ++<br/> + -+ @Override -+ public void fill(Menu menu, int index) { -+ -+ // MenuItem for adding a new entry -+ addItem = new MenuItem(menu, SWT.NONE, index); -+ addItem.setText(""Add ...""); -+ addItem.setImage(PlatformUI.getWorkbench().getSharedImages() -+ .getImageDescriptor(ISharedImages.IMG_OBJ_ADD).createImage()); -+ addItem.addSelectionListener( new SelectionListener() { -+ -+ @Override -+ public void widgetSelected(SelectionEvent e) { -+ parentView.addNewItem(); -+ } -+ -+ @Override -+ public void widgetDefaultSelected(SelectionEvent e) { -+ -+ } -+ }); ++<br/> ++&nbsp; ++<br/> + -+ if ((parentView == null && legalSelection) || parentView != null) { -+ // MenuItem for editing the currently selected entry -+ editItem = new MenuItem(menu, SWT.NONE, index + 1); -+ editItem.setText(""Edit""); -+ editItem.addSelectionListener(new SelectionListener() { -+ -+ @Override -+ public void widgetSelected(SelectionEvent e) { -+ parentView.editSelectedItem(); -+ } -+ -+ @Override -+ public void widgetDefaultSelected(SelectionEvent e) { -+ -+ } -+ }); ++<br/> ++Οπωσδήποτε αν υπάρχουν αντίστοιχοι triggers σε block-level, τότε πρέπει να επεξεργαστούν κατάλληλα. ++<br/> + -+ // MenuItem for deleting the currently selected entry -+ removeItem = new MenuItem(menu, SWT.NONE, index + 2); -+ removeItem.setText(""Remove""); -+ removeItem.setImage(PlatformUI.getWorkbench().getSharedImages() -+ .getImageDescriptor(ISharedImages.IMG_ETOOL_DELETE) -+ .createImage()); -+ removeItem.addSelectionListener( new SelectionListener() { -+ -+ @Override -+ public void widgetSelected(SelectionEvent e) { -+ parentView.deleteSelectedItems(); -+ } -+ -+ @Override -+ public void widgetDefaultSelected(SelectionEvent e) { -+ -+ } -+ }); -+ enableMenuItems(); -+ } -+ } ++<br/> ++&nbsp; ++<br/> + -+ protected void enableMenuItems() { -+ try { -+ editItem.setEnabled(legalSelection); -+ removeItem.setEnabled(legalSelection); -+ } catch (Exception e) { -+ // silent catch -+ } -+ } ++<br/> ++Στην ουσία, δεν κάνουμε ποτέ commit (αλλά POST;?στέλνει τα records στη βάση), παρά μόνο πατώντας το κουμπί REAL COMMIT. 
++<br/>
+
-+<br/>
++&nbsp;
++<br/>
+
-+<br/>
++You can look at form1.fmb and form2.fmb, which are located in Y:\MFHELLAS_10G\Exedir
++<br/>
+
-+<br/>
++or you can test the following scenario (<a href="http://dioskouros:7778/forms90/f90servlet?config=test_undo">http://dioskouros:7778/forms90/f90servlet?config=test_undo</a>)
++<br/>
++
++<br/>
++in practice:
++<br/>
+
-+<br/>
++&nbsp;
++<br/>
+
-+<br/>
++&nbsp;
++<br/>
+
-+<br/>
++Form1 is based on a simple table with 2 columns. Every time I insert a record and press F10 I see the message
++<br/>
+
-+<br/>
++&quot;1 record applied&quot; (the difference shows here as well: it does not say &quot;1 record applied and saved&quot;; &quot;saved&quot; is missing, i.e. nothing has been committed to the database, but the records have been sent to the database so that other screens can query them.)
++<br/>
+
-+<br/>
++&nbsp;
++<br/>
+
-+<br/>
++If we open the second screen, form2, we can query the changes (insert, delete, update + F10) that we made in screen form1.
++<br/>
+
-+<br/>
++If, in the same screen (form1), we press F7 without first pressing F10 (after a change), the screen discards the changes (deliberately, by code, to avoid the save changes? prompt).
++<br/>
+
-+<br/>
++&nbsp;
++<br/>
+
-+<br/>
++If we exit both screens without pressing the REAL COMMIT button, none of the changes we made in form1 (and saw in the second screen, form2) have been saved.
++<br/>
+
-+<br/>
++&nbsp;
++<br/>
+
-+<br/>
++Inserts, updates and deletes can also be done programmatically. In screen form1, the block's ON-INSERT trigger contains an insert, added for testing purposes, which runs for every new record.
++<br/>
+
-+<br/>
++&nbsp;
++<br/>
+
-+<br/>
++&nbsp;
++<br/>
+
-+<br/>
++&nbsp;
++<br/>
+
-+<br/>
++Please send me your comments, and in particular any potential risks you see, if your time allows.
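++<br/>
+
-+<br/>
++For illustration only, the form-level logic described above could look roughly like the sketch below (Oracle Forms PL/SQL). It is a rough sketch based on this description, not the actual code of form1.fmb / form2.fmb, and the trigger attached to the REAL COMMIT button is only an assumption:
++<br/>
++<pre>
++-- Illustrative sketch only; names and details may differ from form1.fmb.
++
++-- KEY-COMMIT (form-level): send pending records to the database without committing
++BEGIN
++  POST;
++END;
++
++-- KEY-ENTQRY (form-level): discard pending changes before querying, so no save changes? prompt appears
++BEGIN
++  CLEAR_BLOCK(NO_VALIDATE);
++  ENTER_QUERY;
++END;
++
++-- KEY-EXIT (form-level): leave the form without validating, committing, or prompting
++BEGIN
++  EXIT_FORM(NO_VALIDATE);
++END;
++
++-- WHEN-BUTTON-PRESSED on the REAL COMMIT button: the only place a real commit happens
++BEGIN
++  COMMIT_FORM;
++END;
++</pre>
++<br/>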
++<br/> + -+ boolean checkType(Object object) { -+ if (object == null || !(object instanceof IKeyTreeNode[]) -+ || ((IKeyTreeNode[]) object).length == 0) { -+ return false; -+ } -+ IKeyTreeNode[] myTypes = (IKeyTreeNode[]) object; -+ for (int i = 0; i < myTypes.length; i++) { -+ if (myTypes[i] == null) { -+ return false; -+ } -+ } -+ return true; -+ } ++<br/> ++&nbsp; ++<br/> + -+ protected boolean validate(Object object) { -+ return checkType(object); -+ } -+} -\ No newline at end of file -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/views/messagesview/dnd/MessagesDragSource.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/views/messagesview/dnd/MessagesDragSource.java -new file mode 100644 -index 00000000..dfbed204 ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/views/messagesview/dnd/MessagesDragSource.java -@@ -0,0 +1,42 @@ -+package org.eclipselabs.tapiji.tools.core.ui.views.messagesview.dnd; ++<br/> ++&nbsp; ++<br/> + -+import org.eclipse.jface.viewers.IStructuredSelection; -+import org.eclipse.jface.viewers.TreeViewer; -+import org.eclipse.swt.dnd.DragSourceEvent; -+import org.eclipse.swt.dnd.DragSourceListener; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.IKeyTreeNode; ++<br/> ++&nbsp; ++<br/> + -+public class MessagesDragSource implements DragSourceListener { ++<br/> ++Ευχαριστώ πολύ, ++<br/> + -+ private final TreeViewer source; -+ private String bundleId; -+ -+ public MessagesDragSource (TreeViewer sourceView, String bundleId) { -+ source = sourceView; -+ this.bundleId = bundleId; -+ } -+ -+ @Override -+ public void dragFinished(DragSourceEvent event) { -+ -+ } ++<br/> ++ ++ ++ ++ ++ + -+ @Override -+ public void dragSetData(DragSourceEvent event) { -+ IKeyTreeNode selectionObject = (IKeyTreeNode) -+ ((IStructuredSelection)source.getSelection()).toList().get(0); -+ -+ String key = selectionObject.getMessageKey(); -+ -+ // TODO Solve the problem that its not possible to retrieve the editor position of the drop event -+ -+// event.data = ""(new ResourceBundle(\"""" + bundleId + ""\"")).getString(\"""" + key + ""\"")""; -+ event.data = ""\"""" + key + ""\""""; -+ } ++ ++ + -+ @Override -+ public void dragStart(DragSourceEvent event) { -+ event.doit = !source.getSelection().isEmpty(); -+ } ++ ++ ++ Screen Code ++ ++ ΘΕΩΡΙΤΙΚ Α ΕΦΑΡΜΟΓΗ ΣΕ ΟΛΕΣ ΤΙΣ ΟΘΟΝΕΣ + -+} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/views/messagesview/dnd/MessagesDropTarget.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/views/messagesview/dnd/MessagesDropTarget.java ++ ++ ++ ++ ++ ++ ++ +diff --git a/maven-changes-plugin/src/test/unit/jira-plugin-config.xml b/maven-changes-plugin/src/test/unit/jira-plugin-config.xml new file mode 100644 -index 00000000..57a65ef5 +index 0000000000..faec73ef18 --- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/views/messagesview/dnd/MessagesDropTarget.java -@@ -0,0 +1,58 @@ -+package org.eclipselabs.tapiji.tools.core.ui.views.messagesview.dnd; -+ -+import org.eclipse.jface.dialogs.InputDialog; -+import org.eclipse.jface.viewers.TreeViewer; -+import org.eclipse.swt.dnd.DND; -+import org.eclipse.swt.dnd.DropTargetAdapter; -+import org.eclipse.swt.dnd.DropTargetEvent; -+import org.eclipse.swt.dnd.TextTransfer; -+import org.eclipse.swt.widgets.Display; -+import org.eclipse.swt.widgets.TreeItem; -+import 
org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; -+import org.eclipselabs.tapiji.tools.core.ui.dialogs.CreateResourceBundleEntryDialog; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.IValuedKeyTreeNode; ++++ b/maven-changes-plugin/src/test/unit/jira-plugin-config.xml +@@ -0,0 +1,36 @@ ++ ++ + -+public class MessagesDropTarget extends DropTargetAdapter { -+ private final TreeViewer target; -+ private final ResourceBundleManager manager; -+ private String bundleName; -+ -+ public MessagesDropTarget (TreeViewer viewer, ResourceBundleManager manager, String bundleName) { -+ super(); -+ target = viewer; -+ this.manager = manager; -+ this.bundleName = bundleName; -+ } -+ -+ public void dragEnter (DropTargetEvent event) { -+ } -+ -+ public void drop (DropTargetEvent event) { -+ if (event.detail != DND.DROP_COPY) -+ return; -+ -+ if (TextTransfer.getInstance().isSupportedType (event.currentDataType)) { -+ //event.feedback = DND.FEEDBACK_INSERT_BEFORE; -+ String newKeyPrefix = """"; -+ -+ if (event.item instanceof TreeItem && -+ ((TreeItem) event.item).getData() instanceof IValuedKeyTreeNode) { -+ newKeyPrefix = ((IValuedKeyTreeNode) ((TreeItem) event.item).getData()).getMessageKey(); ++ ++ changes-plugin-test ++ ++ ++ ++ maven-changes-plugin ++ ++ ${localRepository} ++ ++ target/jira-test-output ++ Key,Summary,Status,Resolution,Assignee,Description ++ ++ ++ ++ ++" +541aae12ef82767479bcd53afb3681b46dd890a5,spring-framework,SPR-5802 - NullPointerException when using- @CookieValue annotation--,c,https://github.com/spring-projects/spring-framework,"diff --git a/org.springframework.web.portlet/src/main/java/org/springframework/web/portlet/mvc/annotation/AnnotationMethodHandlerAdapter.java b/org.springframework.web.portlet/src/main/java/org/springframework/web/portlet/mvc/annotation/AnnotationMethodHandlerAdapter.java +index f6b7c4204360..e8fa23d6ac5f 100644 +--- a/org.springframework.web.portlet/src/main/java/org/springframework/web/portlet/mvc/annotation/AnnotationMethodHandlerAdapter.java ++++ b/org.springframework.web.portlet/src/main/java/org/springframework/web/portlet/mvc/annotation/AnnotationMethodHandlerAdapter.java +@@ -600,9 +600,12 @@ protected Object resolveCookieValue(String cookieName, Class paramType, NativeWe + if (Cookie.class.isAssignableFrom(paramType)) { + return cookieValue; + } +- else { ++ else if (cookieValue != null) { + return cookieValue.getValue(); + } ++ else { ++ return null; + } -+ -+ String message = (String)event.data; -+ -+ CreateResourceBundleEntryDialog dialog = new CreateResourceBundleEntryDialog( -+ Display.getDefault().getActiveShell(), -+ manager, -+ newKeyPrefix.trim().length() > 0 ? 
newKeyPrefix + ""."" + ""[Platzhalter]"" : """", -+ message, -+ bundleName, -+ """" -+ ); -+ if (dialog.open() != InputDialog.OK) -+ return; -+ } else -+ event.detail = DND.DROP_NONE; -+ } -+} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/MVTextTransfer.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/MVTextTransfer.java -new file mode 100644 -index 00000000..c0af7f62 ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/MVTextTransfer.java -@@ -0,0 +1,10 @@ -+package org.eclipselabs.tapiji.tools.core.ui.widgets; -+ -+import org.eclipse.swt.dnd.TextTransfer; -+ -+public class MVTextTransfer { -+ -+ private MVTextTransfer () { -+ } -+ -+} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/PropertyKeySelectionTree.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/PropertyKeySelectionTree.java -new file mode 100644 -index 00000000..064dbf54 ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/PropertyKeySelectionTree.java -@@ -0,0 +1,659 @@ -+package org.eclipselabs.tapiji.tools.core.ui.widgets; -+ -+import java.util.ArrayList; -+import java.util.Iterator; -+import java.util.List; -+import java.util.Locale; -+import java.util.SortedMap; -+import java.util.TreeMap; -+ -+import org.eclipse.babel.core.message.manager.IMessagesEditorListener; -+import org.eclipse.babel.core.message.manager.RBManager; -+import org.eclipse.babel.editor.api.KeyTreeFactory; -+import org.eclipse.jdt.ui.JavaUI; -+import org.eclipse.jface.action.Action; -+import org.eclipse.jface.dialogs.InputDialog; -+import org.eclipse.jface.layout.TreeColumnLayout; -+import org.eclipse.jface.viewers.CellEditor; -+import org.eclipse.jface.viewers.ColumnWeightData; -+import org.eclipse.jface.viewers.DoubleClickEvent; -+import org.eclipse.jface.viewers.EditingSupport; -+import org.eclipse.jface.viewers.IDoubleClickListener; -+import org.eclipse.jface.viewers.IElementComparer; -+import org.eclipse.jface.viewers.ISelection; -+import org.eclipse.jface.viewers.ISelectionChangedListener; -+import org.eclipse.jface.viewers.IStructuredSelection; -+import org.eclipse.jface.viewers.StructuredViewer; -+import org.eclipse.jface.viewers.TextCellEditor; -+import org.eclipse.jface.viewers.TreeViewer; -+import org.eclipse.jface.viewers.TreeViewerColumn; -+import org.eclipse.swt.SWT; -+import org.eclipse.swt.dnd.DND; -+import org.eclipse.swt.dnd.DragSource; -+import org.eclipse.swt.dnd.DropTarget; -+import org.eclipse.swt.dnd.TextTransfer; -+import org.eclipse.swt.dnd.Transfer; -+import org.eclipse.swt.events.KeyAdapter; -+import org.eclipse.swt.events.KeyEvent; -+import org.eclipse.swt.events.SelectionEvent; -+import org.eclipse.swt.events.SelectionListener; -+import org.eclipse.swt.widgets.Composite; -+import org.eclipse.swt.widgets.Display; -+import org.eclipse.swt.widgets.Tree; -+import org.eclipse.swt.widgets.TreeColumn; -+import org.eclipse.ui.IViewSite; -+import org.eclipse.ui.IWorkbenchPartSite; -+import org.eclipse.ui.IWorkbenchWindow; -+import org.eclipse.ui.PlatformUI; -+import org.eclipselabs.tapiji.tools.core.Logger; -+import org.eclipselabs.tapiji.tools.core.model.IResourceBundleChangedListener; -+import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleChangedEvent; -+import 
org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; -+import org.eclipselabs.tapiji.tools.core.model.view.SortInfo; -+import org.eclipselabs.tapiji.tools.core.ui.dialogs.CreateResourceBundleEntryDialog; -+import org.eclipselabs.tapiji.tools.core.ui.views.messagesview.dnd.KeyTreeItemDropTarget; -+import org.eclipselabs.tapiji.tools.core.ui.views.messagesview.dnd.MessagesDragSource; -+import org.eclipselabs.tapiji.tools.core.ui.views.messagesview.dnd.MessagesDropTarget; -+import org.eclipselabs.tapiji.tools.core.ui.widgets.filter.ExactMatcher; -+import org.eclipselabs.tapiji.tools.core.ui.widgets.filter.FuzzyMatcher; -+import org.eclipselabs.tapiji.tools.core.ui.widgets.provider.ResKeyTreeContentProvider; -+import org.eclipselabs.tapiji.tools.core.ui.widgets.provider.ResKeyTreeLabelProvider; -+import org.eclipselabs.tapiji.tools.core.ui.widgets.sorter.ValuedKeyTreeItemSorter; -+import org.eclipselabs.tapiji.tools.core.util.EditorUtils; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.IAbstractKeyTreeModel; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.IKeyTreeNode; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.IMessage; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.IMessagesBundle; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.IMessagesBundleGroup; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.IValuedKeyTreeNode; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.TreeType; -+ -+public class PropertyKeySelectionTree extends Composite implements IResourceBundleChangedListener { -+ -+ private final int KEY_COLUMN_WEIGHT = 1; -+ private final int LOCALE_COLUMN_WEIGHT = 1; + } + + @Override" +4f79b07e174ed1f57115a6b0a9f6a6e74e6733ee,hadoop,HADOOP-6932. Namenode start (init) fails because- of invalid kerberos key,c,https://github.com/apache/hadoop,"diff --git a/CHANGES.txt b/CHANGES.txt +index f43935c87233a..72a1e3e6ffa26 100644 +--- a/CHANGES.txt ++++ b/CHANGES.txt +@@ -220,6 +220,9 @@ Trunk (unreleased changes) + HADOOP-6833. IPC leaks call parameters when exceptions thrown. + (Todd Lipcon via Eli Collins) + ++ HADOOP-6932. Namenode start (init) fails because of invalid kerberos ++ key, even when security set to ""simple"" (boryas) + -+ private ResourceBundleManager manager; -+ private String resourceBundle; -+ private List visibleLocales = new ArrayList(); -+ private boolean editable; -+ -+ private IWorkbenchPartSite site; -+ private TreeColumnLayout basicLayout; -+ private TreeViewer treeViewer; -+ private TreeColumn keyColumn; -+ private boolean grouped = true; -+ private boolean fuzzyMatchingEnabled = false; -+ private float matchingPrecision = .75f; -+ private Locale uiLocale = new Locale(""en""); -+ -+ private SortInfo sortInfo; -+ -+ private ResKeyTreeContentProvider contentProvider; -+ private ResKeyTreeLabelProvider labelProvider; -+ private TreeType treeType = TreeType.Tree; + Release 0.21.0 - Unreleased + + INCOMPATIBLE CHANGES +diff --git a/src/java/org/apache/hadoop/security/SecurityUtil.java b/src/java/org/apache/hadoop/security/SecurityUtil.java +index 00187bd6f2401..44ef31ef32989 100644 +--- a/src/java/org/apache/hadoop/security/SecurityUtil.java ++++ b/src/java/org/apache/hadoop/security/SecurityUtil.java +@@ -174,7 +174,7 @@ static String getLocalHostName() throws UnknownHostException { + } + + /** +- * If a keytab has been provided, login as that user. Substitute $host in ++ * Login as a principal specified in config. 
Substitute $host in + * user's Kerberos principal name with a dynamically looked-up fully-qualified + * domain name of the current host. + * +@@ -192,8 +192,9 @@ public static void login(final Configuration conf, + } + + /** +- * If a keytab has been provided, login as that user. Substitute $host in +- * user's Kerberos principal name with hostname. ++ * Login as a principal specified in config. Substitute $host in user's Kerberos principal ++ * name with hostname. If non-secure mode - return. If no keytab available - ++ * bail out with an exception + * + * @param conf + * conf to use +@@ -208,9 +209,14 @@ public static void login(final Configuration conf, + public static void login(final Configuration conf, + final String keytabFileKey, final String userNameKey, String hostname) + throws IOException { +- String keytabFilename = conf.get(keytabFileKey); +- if (keytabFilename == null) + -+ private IMessagesEditorListener editorListener; -+ -+ /*** MATCHER ***/ -+ ExactMatcher matcher; -+ -+ /*** SORTER ***/ -+ ValuedKeyTreeItemSorter sorter; -+ -+ /*** ACTIONS ***/ -+ private Action doubleClickAction; -+ -+ /*** LISTENERS ***/ -+ private ISelectionChangedListener selectionChangedListener; -+ -+ public PropertyKeySelectionTree(IViewSite viewSite, IWorkbenchPartSite site, Composite parent, int style, -+ String projectName, String resources, List locales) { -+ super(parent, style); -+ this.site = site; -+ resourceBundle = resources; -+ -+ if (resourceBundle != null && resourceBundle.trim().length() > 0) { -+ manager = ResourceBundleManager.getManager(projectName); -+ if (locales == null) -+ initVisibleLocales(); -+ else -+ this.visibleLocales = locales; -+ } -+ -+ constructWidget(); -+ -+ if (resourceBundle != null && resourceBundle.trim().length() > 0) { -+ initTreeViewer(); -+ initMatchers(); -+ initSorters(); -+ treeViewer.expandAll(); -+ } -+ -+ hookDragAndDrop(); -+ registerListeners(); ++ if(! 
UserGroupInformation.isSecurityEnabled()) + return; ++ ++ String keytabFilename = conf.get(keytabFileKey); ++ if (keytabFilename == null || keytabFilename.length() == 0) { ++ throw new IOException(""Running in secure mode, but config doesn't have a keytab""); + } -+ + + String principalConfig = conf.get(userNameKey, System + .getProperty(""user.name"")); +diff --git a/src/test/core/org/apache/hadoop/security/TestSecurityUtil.java b/src/test/core/org/apache/hadoop/security/TestSecurityUtil.java +index 14ec74372d091..d5a3a25f90972 100644 +--- a/src/test/core/org/apache/hadoop/security/TestSecurityUtil.java ++++ b/src/test/core/org/apache/hadoop/security/TestSecurityUtil.java +@@ -16,12 +16,15 @@ + */ + package org.apache.hadoop.security; + +-import static org.junit.Assert.*; ++import static org.junit.Assert.assertFalse; ++import static org.junit.Assert.assertTrue; + + import java.io.IOException; + + import javax.security.auth.kerberos.KerberosPrincipal; + ++import org.apache.hadoop.conf.Configuration; ++import org.junit.Assert; + import org.junit.Test; + + public class TestSecurityUtil { +@@ -70,4 +73,23 @@ public void testGetServerPrincipal() throws IOException { + verify(shouldNotReplace, hostname, shouldNotReplace); + verify(shouldNotReplace, shouldNotReplace, shouldNotReplace); + } ++ ++ @Test ++ public void testStartsWithIncorrectSettings() throws IOException { ++ Configuration conf = new Configuration(); ++ conf.set( ++ org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, ++ ""kerberos""); ++ String keyTabKey=""key""; ++ conf.set(keyTabKey, """"); ++ UserGroupInformation.setConfiguration(conf); ++ boolean gotException = false; ++ try { ++ SecurityUtil.login(conf, keyTabKey, """", """"); ++ } catch (IOException e) { ++ // expected ++ gotException=true; ++ } ++ assertTrue(""Exception for empty keytabfile name was expected"", gotException); ++ } + }" +70d5d2c168bd477e3b8330fd7802b280d1f72b8e,camel,Set the isCreateCamelContextPerClass on tests- that can pass with it to speed up the tests--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@1152396 13f79535-47bb-0310-9956-ffa450edef68-,p,https://github.com/apache/camel,"diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/AbstractCXFGreeterRouterTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/AbstractCXFGreeterRouterTest.java +index 54b4ae4946afe..0e1c4525d2cfc 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/AbstractCXFGreeterRouterTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/AbstractCXFGreeterRouterTest.java +@@ -58,7 +58,6 @@ public static int getPort2() { + return CXFTestSupport.getPort2(); + } + +- + protected abstract ClassPathXmlApplicationContext createApplicationContext(); + + @Before +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/AbstractCxfWsdlFirstTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/AbstractCxfWsdlFirstTest.java +index 5c897860f286e..6bbf10faf28e6 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/AbstractCxfWsdlFirstTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/AbstractCxfWsdlFirstTest.java +@@ -50,6 +50,10 @@ public static int getPort2() { + return CXFTestSupport.getPort2(); + } + + @Override -+ public void dispose() { -+ super.dispose(); -+ unregisterListeners(); ++ public boolean isCreateCamelContextPerClass() { ++ return true; + } -+ 
-+ protected void initSorters() { -+ sorter = new ValuedKeyTreeItemSorter(treeViewer, sortInfo); -+ treeViewer.setSorter(sorter); + @Test + public void testInvokingServiceFromCXFClient() throws Exception { + +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyPayloadModeMultiPartNoSpringTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyPayloadModeMultiPartNoSpringTest.java +index b514ad825954d..ded24d8c76247 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyPayloadModeMultiPartNoSpringTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyPayloadModeMultiPartNoSpringTest.java +@@ -48,6 +48,11 @@ public class CXFWsdlOnlyPayloadModeMultiPartNoSpringTest extends CamelTestSuppor + + ""/CXFWsdlOnlyPayloadModeMultiPartNoSpringTest/PersonMultiPart""; + protected Endpoint endpoint; + ++ @Override ++ public boolean isCreateCamelContextPerClass() { ++ return true; + } + -+ public void enableFuzzyMatching(boolean enable) { -+ String pattern = """"; -+ if (matcher != null) { -+ pattern = matcher.getPattern(); -+ -+ if (!fuzzyMatchingEnabled && enable) { -+ if (matcher.getPattern().trim().length() > 1 && matcher.getPattern().startsWith(""*"") -+ && matcher.getPattern().endsWith(""*"")) -+ pattern = pattern.substring(1).substring(0, pattern.length() - 2); -+ matcher.setPattern(null); -+ } -+ } -+ fuzzyMatchingEnabled = enable; -+ initMatchers(); -+ -+ matcher.setPattern(pattern); -+ treeViewer.refresh(); -+ } -+ -+ public boolean isFuzzyMatchingEnabled() { -+ return fuzzyMatchingEnabled; -+ } -+ -+ protected void initMatchers() { -+ treeViewer.resetFilters(); -+ -+ if (fuzzyMatchingEnabled) { -+ matcher = new FuzzyMatcher(treeViewer); -+ ((FuzzyMatcher) matcher).setMinimumSimilarity(matchingPrecision); -+ } else -+ matcher = new ExactMatcher(treeViewer); -+ -+ } -+ -+ protected void initTreeViewer() { -+ this.setRedraw(false); -+ // init content provider -+ contentProvider = new ResKeyTreeContentProvider(manager.getResourceBundle(resourceBundle), visibleLocales, -+ manager, resourceBundle, treeType); -+ treeViewer.setContentProvider(contentProvider); -+ -+ // init label provider -+ labelProvider = new ResKeyTreeLabelProvider(visibleLocales); -+ treeViewer.setLabelProvider(labelProvider); -+ -+ // we need this to keep the tree expanded -+ treeViewer.setComparer(new IElementComparer() { -+ -+ @Override -+ public int hashCode(Object element) { -+ final int prime = 31; -+ int result = 1; -+ result = prime * result -+ + ((toString() == null) ? 
0 : toString().hashCode()); -+ return result; -+ } -+ -+ @Override -+ public boolean equals(Object a, Object b) { -+ if (a == b) { -+ return true; -+ } -+ if (a instanceof IKeyTreeNode && b instanceof IKeyTreeNode) { -+ IKeyTreeNode nodeA = (IKeyTreeNode) a; -+ IKeyTreeNode nodeB = (IKeyTreeNode) b; -+ return nodeA.equals(nodeB); -+ } -+ return false; -+ } -+ }); -+ -+ setTreeStructure(); -+ this.setRedraw(true); + @Before + public void startService() { + endpoint = Endpoint.publish(SERVICE_ADDRESS, new PersonMultiPartImpl()); +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyPayloadModeNoSpringSoap12Test.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyPayloadModeNoSpringSoap12Test.java +index d89d35da58aa0..c70f8b6cf0f00 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyPayloadModeNoSpringSoap12Test.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyPayloadModeNoSpringSoap12Test.java +@@ -25,7 +25,11 @@ + + public class CXFWsdlOnlyPayloadModeNoSpringSoap12Test extends CXFWsdlOnlyPayloadModeNoSpringTest { + +- ++ @Override ++ public boolean isCreateCamelContextPerClass() { ++ return true; + } + -+ public void setTreeStructure() { -+ IAbstractKeyTreeModel model = KeyTreeFactory.createModel(manager.getResourceBundle(resourceBundle)); -+ if (treeViewer.getInput() == null) { -+ treeViewer.setUseHashlookup(true); -+ } -+ org.eclipse.jface.viewers.TreePath[] expandedTreePaths = treeViewer.getExpandedTreePaths(); -+ treeViewer.setInput(model); -+ treeViewer.refresh(); -+ treeViewer.setExpandedTreePaths(expandedTreePaths); + @Before + public void startService() { + endpoint = Endpoint.publish(""http://localhost:"" + port1 + ""/"" +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyPayloadModeNoSpringTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyPayloadModeNoSpringTest.java +index 0e785aa9f3d74..80cb403d02d21 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyPayloadModeNoSpringTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyPayloadModeNoSpringTest.java +@@ -52,6 +52,11 @@ public class CXFWsdlOnlyPayloadModeNoSpringTest extends CamelTestSupport { + protected int port1 = CXFTestSupport.getPort1(); + protected int port2 = CXFTestSupport.getPort2(); + ++ @Override ++ public boolean isCreateCamelContextPerClass() { ++ return true; + } + -+ protected void refreshContent(ResourceBundleChangedEvent event) { -+ if (visibleLocales == null) -+ initVisibleLocales(); -+ -+ // update content provider -+ contentProvider.setLocales(visibleLocales); -+ contentProvider.setBundleGroup(manager.getResourceBundle(resourceBundle)); -+ -+ // init label provider -+ IMessagesBundleGroup group = manager.getResourceBundle(resourceBundle); -+ labelProvider.setLocales(visibleLocales); -+ if (treeViewer.getLabelProvider() != labelProvider) -+ treeViewer.setLabelProvider(labelProvider); -+ -+ // define input of treeviewer -+ setTreeStructure(); + @Before + public void startService() { + endpoint = Endpoint.publish(""http://localhost:"" + port1 + ""/"" + getClass().getSimpleName() +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyTest.java +index 5f4730a2ffdb7..e1060f17c7daf 100644 
+--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyTest.java +@@ -45,6 +45,9 @@ public class CXFWsdlOnlyTest extends CamelSpringTestSupport { + private static int port3 = CXFTestSupport.getPort3(); + private static int port4 = CXFTestSupport.getPort4(); + ++ public boolean isCreateCamelContextPerClass() { ++ return true; + } -+ -+ protected void initVisibleLocales() { -+ SortedMap locSorted = new TreeMap(); -+ sortInfo = new SortInfo(); -+ visibleLocales.clear(); -+ if (resourceBundle != null) { -+ for (Locale l : manager.getProvidedLocales(resourceBundle)) { -+ if (l == null) { -+ locSorted.put(""Default"", null); -+ } else { -+ locSorted.put(l.getDisplayName(uiLocale), l); -+ } -+ } -+ } -+ -+ for (String lString : locSorted.keySet()) { -+ visibleLocales.add(locSorted.get(lString)); -+ } -+ sortInfo.setVisibleLocales(visibleLocales); + + protected ClassPathXmlApplicationContext createApplicationContext() { + System.setProperty(""CXFWsdlOnlyTest.port1"", Integer.toString(port1)); +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerMessageTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerMessageTest.java +index b21895182a9f0..bf8a8012d4590 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerMessageTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerMessageTest.java +@@ -47,6 +47,10 @@ public class CxfConsumerMessageTest extends CamelTestSupport { + protected final String simpleEndpointURI = ""cxf://"" + simpleEndpointAddress + + ""?serviceClass=org.apache.camel.component.cxf.HelloService""; + ++ @Override ++ public boolean isCreateCamelContextPerClass() { ++ return true; + } -+ -+ protected void constructWidget() { -+ basicLayout = new TreeColumnLayout(); -+ this.setLayout(basicLayout); -+ -+ treeViewer = new TreeViewer(this, SWT.FULL_SELECTION | SWT.SINGLE | SWT.BORDER); -+ Tree tree = treeViewer.getTree(); -+ -+ if (resourceBundle != null) { -+ tree.setHeaderVisible(true); -+ tree.setLinesVisible(true); -+ -+ // create tree-columns -+ constructTreeColumns(tree); -+ } else { -+ tree.setHeaderVisible(false); -+ tree.setLinesVisible(false); -+ } -+ -+ makeActions(); -+ hookDoubleClickAction(); -+ -+ // register messages table as selection provider -+ site.setSelectionProvider(treeViewer); + protected RouteBuilder createRouteBuilder() { + return new RouteBuilder() { + public void configure() { +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerPayloadFaultTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerPayloadFaultTest.java +index 8686abb06e8ac..b1fdb15996919 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerPayloadFaultTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerPayloadFaultTest.java +@@ -65,6 +65,10 @@ public class CxfConsumerPayloadFaultTest extends CamelTestSupport { + protected final String fromURI = ""cxf://"" + serviceAddress + ""?"" + + PORT_NAME_PROP + ""&"" + SERVICE_NAME_PROP + ""&"" + WSDL_URL_PROP + ""&dataFormat="" + DataFormat.PAYLOAD; + ++ @Override ++ public boolean isCreateCamelContextPerClass() { ++ return true; + } -+ -+ protected void constructTreeColumns(Tree tree) { -+ tree.removeAll(); -+ //tree.getColumns().length; -+ 
-+ // construct key-column -+ keyColumn = new TreeColumn(tree, SWT.NONE); -+ keyColumn.setText(""Key""); -+ keyColumn.addSelectionListener(new SelectionListener() { -+ -+ @Override -+ public void widgetSelected(SelectionEvent e) { -+ updateSorter(0); -+ } -+ -+ @Override -+ public void widgetDefaultSelected(SelectionEvent e) { -+ updateSorter(0); -+ } -+ }); -+ basicLayout.setColumnData(keyColumn, new ColumnWeightData(KEY_COLUMN_WEIGHT)); -+ -+ if (visibleLocales != null) { -+ for (final Locale l : visibleLocales) { -+ TreeColumn col = new TreeColumn(tree, SWT.NONE); -+ -+ // Add editing support to this table column -+ TreeViewerColumn tCol = new TreeViewerColumn(treeViewer, col); -+ tCol.setEditingSupport(new EditingSupport(treeViewer) { -+ -+ TextCellEditor editor = null; -+ -+ @Override -+ protected void setValue(Object element, Object value) { -+ if (element instanceof IValuedKeyTreeNode) { -+ IValuedKeyTreeNode vkti = (IValuedKeyTreeNode) element; -+ String activeKey = vkti.getMessageKey(); -+ -+ if (activeKey != null) { -+ IMessagesBundleGroup bundleGroup = manager.getResourceBundle(resourceBundle); -+ IMessage entry = bundleGroup.getMessage(activeKey, l); -+ -+ if (entry == null || !value.equals(entry.getValue())) { -+ String comment = null; -+ if (entry != null) { -+ comment = entry.getComment(); -+ } -+ -+ IMessagesBundle messagesBundle = bundleGroup.getMessagesBundle(l); -+ IMessage message = messagesBundle.getMessage(activeKey); -+ if (message != null) { -+ message.setText(String.valueOf(value)); -+ message.setComment(comment); -+ } -+ // TODO: find a better way -+ setTreeStructure(); -+ -+ } -+ } -+ } -+ } -+ -+ @Override -+ protected Object getValue(Object element) { -+ return labelProvider.getColumnText(element, visibleLocales.indexOf(l) + 1); -+ } -+ -+ @Override -+ protected CellEditor getCellEditor(Object element) { -+ if (editor == null) { -+ Composite tree = (Composite) treeViewer.getControl(); -+ editor = new TextCellEditor(tree); -+ } -+ return editor; -+ } -+ -+ @Override -+ protected boolean canEdit(Object element) { -+ return editable; -+ } -+ }); -+ -+ String displayName = l == null ? 
ResourceBundleManager.defaultLocaleTag : l.getDisplayName(uiLocale); -+ -+ col.setText(displayName); -+ col.addSelectionListener(new SelectionListener() { -+ -+ @Override -+ public void widgetSelected(SelectionEvent e) { -+ updateSorter(visibleLocales.indexOf(l) + 1); -+ } -+ -+ @Override -+ public void widgetDefaultSelected(SelectionEvent e) { -+ updateSorter(visibleLocales.indexOf(l) + 1); -+ } -+ }); -+ basicLayout.setColumnData(col, new ColumnWeightData(LOCALE_COLUMN_WEIGHT)); -+ } -+ } + @Override + protected RouteBuilder createRouteBuilder() { + return new RouteBuilder() { +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerProviderTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerProviderTest.java +index 76e6eaa11be83..64a932d1f28ea 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerProviderTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerProviderTest.java +@@ -49,6 +49,10 @@ public class CxfConsumerProviderTest extends CamelTestSupport { + + ""?serviceClass=org.apache.camel.component.cxf.ServiceProvider""; + + ++ @Override ++ public boolean isCreateCamelContextPerClass() { ++ return true; + } -+ -+ protected void updateSorter(int idx) { -+ SortInfo sortInfo = sorter.getSortInfo(); -+ if (idx == sortInfo.getColIdx()) -+ sortInfo.setDESC(!sortInfo.isDESC()); -+ else { -+ sortInfo.setColIdx(idx); -+ sortInfo.setDESC(false); -+ } -+ sortInfo.setVisibleLocales(visibleLocales); -+ sorter.setSortInfo(sortInfo); -+ treeType = idx == 0 ? TreeType.Tree : TreeType.Flat; -+ setTreeStructure(); -+ treeViewer.refresh(); + protected RouteBuilder createRouteBuilder() { + return new RouteBuilder() { + public void configure() { +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerResponseTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerResponseTest.java +index 293957ccce39e..7b89881c07e20 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerResponseTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerResponseTest.java +@@ -53,6 +53,10 @@ public class CxfConsumerResponseTest extends CamelTestSupport { + + ""&publishedEndpointUrl=http://www.simple.com/services/test""; + + ++ @Override ++ public boolean isCreateCamelContextPerClass() { ++ return true; + } -+ + // START SNIPPET: example + protected RouteBuilder createRouteBuilder() { + return new RouteBuilder() { +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerTest.java +index d87323e65e403..e096593b142a5 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerTest.java +@@ -51,7 +51,12 @@ public class CxfConsumerTest extends CamelTestSupport { + private static final String ECHO_OPERATION = ""echo""; + private static final String ECHO_BOOLEAN_OPERATION = ""echoBoolean""; + private static final String TEST_MESSAGE = ""Hello World!""; +- ++ + @Override -+ public boolean setFocus() { -+ return treeViewer.getControl().setFocus(); ++ public boolean isCreateCamelContextPerClass() { ++ return true; + } ++ + // START SNIPPET: example + protected RouteBuilder createRouteBuilder() { + return 
new RouteBuilder() { +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfCustomerStartStopTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfCustomerStartStopTest.java +index 66986987d7dbd..25543fb4a9d3e 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfCustomerStartStopTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfCustomerStartStopTest.java +@@ -32,7 +32,10 @@ + @org.junit.Ignore + public class CxfCustomerStartStopTest extends Assert { + static final int PORT1 = CXFTestSupport.getPort1(); +- static final int PORT2 = CXFTestSupport.getPort1(); ++ static final int PORT2 = CXFTestSupport.getPort1(); + -+ /*** DRAG AND DROP ***/ -+ protected void hookDragAndDrop() { -+ //KeyTreeItemDragSource ktiSource = new KeyTreeItemDragSource (treeViewer); -+ KeyTreeItemDropTarget ktiTarget = new KeyTreeItemDropTarget(treeViewer); -+ MessagesDragSource source = new MessagesDragSource(treeViewer, this.resourceBundle); -+ MessagesDropTarget target = new MessagesDropTarget(treeViewer, manager, resourceBundle); -+ -+ // Initialize drag source for copy event -+ DragSource dragSource = new DragSource(treeViewer.getControl(), DND.DROP_COPY | DND.DROP_MOVE); -+ dragSource.setTransfer(new Transfer[] { TextTransfer.getInstance() }); -+ //dragSource.addDragListener(ktiSource); -+ dragSource.addDragListener(source); + -+ // Initialize drop target for copy event -+ DropTarget dropTarget = new DropTarget(treeViewer.getControl(), DND.DROP_MOVE | DND.DROP_COPY); -+ dropTarget.setTransfer(new Transfer[] { TextTransfer.getInstance(), JavaUI.getJavaElementClipboardTransfer() }); -+ dropTarget.addDropListener(ktiTarget); -+ dropTarget.addDropListener(target); ++ + @Test + public void startAndStopService() throws Exception { + CamelContext context = new DefaultCamelContext(); +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfCustomizedExceptionTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfCustomizedExceptionTest.java +index 3d32fc1c8d2a7..e67a734070d7f 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfCustomizedExceptionTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfCustomizedExceptionTest.java +@@ -69,6 +69,10 @@ public class CxfCustomizedExceptionTest extends CamelTestSupport { + + private Bus bus; + ++ @Override ++ public boolean isCreateCamelContextPerClass() { ++ return true; + } -+ -+ /*** ACTIONS ***/ -+ -+ private void makeActions() { -+ doubleClickAction = new Action() { -+ -+ @Override -+ public void run() { -+ editSelectedItem(); -+ } -+ -+ }; + + @Override + @Before +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfDispatchTestSupport.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfDispatchTestSupport.java +index cf85aa427887f..7812026967f45 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfDispatchTestSupport.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfDispatchTestSupport.java +@@ -52,6 +52,10 @@ public abstract class CxfDispatchTestSupport extends CamelSpringTestSupport { + + protected Endpoint endpoint; + private int port = CXFTestSupport.getPort1(); ++ @Override ++ public boolean isCreateCamelContextPerClass() { ++ return true; + } -+ -+ private void hookDoubleClickAction() { -+ 
treeViewer.addDoubleClickListener(new IDoubleClickListener() { -+ -+ public void doubleClick(DoubleClickEvent event) { -+ doubleClickAction.run(); -+ } -+ }); + + @Before + public void startService() { +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfJavaOnlyPayloadModeTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfJavaOnlyPayloadModeTest.java +index 4b72cd6993c2b..b9e92eb47bf65 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfJavaOnlyPayloadModeTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfJavaOnlyPayloadModeTest.java +@@ -39,6 +39,10 @@ public class CxfJavaOnlyPayloadModeTest extends CamelTestSupport { + + ""&portName={http://camel.apache.org/wsdl-first}soap"" + + ""&dataFormat=PAYLOAD"" + + ""&properties.exceptionMessageCauseEnabled=true&properties.faultStackTraceEnabled=true""; ++ @Override ++ public boolean isCreateCamelContextPerClass() { ++ return true; + } -+ -+ /*** SELECTION LISTENER ***/ -+ -+ protected void registerListeners() { -+ -+ this.editorListener = new MessagesEditorListener(); -+ if (manager != null) { -+ RBManager.getInstance(manager.getProject()).addMessagesEditorListener(editorListener); -+ } -+ -+ treeViewer.getControl().addKeyListener(new KeyAdapter() { -+ -+ public void keyPressed(KeyEvent event) { -+ if (event.character == SWT.DEL && event.stateMask == 0) { -+ deleteSelectedItems(); -+ } -+ } -+ }); + + @Test + public void testCxfJavaOnly() throws Exception { +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfMixedModeRouterTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfMixedModeRouterTest.java +index 9dce087c756a9..906cc0f4de7fb 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfMixedModeRouterTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfMixedModeRouterTest.java +@@ -52,7 +52,11 @@ public class CxfMixedModeRouterTest extends CamelTestSupport { + + private String routerEndpointURI = ""cxf://"" + ROUTER_ADDRESS + ""?"" + SERVICE_CLASS + ""&dataFormat=PAYLOAD""; + private String serviceEndpointURI = ""cxf://"" + SERVICE_ADDRESS + ""?"" + SERVICE_CLASS + ""&dataFormat=POJO""; +- ++ @Override ++ public boolean isCreateCamelContextPerClass() { ++ return true; + } + -+ protected void unregisterListeners() { -+ if (manager != null) { -+ RBManager.getInstance(manager.getProject()).removeMessagesEditorListener(editorListener); -+ } -+ treeViewer.removeSelectionChangedListener(selectionChangedListener); + @BeforeClass + public static void startService() { + //start a service +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfNonWrapperTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfNonWrapperTest.java +index ef033e3a5616a..74c02d60e155a 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfNonWrapperTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfNonWrapperTest.java +@@ -34,7 +34,11 @@ + + public class CxfNonWrapperTest extends CamelSpringTestSupport { + int port1 = CXFTestSupport.getPort1(); +- ++ @Override ++ public boolean isCreateCamelContextPerClass() { ++ return true; + } + -+ public void addSelectionChangedListener(ISelectionChangedListener listener) { -+ treeViewer.addSelectionChangedListener(listener); -+ selectionChangedListener = listener; + protected 
ClassPathXmlApplicationContext createApplicationContext() { + return new ClassPathXmlApplicationContext(""org/apache/camel/component/cxf/nonWrapperProcessor.xml""); + } +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfPayLoadSoapHeaderTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfPayLoadSoapHeaderTest.java +index 073deff09346b..c95d33741dbc8 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfPayLoadSoapHeaderTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfPayLoadSoapHeaderTest.java +@@ -56,7 +56,11 @@ protected String getServiceEndpointURI() { + return ""cxf:http://localhost:"" + port2 + ""/"" + getClass().getSimpleName() + + ""/new_pizza_service/services/PizzaService?wsdlURL=classpath:pizza_service.wsdl&dataFormat=PAYLOAD""; + } +- ++ @Override ++ public boolean isCreateCamelContextPerClass() { ++ return true; + } + + @Override + protected RouteBuilder createRouteBuilder() { + return new RouteBuilder() { +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerProtocalHeaderTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerProtocalHeaderTest.java +index c48127dc68af9..257ca29af2856 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerProtocalHeaderTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerProtocalHeaderTest.java +@@ -38,6 +38,11 @@ public class CxfProducerProtocalHeaderTest extends CamelTestSupport { + + ""echo Hello World!"" + + """"; + + @Override -+ public void resourceBundleChanged(final ResourceBundleChangedEvent event) { -+ if (event.getType() != ResourceBundleChangedEvent.MODIFIED -+ || !event.getBundle().equals(this.getResourceBundle())) -+ return; -+ -+ if (Display.getCurrent() != null) { -+ refreshViewer(event, true); -+ return; -+ } -+ -+ Display.getDefault().asyncExec(new Runnable() { -+ -+ public void run() { -+ refreshViewer(event, true); -+ } -+ }); ++ public boolean isCreateCamelContextPerClass() { ++ return true; + } -+ -+ private void refreshViewer(ResourceBundleChangedEvent event, boolean computeVisibleLocales) { -+ //manager.loadResourceBundle(resourceBundle); -+ if (computeVisibleLocales) { -+ refreshContent(event); -+ } -+ -+ // Display.getDefault().asyncExec(new Runnable() { -+ // public void run() { -+ treeViewer.refresh(); -+ // } -+ // }); ++ + protected RouteBuilder createRouteBuilder() { + return new RouteBuilder() { + public void configure() { +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerRouterTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerRouterTest.java +index 10727974f50cc..f0db44c96dc09 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerRouterTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerRouterTest.java +@@ -51,6 +51,10 @@ public class CxfProducerRouterTest extends CamelTestSupport { + private static final String ECHO_OPERATION = ""echo""; + private static final String TEST_MESSAGE = ""Hello World!""; + ++ @Override ++ public boolean isCreateCamelContextPerClass() { ++ return true; + } -+ -+ public StructuredViewer getViewer() { -+ return this.treeViewer; + + @BeforeClass + public static void startServer() throws Exception { +diff --git 
a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerSynchronousFalseTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerSynchronousFalseTest.java +index 074b7dd967e7a..28633d96e1322 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerSynchronousFalseTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerSynchronousFalseTest.java +@@ -40,6 +40,10 @@ public class CxfProducerSynchronousFalseTest extends CamelTestSupport { + + private String url = ""cxf://"" + SIMPLE_SERVER_ADDRESS + + ""?serviceClass=org.apache.camel.component.cxf.HelloService&dataFormat=MESSAGE&synchronous=false""; ++ @Override ++ public boolean isCreateCamelContextPerClass() { ++ return true; + } -+ -+ public void setSearchString(String pattern) { -+ matcher.setPattern(pattern); -+ treeType = matcher.getPattern().trim().length() > 0 ? TreeType.Flat : TreeType.Tree; -+ labelProvider.setSearchEnabled(treeType.equals(TreeType.Flat)); -+ // WTF? -+ treeType = treeType.equals(TreeType.Tree) && sorter.getSortInfo().getColIdx() == 0 ? TreeType.Tree : TreeType.Flat; -+ treeViewer.refresh(); + + @BeforeClass + public static void startServer() throws Exception { +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerSynchronousTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerSynchronousTest.java +index e15338b2966ab..6f9760d4b3285 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerSynchronousTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerSynchronousTest.java +@@ -39,6 +39,10 @@ public class CxfProducerSynchronousTest extends CamelTestSupport { + + private String url = ""cxf://"" + SIMPLE_SERVER_ADDRESS + + ""?serviceClass=org.apache.camel.component.cxf.HelloService&dataFormat=MESSAGE&synchronous=true""; ++ @Override ++ public boolean isCreateCamelContextPerClass() { ++ return true; + } -+ -+ public SortInfo getSortInfo() { -+ if (this.sorter != null) -+ return this.sorter.getSortInfo(); -+ else -+ return null; + + @BeforeClass + public static void startServer() throws Exception { +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerTest.java +index 7f5f0652ec96d..76da36e080567 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerTest.java +@@ -67,7 +67,6 @@ protected String getWrongServerAddress() { + return ""http://localhost:"" + CXFTestSupport.getPort3() + ""/"" + getClass().getSimpleName() + ""/test""; + } + +- + @Before + public void startService() throws Exception { + // start a simple front service +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfRawMessageRouterTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfRawMessageRouterTest.java +index 40cb2d44f3443..9a21fa8c98de4 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfRawMessageRouterTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfRawMessageRouterTest.java +@@ -36,7 +36,11 @@ public void configure() { + } + }; + } +- ++ @Override ++ public boolean isCreateCamelContextPerClass() { ++ 
return true; + } + -+ public void setSortInfo(SortInfo sortInfo) { -+ sortInfo.setVisibleLocales(visibleLocales); -+ if (sorter != null) { -+ sorter.setSortInfo(sortInfo); -+ treeType = sortInfo.getColIdx() == 0 ? TreeType.Tree : TreeType.Flat; -+ treeViewer.refresh(); -+ } + @Test + public void testTheContentType() throws Exception { + MockEndpoint result = getMockEndpoint(""mock:result""); +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfSimpleRouterTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfSimpleRouterTest.java +index c9cad6c10711b..c8fae21f095fb 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfSimpleRouterTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfSimpleRouterTest.java +@@ -51,7 +51,11 @@ protected String getServiceAddress() { + protected void configureFactory(ServerFactoryBean svrBean) { + + } +- ++ @Override ++ public boolean isCreateCamelContextPerClass() { ++ return true; + } + -+ public String getSearchString() { -+ return matcher.getPattern(); + @Before + public void startService() { + //start a service +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfSoapMessageProviderTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfSoapMessageProviderTest.java +index 4049f62f3ff1a..0b53668bea282 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfSoapMessageProviderTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfSoapMessageProviderTest.java +@@ -38,6 +38,10 @@ public class CxfSoapMessageProviderTest extends CamelSpringTestSupport { + protected ClassPathXmlApplicationContext createApplicationContext() { + return new ClassPathXmlApplicationContext(""org/apache/camel/component/cxf/SoapMessageProviderContext.xml""); + } ++ @Override ++ public boolean isCreateCamelContextPerClass() { ++ return true; + } -+ -+ public boolean isEditable() { -+ return editable; + + @Test + public void testSOAPMessageModeDocLit() throws Exception { +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfSpringCustomizedExceptionTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfSpringCustomizedExceptionTest.java +index a39274bfe0406..c6e57a5e41403 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfSpringCustomizedExceptionTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfSpringCustomizedExceptionTest.java +@@ -49,7 +49,11 @@ public class CxfSpringCustomizedExceptionTest extends CamelTestSupport { + // END SNIPPET: FaultDefine + } + +- ++ @Override ++ public boolean isCreateCamelContextPerClass() { ++ return true; + } + -+ public void setEditable(boolean editable) { -+ this.editable = editable; + @Before + public void setUp() throws Exception { + CXFTestSupport.getPort1(); +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfTimeoutTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfTimeoutTest.java +index 8971deca12021..f57ded3e6427b 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfTimeoutTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfTimeoutTest.java +@@ -39,6 +39,11 @@ public class CxfTimeoutTest extends CamelSpringTestSupport { + protected static final String JAXWS_SERVER_ADDRESS + = 
""http://localhost:"" + CXFTestSupport.getPort1() + ""/CxfTimeoutTest/SoapContext/SoapPort""; + ++ @Override ++ public boolean isCreateCamelContextPerClass() { ++ return true; + } + -+ public List getVisibleLocales() { -+ return visibleLocales; + @BeforeClass + public static void startService() { + Greeter implementor = new GreeterImplWithSleep(); +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfWsdlFirstPayloadModeTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfWsdlFirstPayloadModeTest.java +index 764040443f881..cb1ee7710a9d8 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfWsdlFirstPayloadModeTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfWsdlFirstPayloadModeTest.java +@@ -34,6 +34,10 @@ + + public class CxfWsdlFirstPayloadModeTest extends AbstractCxfWsdlFirstTest { + ++ @Override ++ public boolean isCreateCamelContextPerClass() { ++ return true; + } -+ -+ public ResourceBundleManager getManager() { -+ return this.manager; + + @BeforeClass + public static void startService() { +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfWsdlFirstProcessorTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfWsdlFirstProcessorTest.java +index 0dcfc8e25cab8..64580aa5d5072 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfWsdlFirstProcessorTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfWsdlFirstProcessorTest.java +@@ -25,6 +25,10 @@ + import org.springframework.context.support.ClassPathXmlApplicationContext; + + public class CxfWsdlFirstProcessorTest extends AbstractCxfWsdlFirstTest { ++ @Override ++ public boolean isCreateCamelContextPerClass() { ++ return true; + } -+ -+ public String getResourceBundle() { -+ return resourceBundle; + + protected ClassPathXmlApplicationContext createApplicationContext() { + return new ClassPathXmlApplicationContext(""org/apache/camel/component/cxf/WsdlFirstProcessor.xml""); +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfWsdlFirstTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfWsdlFirstTest.java +index 4fdf0390318c2..71237e359aaf5 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfWsdlFirstTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfWsdlFirstTest.java +@@ -41,6 +41,10 @@ + import org.springframework.context.support.ClassPathXmlApplicationContext; + + public class CxfWsdlFirstTest extends AbstractCxfWsdlFirstTest { ++ @Override ++ public boolean isCreateCamelContextPerClass() { ++ return true; + } -+ -+ public void editSelectedItem() { -+ EditorUtils.openEditor(site.getPage(), manager.getRandomFile(resourceBundle), -+ EditorUtils.RESOURCE_BUNDLE_EDITOR); + + protected ClassPathXmlApplicationContext createApplicationContext() { + return new ClassPathXmlApplicationContext(""org/apache/camel/component/cxf/WsdlFirstBeans.xml""); +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/converter/CxfPayloadConverterTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/converter/CxfPayloadConverterTest.java +index adc093a53375c..e5e915ca4c6e1 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/converter/CxfPayloadConverterTest.java ++++ 
b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/converter/CxfPayloadConverterTest.java +@@ -41,6 +41,10 @@ public class CxfPayloadConverterTest extends ExchangeTestSupport { + private CxfPayload payload; + private CxfPayload emptyPayload; + private FileInputStream inputStream; ++ @Override ++ public boolean isCreateCamelContextPerClass() { ++ return true; + } + + @Override + @Before +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/jaxrs/CxfRsProducerTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/jaxrs/CxfRsProducerTest.java +index 00bb7c29633f0..e4fd5798a1f54 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/jaxrs/CxfRsProducerTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/jaxrs/CxfRsProducerTest.java +@@ -49,6 +49,10 @@ public void process(Exchange exchange) throws Exception { + exchange.getOut().setBody(inMessage.getHeader(Exchange.HTTP_QUERY, String.class)); + } + } ++ @Override ++ public boolean isCreateCamelContextPerClass() { ++ return true; ++ } + + public int getPort1() { + return port1; +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/jaxrs/CxfRsRouterTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/jaxrs/CxfRsRouterTest.java +index 3506b2d6fa37e..7527d4784b45c 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/jaxrs/CxfRsRouterTest.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/jaxrs/CxfRsRouterTest.java +@@ -21,6 +21,7 @@ + import org.apache.camel.test.junit4.CamelSpringTestSupport; + import org.apache.http.HttpResponse; + import org.apache.http.client.HttpClient; ++import org.apache.http.client.methods.HttpDelete; + import org.apache.http.client.methods.HttpGet; + import org.apache.http.client.methods.HttpPost; + import org.apache.http.client.methods.HttpPut; +@@ -154,6 +155,9 @@ public void testPostConsumer() throws Exception { + assertEquals(200, response.getStatusLine().getStatusCode()); + assertEquals(""124Jack"", + EntityUtils.toString(response.getEntity())); ++ ++ HttpDelete del = new HttpDelete(""http://localhost:"" + PORT0 + ""/CxfRsRouterTest/route/customerservice/customers/124/""); ++ httpclient.execute(del); + } finally { + httpclient.getConnectionManager().shutdown(); + } +@@ -174,6 +178,9 @@ public void testPostConsumerUniqueResponseCode() throws Exception { + assertEquals(201, response.getStatusLine().getStatusCode()); + assertEquals(""124Jack"", + EntityUtils.toString(response.getEntity())); + -+ public void deleteSelectedItems() { -+ List keys = new ArrayList(); -+ -+ IWorkbenchWindow window = PlatformUI.getWorkbench().getActiveWorkbenchWindow(); -+ ISelection selection = window.getActivePage().getSelection(); -+ if (selection instanceof IStructuredSelection) { -+ for (Iterator iter = ((IStructuredSelection) selection).iterator(); iter.hasNext();) { -+ Object elem = iter.next(); -+ if (elem instanceof IKeyTreeNode) { -+ addKeysToRemove((IKeyTreeNode)elem, keys); -+ } -+ } ++ HttpDelete del = new HttpDelete(""http://localhost:"" + PORT0 + ""/CxfRsRouterTest/route/customerservice/customers/124/""); ++ httpclient.execute(del); + } finally { + httpclient.getConnectionManager().shutdown(); + } +diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/jaxrs/testbean/CustomerService.java 
b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/jaxrs/testbean/CustomerService.java +index 1dbdb72f4422e..c90b4bb1ed600 100644 +--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/jaxrs/testbean/CustomerService.java ++++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/jaxrs/testbean/CustomerService.java +@@ -120,7 +120,9 @@ public Response deleteCustomer(@PathParam(""id"") String id) { + } else { + r = Response.notModified().build(); + } +- ++ if (idNumber == currentId) { ++ --currentId; + } + return r; + } + +diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/BrowsableQueueTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/BrowsableQueueTest.java +index 896977f5456ff..966b5a1631e32 100644 +--- a/components/camel-jms/src/test/java/org/apache/camel/component/jms/BrowsableQueueTest.java ++++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/BrowsableQueueTest.java +@@ -24,11 +24,14 @@ + import org.apache.camel.Exchange; + import org.apache.camel.builder.RouteBuilder; + import org.apache.camel.test.junit4.CamelTestSupport; + -+ try { -+ manager.removeResourceBundleEntry(getResourceBundle(), keys); -+ } catch (Exception ex) { -+ Logger.logError(ex); -+ } ++import org.junit.After; ++import org.junit.Before; + import org.junit.Test; + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + +-import static org.apache.camel.component.jms.JmsComponent.jmsComponentAutoAcknowledge; ++import static org.apache.camel.component.jms.JmsComponent.jmsComponent; + + /** + * @version +@@ -41,16 +44,29 @@ public class BrowsableQueueTest extends CamelTestSupport { + protected int counter; + protected Object[] expectedBodies = {""body1"", ""body2""}; + ++ @Before ++ public void setUp() throws Exception { ++ long start = System.currentTimeMillis(); ++ super.setUp(); ++ System.out.println(""Start: "" + (System.currentTimeMillis() - start)); ++ } ++ @After ++ public void tearDown() throws Exception { ++ long start = System.currentTimeMillis(); ++ super.tearDown(); ++ System.out.println(""Stop: "" + (System.currentTimeMillis() - start)); + } + -+ private void addKeysToRemove(IKeyTreeNode node, List keys) { -+ keys.add(node.getMessageKey()); -+ for (IKeyTreeNode ktn : node.getChildren()) { -+ addKeysToRemove(ktn, keys); -+ } -+ } -+ -+ public void addNewItem() { -+ //event.feedback = DND.FEEDBACK_INSERT_BEFORE; -+ String newKeyPrefix = """"; -+ -+ IWorkbenchWindow window = PlatformUI.getWorkbench().getActiveWorkbenchWindow(); -+ ISelection selection = window.getActivePage().getSelection(); -+ if (selection instanceof IStructuredSelection) { -+ for (Iterator iter = ((IStructuredSelection) selection).iterator(); iter.hasNext();) { -+ Object elem = iter.next(); -+ if (elem instanceof IKeyTreeNode) { -+ newKeyPrefix = ((IKeyTreeNode) elem).getMessageKey(); -+ break; -+ } -+ } -+ } -+ -+ CreateResourceBundleEntryDialog dialog = new CreateResourceBundleEntryDialog(Display.getDefault() -+ .getActiveShell(), manager, newKeyPrefix.trim().length() > 0 ? 
newKeyPrefix + ""."" + ""[Platzhalter]"" -+ : """", """", getResourceBundle(), """"); -+ if (dialog.open() != InputDialog.OK) -+ return; + @Test + public void testSendMessagesThenBrowseQueue() throws Exception { + // send some messages + for (int i = 0; i < expectedBodies.length; i++) { + Object expectedBody = expectedBodies[i]; +- template.sendBodyAndHeader(""activemq:test.b"", expectedBody, ""counter"", i); ++ template.sendBodyAndHeader(""activemq:BrowsableQueueTest.b"", expectedBody, ""counter"", i); + } + + // now lets browse the queue +- JmsQueueEndpoint endpoint = getMandatoryEndpoint(""activemq:test.b?maximumBrowseSize=6"", JmsQueueEndpoint.class); ++ JmsQueueEndpoint endpoint = getMandatoryEndpoint(""activemq:BrowsableQueueTest.b?maximumBrowseSize=6"", JmsQueueEndpoint.class); + assertEquals(6, endpoint.getMaximumBrowseSize()); + List list = endpoint.getExchanges(); + LOG.debug(""Received: "" + list); +@@ -80,8 +96,8 @@ protected void sendExchange(final Object expectedBody) { + protected CamelContext createCamelContext() throws Exception { + CamelContext camelContext = super.createCamelContext(); + +- ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory(); +- camelContext.addComponent(componentName, jmsComponentAutoAcknowledge(connectionFactory)); ++ JmsComponent comp = jmsComponent(CamelJmsTestHelper.getSharedConfig()); ++ camelContext.addComponent(componentName, comp); + + return camelContext; + } +@@ -89,7 +105,7 @@ protected CamelContext createCamelContext() throws Exception { + protected RouteBuilder createRouteBuilder() throws Exception { + return new RouteBuilder() { + public void configure() throws Exception { +- from(""activemq:test.a"").to(""activemq:test.b""); ++ from(""activemq:BrowsableQueueTest.a"").to(""activemq:BrowsableQueueTest.b""); + } + }; + } +diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/ConsumeMessageConverterTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/ConsumeMessageConverterTest.java +index 2e06ccb8942a2..ac4fd990b7dd6 100644 +--- a/components/camel-jms/src/test/java/org/apache/camel/component/jms/ConsumeMessageConverterTest.java ++++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/ConsumeMessageConverterTest.java +@@ -32,7 +32,7 @@ + import org.springframework.jms.support.converter.MessageConversionException; + import org.springframework.jms.support.converter.MessageConverter; + +-import static org.apache.camel.component.jms.JmsComponent.jmsComponentAutoAcknowledge; ++import static org.apache.camel.component.jms.JmsComponent.jmsComponent; + + /** + * @version +@@ -49,8 +49,7 @@ protected JndiRegistry createRegistry() throws Exception { + protected CamelContext createCamelContext() throws Exception { + CamelContext camelContext = super.createCamelContext(); + +- ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory(); +- camelContext.addComponent(""activemq"", jmsComponentAutoAcknowledge(connectionFactory)); ++ camelContext.addComponent(""activemq"", jmsComponent(CamelJmsTestHelper.getSharedConfig())); + + return camelContext; + } +@@ -61,7 +60,7 @@ public void testTextMessage() throws Exception { + mock.expectedMessageCount(1); + mock.message(0).body().isInstanceOf(TextMessage.class); + +- template.sendBody(""activemq:queue:hello"", ""Hello World""); ++ template.sendBody(""activemq:queue:ConsumeMessageConverterTest.hello"", ""Hello World""); + + assertMockEndpointsSatisfied(); + } +@@ -72,7 +71,7 @@ public void 
testBytesMessage() throws Exception { + mock.expectedMessageCount(1); + mock.message(0).body().isInstanceOf(BytesMessage.class); + +- template.sendBody(""activemq:queue:hello"", ""Hello World"".getBytes()); ++ template.sendBody(""activemq:queue:ConsumeMessageConverterTest.hello"", ""Hello World"".getBytes()); + + assertMockEndpointsSatisfied(); + } +@@ -80,7 +79,7 @@ public void testBytesMessage() throws Exception { + protected RouteBuilder createRouteBuilder() throws Exception { + return new RouteBuilder() { + public void configure() throws Exception { +- from(""activemq:queue:hello?messageConverter=#myMessageConverter"").to(""mock:result""); ++ from(""activemq:queue:ConsumeMessageConverterTest.hello?messageConverter=#myMessageConverter"").to(""mock:result""); + } + }; + } +diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/FileRouteToJmsToFileTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/FileRouteToJmsToFileTest.java +index c1b570225fa34..ca060781651f8 100644 +--- a/components/camel-jms/src/test/java/org/apache/camel/component/jms/FileRouteToJmsToFileTest.java ++++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/FileRouteToJmsToFileTest.java +@@ -27,7 +27,7 @@ + import org.apache.camel.component.mock.MockEndpoint; + import org.apache.camel.test.junit4.CamelTestSupport; + import org.junit.Test; +-import static org.apache.camel.component.jms.JmsComponent.jmsComponentAutoAcknowledge; ++import static org.apache.camel.component.jms.JmsComponent.jmsComponent; + + /** + * Unit test that we can do file over JMS to file. +@@ -39,7 +39,7 @@ public class FileRouteToJmsToFileTest extends CamelTestSupport { + @Test + public void testRouteFileToFile() throws Exception { + deleteDirectory(""target/file2file""); +- NotifyBuilder notify = new NotifyBuilder(context).from(""activemq:queue:hello"").whenDone(1).create(); ++ NotifyBuilder notify = new NotifyBuilder(context).from(""activemq:queue:FileRouteToJmsToFileTest.hello"").whenDone(1).create(); + + MockEndpoint mock = getMockEndpoint(""mock:result""); + mock.expectedMessageCount(1); +@@ -58,8 +58,7 @@ public void testRouteFileToFile() throws Exception { + protected CamelContext createCamelContext() throws Exception { + CamelContext camelContext = super.createCamelContext(); + +- ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory(); +- camelContext.addComponent(componentName, jmsComponentAutoAcknowledge(connectionFactory)); ++ camelContext.addComponent(componentName, jmsComponent(CamelJmsTestHelper.getSharedConfig())); + + return camelContext; + } +@@ -67,9 +66,9 @@ protected CamelContext createCamelContext() throws Exception { + protected RouteBuilder createRouteBuilder() throws Exception { + return new RouteBuilder() { + public void configure() throws Exception { +- from(""file://target/file2file/in"").to(""activemq:queue:hello""); ++ from(""file://target/file2file/in"").to(""activemq:queue:FileRouteToJmsToFileTest.hello""); + +- from(""activemq:queue:hello"").to(""file://target/file2file/out"", ""mock:result""); ++ from(""activemq:queue:FileRouteToJmsToFileTest.hello"").to(""file://target/file2file/out"", ""mock:result""); + } + }; + } +diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsAutoStartupTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsAutoStartupTest.java +index a62787c32009c..e643a301812d6 100644 +--- 
a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsAutoStartupTest.java ++++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsAutoStartupTest.java +@@ -44,7 +44,7 @@ public void testAutoStartup() throws Exception { + // should be stopped by default + mock.expectedMessageCount(0); + +- template.sendBody(""activemq:queue:foo"", ""Hello World""); ++ template.sendBody(""activemq:queue:JmsAutoStartupTest.foo"", ""Hello World""); + + Thread.sleep(2000); + +@@ -64,7 +64,7 @@ protected RouteBuilder createRouteBuilder() throws Exception { + return new RouteBuilder() { + @Override + public void configure() throws Exception { +- endpoint = context.getEndpoint(""activemq:queue:foo?autoStartup=false"", JmsEndpoint.class); ++ endpoint = context.getEndpoint(""activemq:queue:JmsAutoStartupTest.foo?autoStartup=false"", JmsEndpoint.class); + + from(endpoint).to(""mock:result""); + } +diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsBatchResequencerJMSPriorityTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsBatchResequencerJMSPriorityTest.java +index a86c1e38dceb0..2460e0265bc51 100644 +--- a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsBatchResequencerJMSPriorityTest.java ++++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsBatchResequencerJMSPriorityTest.java +@@ -40,14 +40,14 @@ public void testBatchResequencerJMSPriority() throws Exception { + mock.expectedBodiesReceived(""G"", ""A"", ""B"", ""E"", ""H"", ""C"", ""D"", ""F""); + + // must use preserveMessageQos=true to be able to specify the JMSPriority to be used +- template.sendBodyAndHeader(""jms:queue:foo?preserveMessageQos=true"", ""A"", ""JMSPriority"", 6); +- template.sendBodyAndHeader(""jms:queue:foo?preserveMessageQos=true"", ""B"", ""JMSPriority"", 6); +- template.sendBodyAndHeader(""jms:queue:foo?preserveMessageQos=true"", ""C"", ""JMSPriority"", 4); +- template.sendBodyAndHeader(""jms:queue:foo?preserveMessageQos=true"", ""D"", ""JMSPriority"", 4); +- template.sendBodyAndHeader(""jms:queue:foo?preserveMessageQos=true"", ""E"", ""JMSPriority"", 6); +- template.sendBodyAndHeader(""jms:queue:foo?preserveMessageQos=true"", ""F"", ""JMSPriority"", 4); +- template.sendBodyAndHeader(""jms:queue:foo?preserveMessageQos=true"", ""G"", ""JMSPriority"", 8); +- template.sendBodyAndHeader(""jms:queue:foo?preserveMessageQos=true"", ""H"", ""JMSPriority"", 6); ++ template.sendBodyAndHeader(""jms:queue:JmsBatchResequencerJMSPriorityTest.foo?preserveMessageQos=true"", ""A"", ""JMSPriority"", 6); ++ template.sendBodyAndHeader(""jms:queue:JmsBatchResequencerJMSPriorityTest.foo?preserveMessageQos=true"", ""B"", ""JMSPriority"", 6); ++ template.sendBodyAndHeader(""jms:queue:JmsBatchResequencerJMSPriorityTest.foo?preserveMessageQos=true"", ""C"", ""JMSPriority"", 4); ++ template.sendBodyAndHeader(""jms:queue:JmsBatchResequencerJMSPriorityTest.foo?preserveMessageQos=true"", ""D"", ""JMSPriority"", 4); ++ template.sendBodyAndHeader(""jms:queue:JmsBatchResequencerJMSPriorityTest.foo?preserveMessageQos=true"", ""E"", ""JMSPriority"", 6); ++ template.sendBodyAndHeader(""jms:queue:JmsBatchResequencerJMSPriorityTest.foo?preserveMessageQos=true"", ""F"", ""JMSPriority"", 4); ++ template.sendBodyAndHeader(""jms:queue:JmsBatchResequencerJMSPriorityTest.foo?preserveMessageQos=true"", ""G"", ""JMSPriority"", 8); ++ template.sendBodyAndHeader(""jms:queue:JmsBatchResequencerJMSPriorityTest.foo?preserveMessageQos=true"", ""H"", 
""JMSPriority"", 6); + + assertMockEndpointsSatisfied(); + } +@@ -55,7 +55,7 @@ public void testBatchResequencerJMSPriority() throws Exception { + protected CamelContext createCamelContext() throws Exception { + CamelContext camelContext = super.createCamelContext(); + +- ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory(); ++ ConnectionFactory connectionFactory = CamelJmsTestHelper.getSharedConnectionFactory(); + camelContext.addComponent(""jms"", jmsComponentAutoAcknowledge(connectionFactory)); + + return camelContext; +@@ -67,7 +67,7 @@ protected RouteBuilder createRouteBuilder() throws Exception { + @Override + public void configure() throws Exception { + // START SNIPPET: e1 +- from(""jms:queue:foo"") ++ from(""jms:queue:JmsBatchResequencerJMSPriorityTest.foo"") + // sort by JMSPriority by allowing duplicates (message can have same JMSPriority) + // and use reverse ordering so 9 is first output (most important), and 0 is last + // use batch mode and fire every 3th second +diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsComponentTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsComponentTest.java +index 40d74a424df78..fb97686fda8d0 100644 +--- a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsComponentTest.java ++++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsComponentTest.java +@@ -34,7 +34,7 @@ public class JmsComponentTest extends CamelTestSupport { + + @Test + public void testComponentOptions() throws Exception { +- String reply = template.requestBody(""activemq123:queue:hello?requestTimeout=5000"", ""Hello World"", String.class); ++ String reply = template.requestBody(""activemq123:queue:JmsComponentTest.hello?requestTimeout=5000"", ""Hello World"", String.class); + assertEquals(""Bye World"", reply); + + assertEquals(true, endpoint.isAcceptMessagesWhileStopping()); +@@ -60,7 +60,7 @@ public void testComponentOptions() throws Exception { + protected CamelContext createCamelContext() throws Exception { + CamelContext camelContext = super.createCamelContext(); + +- ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory(); ++ ConnectionFactory connectionFactory = CamelJmsTestHelper.getSharedConnectionFactory(); + JmsComponent comp = jmsComponentAutoAcknowledge(connectionFactory); + + comp.setAcceptMessagesWhileStopping(true); +@@ -84,7 +84,7 @@ protected CamelContext createCamelContext() throws Exception { + + camelContext.addComponent(componentName, comp); + +- endpoint = (JmsEndpoint) comp.createEndpoint(""queue:hello""); ++ endpoint = (JmsEndpoint) comp.createEndpoint(""queue:JmsComponentTest.hello""); + + return camelContext; + } +diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsConsumerRestartPickupConfigurationChangesTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsConsumerRestartPickupConfigurationChangesTest.java +index 8b5e8e1eb070e..f421e09088aa6 100644 +--- a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsConsumerRestartPickupConfigurationChangesTest.java ++++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsConsumerRestartPickupConfigurationChangesTest.java +@@ -33,7 +33,7 @@ public class JmsConsumerRestartPickupConfigurationChangesTest extends CamelTestS + + @Test + public void testRestartJmsConsumerPickupChanges() throws Exception { +- JmsEndpoint endpoint = 
context.getEndpoint(""activemq:queue:foo"", JmsEndpoint.class); ++ JmsEndpoint endpoint = context.getEndpoint(""activemq:queue:JmsConsumerRestartPickupConfigurationChangesTest.foo"", JmsEndpoint.class); + JmsConsumer consumer = endpoint.createConsumer(new Processor() { + public void process(Exchange exchange) throws Exception { + template.send(""mock:result"", exchange); +@@ -44,7 +44,7 @@ public void process(Exchange exchange) throws Exception { + + MockEndpoint result = getMockEndpoint(""mock:result""); + result.expectedBodiesReceived(""Hello World""); +- template.sendBody(""activemq:queue:foo"", ""Hello World""); ++ template.sendBody(""activemq:queue:JmsConsumerRestartPickupConfigurationChangesTest.foo"", ""Hello World""); + assertMockEndpointsSatisfied(); + + consumer.stop(); +@@ -58,7 +58,7 @@ public void process(Exchange exchange) throws Exception { + + result.reset(); + result.expectedBodiesReceived(""Bye World""); +- template.sendBody(""activemq:queue:bar"", ""Bye World""); ++ template.sendBody(""activemq:queue:JmsConsumerRestartPickupConfigurationChangesTest.bar"", ""Bye World""); + assertMockEndpointsSatisfied(); + + consumer.stop(); +@@ -67,7 +67,7 @@ public void process(Exchange exchange) throws Exception { + protected CamelContext createCamelContext() throws Exception { + CamelContext camelContext = super.createCamelContext(); + +- ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory(); ++ ConnectionFactory connectionFactory = CamelJmsTestHelper.getSharedConnectionFactory(); + camelContext.addComponent(""activemq"", jmsComponentAutoAcknowledge(connectionFactory)); + + return camelContext; +diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsProduerConcurrentTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsProduerConcurrentTest.java +index ba919e5addbf6..fd9dab74a9bb2 100644 +--- a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsProduerConcurrentTest.java ++++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsProduerConcurrentTest.java +@@ -64,7 +64,7 @@ public Object call() throws Exception { + protected CamelContext createCamelContext() throws Exception { + CamelContext camelContext = super.createCamelContext(); + +- ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory(); ++ ConnectionFactory connectionFactory = CamelJmsTestHelper.getSharedConnectionFactory(); + camelContext.addComponent(""jms"", jmsComponentAutoAcknowledge(connectionFactory)); + + return camelContext; +@@ -75,9 +75,9 @@ protected RouteBuilder createRouteBuilder() throws Exception { + return new RouteBuilder() { + @Override + public void configure() throws Exception { +- from(""direct:start"").to(""jms:queue:foo""); ++ from(""direct:start"").to(""jms:queue:foo-JmsProducerConcurrentTest""); + +- from(""jms:queue:foo"").to(""mock:result""); ++ from(""jms:queue:foo-JmsProducerConcurrentTest"").to(""mock:result""); + } + }; + } +diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsProduerConcurrentWithReplyTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsProduerConcurrentWithReplyTest.java +index d7bf2dc9b6f50..15fcead7bf1e8 100644 +--- a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsProduerConcurrentWithReplyTest.java ++++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsProduerConcurrentWithReplyTest.java +@@ -73,7 +73,7 @@ public Object call() 
throws Exception { + protected CamelContext createCamelContext() throws Exception { + CamelContext camelContext = super.createCamelContext(); + +- ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory(); ++ ConnectionFactory connectionFactory = CamelJmsTestHelper.getSharedConnectionFactory(); + camelContext.addComponent(""jms"", jmsComponentAutoAcknowledge(connectionFactory)); + + return camelContext; +@@ -84,9 +84,9 @@ protected RouteBuilder createRouteBuilder() throws Exception { + return new RouteBuilder() { + @Override + public void configure() throws Exception { +- from(""direct:start"").to(""jms:queue:foo""); ++ from(""direct:start"").to(""jms:queue:foo-JmsProduerConcurrentWithReplyTest""); + +- from(""jms:queue:foo?concurrentConsumers=5"").transform(simple(""Bye ${in.body}"")).to(""mock:result""); ++ from(""jms:queue:foo-JmsProduerConcurrentWithReplyTest?concurrentConsumers=5"").transform(simple(""Bye ${in.body}"")).to(""mock:result""); + } + }; + }" +8c475876dda2507977fd7282c37462136400daf2,drools,-Added fixes for waltz to run waltz50--git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@7071 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-,c,https://github.com/kiegroup/drools,"diff --git a/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Edge.java b/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Edge.java +index d114ffd5441..b1ec6212171 100644 +--- a/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Edge.java ++++ b/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Edge.java +@@ -39,6 +39,10 @@ public class Edge { + + final public static String MINUS = ""-""; + ++ public Edge() { ++ + } -+ -+ public void setMatchingPrecision(float value) { -+ matchingPrecision = value; -+ if (matcher instanceof FuzzyMatcher) { -+ ((FuzzyMatcher) matcher).setMinimumSimilarity(value); -+ treeViewer.refresh(); -+ } ++ + public Edge(final int p1, + final int p2, + final boolean joined, +diff --git a/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Junction.java b/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Junction.java +index dc50219454d..ca1e1b51585 100644 +--- a/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Junction.java ++++ b/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Junction.java +@@ -39,7 +39,11 @@ public class Junction { + private int basePoint; + + private String type; +- ++ ++ public Junction() { ++ + } -+ -+ public float getMatchingPrecision() { -+ return matchingPrecision; ++ + public Junction(final int p1, + final int p2, + final int p3, +diff --git a/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Line.java b/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Line.java +index 0c424db3448..158900158fd 100644 +--- a/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Line.java ++++ b/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Line.java +@@ -26,6 +26,10 @@ public class Line { + + private int p2; + ++ public Line() { ++ + } + -+ private class MessagesEditorListener implements IMessagesEditorListener { -+ @Override -+ public void onSave() { -+ if (resourceBundle != null) { -+ setTreeStructure(); -+ } -+ } -+ -+ @Override -+ public void onModify() { -+ if (resourceBundle != null) { -+ setTreeStructure(); -+ } -+ } + public Line(final int p1, + final int p2) { + this.p1 = p1; +diff --git a/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Stage.java 
b/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Stage.java +index 252ba075d3a..f5c0ba87174 100644 +--- a/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Stage.java ++++ b/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Stage.java +@@ -51,6 +51,10 @@ public class Stage + + private int value; + ++ public Stage() { ++ + } -+} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/ResourceSelector.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/ResourceSelector.java ++ + public Stage(final int value) { + this.value = value; + } +diff --git a/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Waltz.java b/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Waltz.java +index 4cfe78e93dd..2bca544472d 100644 +--- a/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Waltz.java ++++ b/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Waltz.java +@@ -56,10 +56,15 @@ public void testWaltz() { + // workingMemory.addEventListener( agendaListener ); + + //go ! +- //this.loadLines( workingMemory, ""waltz12.dat"" ); +- +- final Stage stage = new Stage( Stage.START ); +- workingMemory.assertObject( stage ); ++ this.loadLines( workingMemory, ++ ""waltz50.dat"" ); ++ ++ //final Stage stage = new Stage( Stage.START ); ++ //workingMemory.assertObject( stage ); ++ ++ Stage stage = new Stage(Stage.DUPLICATE); ++ workingMemory.assertObject( stage ); ++ + workingMemory.fireAllRules(); + } catch ( final Throwable t ) { + t.printStackTrace(); +@@ -100,7 +105,7 @@ private void loadLines(final WorkingMemory wm, + final Matcher m = pat.matcher( line ); + if ( m.matches() ) { + final Line l = new Line( Integer.parseInt( m.group( 1 ) ), +- Integer.parseInt( m.group( 2 ) ) ); ++ Integer.parseInt( m.group( 2 ) ) ); + wm.assertObject( l ); + } + line = reader.readLine();" +19152416a44473325a6c3605f9accc4fee379b63,elasticsearch,add an index level setting to disable/enable- purging of expired docs --,a,https://github.com/elastic/elasticsearch,"diff --git a/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java b/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java +index ae4a010e95c40..f000ba6987225 100644 +--- a/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java ++++ b/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java +@@ -31,6 +31,8 @@ + import org.elasticsearch.action.bulk.BulkResponse; + import org.elasticsearch.action.delete.DeleteRequest; + import org.elasticsearch.client.Client; ++import org.elasticsearch.cluster.ClusterService; ++import org.elasticsearch.cluster.metadata.IndexMetaData; + import org.elasticsearch.cluster.metadata.MetaData; + import org.elasticsearch.common.component.AbstractLifecycleComponent; + import org.elasticsearch.common.inject.Inject; +@@ -65,8 +67,13 @@ public class IndicesTTLService extends AbstractLifecycleComponent getShardsToPurge() { + List shardsToPurge = new ArrayList(); + for (IndexService indexService : indicesService) { ++ // check the value of disable_purge for this index ++ IndexMetaData indexMetaData = clusterService.state().metaData().index(indexService.index().name()); ++ boolean disablePurge = indexMetaData.settings().getAsBoolean(""index.ttl.disable_purge"", false); ++ if (disablePurge) { ++ continue; ++ } ++ + // should be optimized with the hasTTL flag + FieldMappers ttlFieldMappers = 
indexService.mapperService().name(TTLFieldMapper.NAME); + if (ttlFieldMappers == null) {" +ca849f196990eec942468efaef3719f829c265eb,orientdb,Improved management of distributed cluster nodes--,p,https://github.com/orientechnologies/orientdb,"diff --git a/client/src/main/java/com/orientechnologies/orient/client/remote/OServerAdmin.java b/client/src/main/java/com/orientechnologies/orient/client/remote/OServerAdmin.java +index 6ad17e70a96..80b8a0fa780 100644 +--- a/client/src/main/java/com/orientechnologies/orient/client/remote/OServerAdmin.java ++++ b/client/src/main/java/com/orientechnologies/orient/client/remote/OServerAdmin.java +@@ -117,7 +117,7 @@ public OServerAdmin deleteDatabase() throws IOException { + } + + public OServerAdmin shareDatabase(final String iDatabaseName, final String iDatabaseUserName, final String iDatabaseUserPassword, +- final String iRemoteName) throws IOException { ++ final String iRemoteName, final String iMode) throws IOException { + + try { + storage.writeCommand(OChannelDistributedProtocol.REQUEST_DISTRIBUTED_DB_SHARE_SENDER); +@@ -125,11 +125,13 @@ public OServerAdmin shareDatabase(final String iDatabaseName, final String iData + storage.getNetwork().writeString(iDatabaseUserName); + storage.getNetwork().writeString(iDatabaseUserPassword); + storage.getNetwork().writeString(iRemoteName); ++ storage.getNetwork().writeString(iMode); + storage.getNetwork().flush(); + + storage.getNetwork().readStatus(); + +- OLogManager.instance().debug(this, ""Database %s has been shared with the server %s."", iDatabaseName, iRemoteName); ++ OLogManager.instance().debug(this, ""Database '%s' has been shared in mode '%s' with the server '%s'"", iDatabaseName, iMode, ++ iRemoteName); + + } catch (Exception e) { + OLogManager.instance().exception(""Can't share the database: "" + iDatabaseName, e, OStorageException.class); +diff --git a/server/src/main/java/com/orientechnologies/orient/server/handler/OServerHandler.java b/server/src/main/java/com/orientechnologies/orient/server/handler/OServerHandler.java +index 6a1603e0075..e3e174cf388 100644 +--- a/server/src/main/java/com/orientechnologies/orient/server/handler/OServerHandler.java ++++ b/server/src/main/java/com/orientechnologies/orient/server/handler/OServerHandler.java +@@ -40,17 +40,20 @@ public interface OServerHandler extends OService { + /** + * Callback invoked before a client request is processed. + */ +- public void onBeforeClientRequest(OClientConnection iConnection, byte iRequestType); ++ public void onBeforeClientRequest(OClientConnection iConnection, Object iRequestType); + + /** + * Callback invoked after a client request is processed. + */ +- public void onAfterClientRequest(OClientConnection iConnection, byte iRequestType); ++ public void onAfterClientRequest(OClientConnection iConnection, Object iRequestType); + + /** + * Callback invoked when a client connection has errors. ++ * ++ * @param iThrowable ++ * Throwable instance received + */ +- public void onClientError(OClientConnection iConnection); ++ public void onClientError(OClientConnection iConnection, Throwable iThrowable); + + /** + * Configures the handler. Called at startup. 
+diff --git a/server/src/main/java/com/orientechnologies/orient/server/handler/OServerHandlerAbstract.java b/server/src/main/java/com/orientechnologies/orient/server/handler/OServerHandlerAbstract.java +index 39a5c4d457d..a146ac509fa 100644 +--- a/server/src/main/java/com/orientechnologies/orient/server/handler/OServerHandlerAbstract.java ++++ b/server/src/main/java/com/orientechnologies/orient/server/handler/OServerHandlerAbstract.java +@@ -25,12 +25,12 @@ public void onClientConnection(final OClientConnection iConnection) { + public void onClientDisconnection(final OClientConnection iConnection) { + } + +- public void onBeforeClientRequest(final OClientConnection iConnection, final byte iRequestType) { ++ public void onBeforeClientRequest(final OClientConnection iConnection, final Object iRequestType) { + } + +- public void onAfterClientRequest(final OClientConnection iConnection, final byte iRequestType) { ++ public void onAfterClientRequest(final OClientConnection iConnection, final Object iRequestType) { + } + +- public void onClientError(final OClientConnection iConnection) { ++ public void onClientError(final OClientConnection iConnection, final Throwable iThrowable) { + } + } +diff --git a/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerManager.java b/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerManager.java +index 483c76c6359..8794e058c3f 100644 +--- a/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerManager.java ++++ b/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerManager.java +@@ -19,6 +19,7 @@ + import java.net.InetAddress; + import java.util.ArrayList; + import java.util.HashMap; ++import java.util.LinkedHashMap; + import java.util.List; + + import javax.crypto.SecretKey; +@@ -26,12 +27,15 @@ + import com.orientechnologies.common.concur.resource.OSharedResourceExternal; + import com.orientechnologies.common.log.OLogManager; + import com.orientechnologies.orient.core.Orient; ++import com.orientechnologies.orient.core.db.ODatabase; ++import com.orientechnologies.orient.core.db.ODatabaseComplex; ++import com.orientechnologies.orient.core.db.ODatabaseLifecycleListener; + import com.orientechnologies.orient.core.exception.OConfigurationException; + import com.orientechnologies.orient.core.record.ORecordInternal; ++import com.orientechnologies.orient.core.record.impl.ODocument; + import com.orientechnologies.orient.core.security.OSecurityManager; + import com.orientechnologies.orient.core.serialization.OBase64Utils; + import com.orientechnologies.orient.core.tx.OTransactionEntry; +-import com.orientechnologies.orient.enterprise.channel.binary.OChannelBinaryProtocol; + import com.orientechnologies.orient.server.OClientConnection; + import com.orientechnologies.orient.server.OServer; + import com.orientechnologies.orient.server.config.OServerHandlerConfiguration; +@@ -57,19 +61,20 @@ + * @see ODistributedServerDiscoveryListener, ODistributedServerDiscoverySignaler + * + */ +-public class ODistributedServerManager extends OServerHandlerAbstract { ++public class ODistributedServerManager extends OServerHandlerAbstract { + protected OServer server; + + protected String name; ++ protected String id; + protected SecretKey securityKey; + protected String securityAlgorithm; + protected InetAddress networkMulticastAddress; + protected int networkMulticastPort; +- protected int networkMulticastHeartbeat; 
// IN MS +- protected int networkTimeoutLeader; // IN MS +- protected int networkTimeoutNode; // IN MS +- private int networkHeartbeatDelay; // IN MS +- protected int serverUpdateDelay; // IN MS ++ protected int networkMulticastHeartbeat; // IN ++ protected int networkTimeoutLeader; // IN ++ protected int networkTimeoutNode; // IN ++ private int networkHeartbeatDelay; // IN ++ protected int serverUpdateDelay; // IN + protected int serverOutSynchMaxBuffers; + + private ODistributedServerDiscoverySignaler discoverySignaler; +@@ -78,7 +83,7 @@ public class ODistributedServerManager extends OServerHandlerAbstract { + private ODistributedServerRecordHook trigger; + private final OSharedResourceExternal lock = new OSharedResourceExternal(); + +- private final HashMap nodes = new HashMap(); ; ++ private final HashMap nodes = new LinkedHashMap(); ; + + static final String CHECKSUM = ""ChEcKsUm1976""; + +@@ -88,8 +93,11 @@ public class ODistributedServerManager extends OServerHandlerAbstract { + private OServerNetworkListener distributedNetworkListener; + private ONetworkProtocolDistributed leaderConnection; + public long lastHeartBeat; ++ private ODocument clusterConfiguration; + + public void startup() { ++ trigger = new ODistributedServerRecordHook(this); ++ + // LAUNCH THE SIGNAL AND WAIT FOR A CONNECTION + discoverySignaler = new ODistributedServerDiscoverySignaler(this, distributedNetworkListener); + } +@@ -195,6 +203,9 @@ else if (leaderConnection != null) + // STOP THE CHECK OF HEART-BEAT + leaderCheckerTask.cancel(); + ++ if (clusterConfiguration == null) ++ clusterConfiguration = createDatabaseConfiguration(); ++ + // NO NODE HAS JOINED: BECAME THE LEADER AND LISTEN FOR OTHER NODES + discoveryListener = new ODistributedServerDiscoveryListener(this, distributedNetworkListener); + +@@ -203,21 +214,8 @@ else if (leaderConnection != null) + } + } + +- /** +- * Install the trigger to catch all the events on records +- */ +- @Override +- public void onAfterClientRequest(final OClientConnection iConnection, final byte iRequestType) { +- if (iRequestType == OChannelBinaryProtocol.REQUEST_DB_OPEN || iRequestType == OChannelBinaryProtocol.REQUEST_DB_CREATE) { +- trigger = new ODistributedServerRecordHook(this, iConnection); +- iConnection.database.registerHook(trigger); +- +- // TODO: SEND THE CLUSTER CONFIG TO THE CLIENT +- } +- } +- + @Override +- public void onClientError(final OClientConnection iConnection) { ++ public void onClientError(final OClientConnection iConnection, final Throwable iThrowable) { + // handleNodeFailure(node); + } + +@@ -295,6 +293,8 @@ else if (""server.outsynch.maxbuffers"".equalsIgnoreCase(param.name)) + ""Can't find a configured network listener with 'distributed' protocol. Can't start distributed node"", null, + OConfigurationException.class); + ++ id = distributedNetworkListener.getInboundAddr().getHostName() + "":"" + distributedNetworkListener.getInboundAddr().getPort(); ++ + } catch (Exception e) { + throw new OConfigurationException(""Can't configure OrientDB Server as Cluster Node"", e); + } +@@ -357,10 +357,8 @@ public String getName() { + /** + * Distributed the request to all the configured nodes. Each node has the responsibility to bring the message early (synch-mode) + * or using an asynchronous queue. 
+- * +- * @param iConnection + */ +- public void distributeRequest(final OClientConnection iConnection, final OTransactionEntry> iTransactionEntry) { ++ public void distributeRequest(final OTransactionEntry> iTransactionEntry) { + lock.acquireSharedLock(); + + try { +@@ -382,10 +380,6 @@ public int getNetworkHeartbeatDelay() { + return networkHeartbeatDelay; + } + +- private static String getNodeName(final String iServerAddress, final int iServerPort) { +- return iServerAddress + "":"" + iServerPort; +- } +- + public long getLastHeartBeat() { + return lastHeartBeat; + } +@@ -393,4 +387,25 @@ public long getLastHeartBeat() { + public void updateHeartBeatTime() { + this.lastHeartBeat = System.currentTimeMillis(); + } ++ ++ public ODocument getClusterConfiguration() { ++ return clusterConfiguration; ++ } ++ ++ public String getId() { ++ return id; ++ } ++ ++ private static String getNodeName(final String iServerAddress, final int iServerPort) { ++ return iServerAddress + "":"" + iServerPort; ++ } ++ ++ private ODocument createDatabaseConfiguration() { ++ clusterConfiguration = new ODocument(); ++ ++ clusterConfiguration.field(""servers"", new ODocument(getId(), new ODocument(""update-delay"", getServerUpdateDelay()))); ++ clusterConfiguration.field(""clusters"", new ODocument(""*"", new ODocument(""owner"", getId()))); ++ ++ return clusterConfiguration; ++ } + } +diff --git a/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerNode.java b/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerNode.java +index 3d82d444c6b..9d608990124 100644 +--- a/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerNode.java ++++ b/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerNode.java +@@ -22,12 +22,15 @@ + import java.util.Map; + + import com.orientechnologies.common.log.OLogManager; ++import com.orientechnologies.orient.core.command.OCommandOutputListener; + import com.orientechnologies.orient.core.config.OContextConfiguration; + import com.orientechnologies.orient.core.config.OGlobalConfiguration; ++import com.orientechnologies.orient.core.db.record.ODatabaseRecord; ++import com.orientechnologies.orient.core.db.tool.ODatabaseExport; + import com.orientechnologies.orient.core.record.ORecordInternal; +-import com.orientechnologies.orient.core.record.impl.ODocument; + import com.orientechnologies.orient.core.tx.OTransactionEntry; + import com.orientechnologies.orient.enterprise.channel.binary.OChannelBinaryClient; ++import com.orientechnologies.orient.enterprise.channel.binary.OChannelBinaryOutputStream; + import com.orientechnologies.orient.enterprise.channel.distributed.OChannelDistributedProtocol; + + /** +@@ -36,18 +39,19 @@ + * @author Luca Garulli (l.garulli--at--orientechnologies.com) + * + */ +-public class ODistributedServerNode { ++public class ODistributedServerNode implements OCommandOutputListener { + public enum STATUS { + DISCONNECTED, CONNECTING, CONNECTED, SYNCHRONIZING + } + ++ private String id; + public String networkAddress; + public int networkPort; + public Date joinedOn; + private ODistributedServerManager manager; + public OChannelBinaryClient channel; + private OContextConfiguration configuration; +- private STATUS status = STATUS.DISCONNECTED; ++ private volatile STATUS status = STATUS.DISCONNECTED; + private Map storages = new HashMap(); + private List>> bufferedChanges = new ArrayList>>(); + +@@ -57,6 +61,7 
@@ public ODistributedServerNode(final ODistributedServerManager iNode, final Strin + networkPort = iServerPort; + joinedOn = new Date(); + configuration = new OContextConfiguration(); ++ id = networkAddress + "":"" + networkPort; + status = STATUS.CONNECTING; + } + +@@ -83,9 +88,13 @@ public void sendRequest(final OTransactionEntry> iRequest) th + // BUFFER EXCEEDS THE CONFIGURED LIMIT: REMOVE MYSELF AS NODE + manager.removeNode(this); + bufferedChanges.clear(); +- } else ++ } else { + // BUFFERIZE THE REQUEST + bufferedChanges.add(iRequest); ++ ++ OLogManager.instance().info(this, ""Server node '%s' is temporary disconnected, buffering change %d/%d for the record %s"", ++ id, bufferedChanges.size(), manager.serverOutSynchMaxBuffers, iRequest.getRecord().getIdentity()); ++ } + } + } else { + final ORecordInternal record = iRequest.getRecord(); +@@ -93,42 +102,58 @@ public void sendRequest(final OTransactionEntry> iRequest) th + try { + switch (iRequest.status) { + case OTransactionEntry.CREATED: +- channel.writeByte(OChannelDistributedProtocol.REQUEST_RECORD_CREATE); +- channel.writeInt(0); +- channel.writeShort((short) record.getIdentity().getClusterId()); +- channel.writeBytes(record.toStream()); +- channel.writeByte(record.getRecordType()); +- channel.flush(); +- +- channel.readStatus(); ++ channel.acquireExclusiveLock(); ++ try { ++ channel.writeByte(OChannelDistributedProtocol.REQUEST_RECORD_CREATE); ++ channel.writeInt(0); ++ channel.writeShort((short) record.getIdentity().getClusterId()); ++ channel.writeBytes(record.toStream()); ++ channel.writeByte(record.getRecordType()); ++ channel.flush(); ++ ++ channel.readStatus(); ++ ++ } finally { ++ channel.releaseExclusiveLock(); ++ } + break; + + case OTransactionEntry.UPDATED: +- channel.writeByte(OChannelDistributedProtocol.REQUEST_RECORD_UPDATE); +- channel.writeInt(0); +- channel.writeShort((short) record.getIdentity().getClusterId()); +- channel.writeLong(record.getIdentity().getClusterPosition()); +- channel.writeBytes(record.toStream()); +- channel.writeInt(record.getVersion()); +- channel.writeByte(record.getRecordType()); +- channel.flush(); +- +- readStatus(); +- +- channel.readInt(); ++ channel.acquireExclusiveLock(); ++ try { ++ channel.writeByte(OChannelDistributedProtocol.REQUEST_RECORD_UPDATE); ++ channel.writeInt(0); ++ channel.writeShort((short) record.getIdentity().getClusterId()); ++ channel.writeLong(record.getIdentity().getClusterPosition()); ++ channel.writeBytes(record.toStream()); ++ channel.writeInt(record.getVersion()); ++ channel.writeByte(record.getRecordType()); ++ channel.flush(); ++ ++ readStatus(); ++ ++ channel.readInt(); ++ } finally { ++ channel.releaseExclusiveLock(); ++ } + break; + + case OTransactionEntry.DELETED: +- channel.writeByte(OChannelDistributedProtocol.REQUEST_RECORD_DELETE); +- channel.writeInt(0); +- channel.writeShort((short) record.getIdentity().getClusterId()); +- channel.writeLong(record.getIdentity().getClusterPosition()); +- channel.writeInt(record.getVersion()); +- channel.flush(); +- +- readStatus(); +- +- channel.readLong(); ++ channel.acquireExclusiveLock(); ++ try { ++ channel.writeByte(OChannelDistributedProtocol.REQUEST_RECORD_DELETE); ++ channel.writeInt(0); ++ channel.writeShort((short) record.getIdentity().getClusterId()); ++ channel.writeLong(record.getIdentity().getClusterPosition()); ++ channel.writeInt(record.getVersion()); ++ channel.flush(); ++ ++ readStatus(); ++ ++ channel.readLong(); ++ } finally { ++ channel.releaseExclusiveLock(); ++ } + break; + } + } catch 
(RuntimeException e) { +@@ -175,15 +200,13 @@ public void setAsTemporaryDisconnected(final int iServerOutSynchMaxBuffers) { + } + + public void startSynchronization() { +- final ODocument config = createDatabaseConfiguration(); +- + // SEND THE LAST CONFIGURATION TO THE NODE + channel.acquireExclusiveLock(); + + try { + channel.out.writeByte(OChannelDistributedProtocol.REQUEST_DISTRIBUTED_DB_CONFIG); + channel.out.writeInt(0); +- channel.writeBytes(config.toStream()); ++ channel.writeBytes(manager.getClusterConfiguration().toStream()); + channel.flush(); + + readStatus(); +@@ -202,9 +225,51 @@ public void startSynchronization() { + + @Override + public String toString() { +- final StringBuilder builder = new StringBuilder(); +- builder.append(networkAddress).append("":"").append(networkPort); +- return builder.toString(); ++ return id; ++ } ++ ++ public STATUS getStatus() { ++ return status; ++ } ++ ++ public void shareDatabase(final ODatabaseRecord iDatabase, final String iRemoteServerName, final String iEngineName, ++ final String iMode) throws IOException { ++ if (status == STATUS.DISCONNECTED) ++ throw new ODistributedSynchronizationException(""Can't share database '"" + iDatabase.getName() + ""' on remote server node '"" ++ + iRemoteServerName + ""' because is disconnected""); ++ ++ channel.acquireExclusiveLock(); ++ ++ try { ++ status = STATUS.SYNCHRONIZING; ++ ++ OLogManager.instance().info(this, ++ ""Sharing database '"" + iDatabase.getName() + ""' to remote server "" + iRemoteServerName + ""...""); ++ ++ // EXECUTE THE REQUEST ON REMOTE SERVER NODE ++ channel.writeByte(OChannelDistributedProtocol.REQUEST_DISTRIBUTED_DB_SHARE_RECEIVER); ++ channel.writeInt(0); ++ channel.writeString(iDatabase.getName()); ++ channel.writeString(iEngineName); ++ ++ OLogManager.instance().info(this, ""Exporting database '%s' via streaming to remote server node: %s..."", iDatabase.getName(), ++ iRemoteServerName); ++ ++ // START THE EXPORT GIVING AS OUTPUTSTREAM THE CHANNEL TO STREAM THE EXPORT ++ new ODatabaseExport(iDatabase, new OChannelBinaryOutputStream(channel), this).exportDatabase(); ++ ++ OLogManager.instance().info(this, ""Database exported correctly""); ++ ++ channel.readStatus(); ++ ++ status = STATUS.CONNECTED; ++ ++ } finally { ++ channel.releaseExclusiveLock(); ++ } ++ } ++ ++ public void onMessage(String iText) { + } + + private void synchronizeDelta() throws IOException { +@@ -212,8 +277,8 @@ private void synchronizeDelta() throws IOException { + if (bufferedChanges.isEmpty()) + return; + +- OLogManager.instance().info(this, ""Started realignment of remote node %s:%d after a reconnection. Found %d updates"", +- networkAddress, networkPort, bufferedChanges.size()); ++ OLogManager.instance().info(this, ""Started realignment of remote node '%s' after a reconnection. 
Found %d updates"", id, ++ bufferedChanges.size()); + + status = STATUS.SYNCHRONIZING; + +@@ -222,6 +287,8 @@ private void synchronizeDelta() throws IOException { + } + bufferedChanges.clear(); + ++ OLogManager.instance().info(this, ""Realignment of remote node '%s' done"", id); ++ + status = STATUS.CONNECTED; + } + +@@ -231,17 +298,4 @@ private void synchronizeDelta() throws IOException { + private int readStatus() throws IOException { + return channel.readStatus(); + } +- +- private ODocument createDatabaseConfiguration() { +- final ODocument config = new ODocument(); +- +- config.field(""servers"", new ODocument(manager.getName(), new ODocument(""update-delay"", manager.getServerUpdateDelay()))); +- config.field(""clusters"", new ODocument(""*"", new ODocument(""owner"", manager.getName()))); +- +- return config; +- } +- +- public STATUS getStatus() { +- return status; +- } + } +diff --git a/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerNodeChecker.java b/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerNodeChecker.java +index efdbf3cf902..c669a9f1338 100644 +--- a/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerNodeChecker.java ++++ b/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerNodeChecker.java +@@ -45,7 +45,7 @@ public void run() { + + // CHECK EVERY SINGLE NODE + for (ODistributedServerNode node : nodeList) { +- if (node.getStatus() != STATUS.DISCONNECTED) ++ if (node.getStatus() == STATUS.CONNECTED) + if (!node.sendHeartBeat(manager.networkTimeoutLeader)) { + manager.handleNodeFailure(node); + } +diff --git a/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerRecordHook.java b/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerRecordHook.java +index 37bc038dd3c..8ee04caf27a 100644 +--- a/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerRecordHook.java ++++ b/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerRecordHook.java +@@ -15,52 +15,75 @@ + */ + package com.orientechnologies.orient.server.handler.distributed; + ++import com.orientechnologies.common.log.OLogManager; ++import com.orientechnologies.orient.core.Orient; ++import com.orientechnologies.orient.core.db.ODatabase; ++import com.orientechnologies.orient.core.db.ODatabaseComplex; ++import com.orientechnologies.orient.core.db.ODatabaseLifecycleListener; + import com.orientechnologies.orient.core.hook.ORecordHook; + import com.orientechnologies.orient.core.record.ORecord; + import com.orientechnologies.orient.core.record.ORecordInternal; + import com.orientechnologies.orient.core.tx.OTransactionEntry; +-import com.orientechnologies.orient.server.OClientConnection; + + /** + * Record hook implementation. Catches all the relevant events and propagates to the cluster's slave nodes. 
+ * + * @author Luca Garulli (l.garulli--at--orientechnologies.com) + */ +-public class ODistributedServerRecordHook implements ORecordHook { ++public class ODistributedServerRecordHook implements ORecordHook, ODatabaseLifecycleListener { + + private ODistributedServerManager manager; +- private OClientConnection connection; + +- public ODistributedServerRecordHook(final ODistributedServerManager iDistributedServerManager, final OClientConnection iConnection) { ++ /** ++ * Auto install itself as lifecycle listener for databases. ++ */ ++ public ODistributedServerRecordHook(final ODistributedServerManager iDistributedServerManager) { + manager = iDistributedServerManager; +- connection = iConnection; ++ Orient.instance().addDbLifecycleListener(this); + } + + public void onTrigger(final TYPE iType, final ORecord iRecord) { + if (!manager.isDistributedConfiguration()) + return; + ++ OLogManager.instance().info( ++ this, ++ ""Caught change "" + iType + "" in database '"" + iRecord.getDatabase().getName() + ""', record: "" + iRecord.getIdentity() ++ + "". Distribute the change in all the cluster nodes""); ++ + switch (iType) { + case AFTER_CREATE: +- manager.distributeRequest(connection, new OTransactionEntry>((ORecordInternal) iRecord, +- OTransactionEntry.CREATED, null)); ++ manager.distributeRequest(new OTransactionEntry>((ORecordInternal) iRecord, OTransactionEntry.CREATED, ++ null)); + break; + + case AFTER_UPDATE: +- manager.distributeRequest(connection, new OTransactionEntry>((ORecordInternal) iRecord, +- OTransactionEntry.UPDATED, null)); ++ manager.distributeRequest(new OTransactionEntry>((ORecordInternal) iRecord, OTransactionEntry.UPDATED, ++ null)); + break; + + case AFTER_DELETE: +- manager.distributeRequest(connection, new OTransactionEntry>((ORecordInternal) iRecord, +- OTransactionEntry.DELETED, null)); ++ manager.distributeRequest(new OTransactionEntry>((ORecordInternal) iRecord, OTransactionEntry.DELETED, ++ null)); + break; + + default: + // NOT DISTRIBUTED REQUEST, JUST RETURN + return; + } ++ } ++ ++ /** ++ * Install the itself as trigger to catch all the events against records ++ */ ++ public void onOpen(final ODatabase iDatabase) { ++ ((ODatabaseComplex) iDatabase).registerHook(this); ++ } + +- System.out.println(""\nCatched update to database: "" + iType + "" record: "" + iRecord); ++ /** ++ * Remove itself as trigger to catch all the events against records ++ */ ++ public void onClose(final ODatabase iDatabase) { ++ ((ODatabaseComplex) iDatabase).unregisterHook(this); + } + } +diff --git a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java +index fe7f6dbedaa..33fb84a9357 100644 +--- a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java ++++ b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java +@@ -21,7 +21,6 @@ + import java.net.SocketException; + import java.util.Collection; + import java.util.HashSet; +-import java.util.List; + import java.util.Map; + import java.util.Set; + +@@ -70,7 +69,7 @@ + import com.orientechnologies.orient.server.OServer; + import com.orientechnologies.orient.server.OServerMain; + import com.orientechnologies.orient.server.config.OServerUserConfiguration; +-import com.orientechnologies.orient.server.handler.OServerHandler; ++import 
com.orientechnologies.orient.server.handler.OServerHandlerHelper; + import com.orientechnologies.orient.server.network.protocol.ONetworkProtocol; + import com.orientechnologies.orient.server.tx.OTransactionOptimisticProxy; + import com.orientechnologies.orient.server.tx.OTransactionRecordProxy; +@@ -120,20 +119,24 @@ protected void execute() throws Exception { + + data.lastCommandReceived = System.currentTimeMillis(); + +- invokeHandlerCallbackOnBeforeClientRequest((byte) requestType); ++ OServerHandlerHelper.invokeHandlerCallbackOnBeforeClientRequest(connection, (byte) requestType); + + parseCommand(); + +- invokeHandlerCallbackOnAfterClientRequest((byte) requestType); ++ OServerHandlerHelper.invokeHandlerCallbackOnAfterClientRequest(connection, (byte) requestType); + + } catch (EOFException eof) { ++ OServerHandlerHelper.invokeHandlerCallbackOnClientError(connection, eof); + sendShutdown(); + } catch (SocketException e) { ++ OServerHandlerHelper.invokeHandlerCallbackOnClientError(connection, e); + sendShutdown(); + } catch (OException e) { ++ OServerHandlerHelper.invokeHandlerCallbackOnClientError(connection, e); + channel.clearInput(); + sendError(clientTxId, e); + } catch (Throwable t) { ++ OServerHandlerHelper.invokeHandlerCallbackOnClientError(connection, t); + OLogManager.instance().error(this, ""Error on executing request"", t); + channel.clearInput(); + sendError(clientTxId, t); +@@ -679,7 +682,7 @@ else if (iLinked instanceof Map) + + @Override + public void startup() { +- invokeHandlerCallbackOnClientDisconnection(); ++ OServerHandlerHelper.invokeHandlerCallbackOnClientConnection(connection); + } + + @Override +@@ -687,7 +690,7 @@ public void shutdown() { + sendShutdown(); + channel.close(); + +- invokeHandlerCallbackOnClientDisconnection(); ++ OServerHandlerHelper.invokeHandlerCallbackOnClientDisconnection(connection); + + OClientConnectionManager.instance().onClientDisconnection(connection.id); + } +@@ -780,46 +783,6 @@ private void writeRecord(final ORecordInternal iRecord) throws IOException { + } + } + +- private void invokeHandlerCallbackOnClientConnection() { +- final List handlers = OServerMain.server().getHandlers(); +- if (handlers != null) +- for (OServerHandler handler : handlers) { +- handler.onClientConnection(connection); +- } +- } +- +- private void invokeHandlerCallbackOnClientDisconnection() { +- final List handlers = OServerMain.server().getHandlers(); +- if (handlers != null) +- for (OServerHandler handler : handlers) { +- handler.onClientDisconnection(connection); +- } +- } +- +- private void invokeHandlerCallbackOnBeforeClientRequest(final byte iRequestType) { +- final List handlers = OServerMain.server().getHandlers(); +- if (handlers != null) +- for (OServerHandler handler : handlers) { +- handler.onBeforeClientRequest(connection, iRequestType); +- } +- } +- +- private void invokeHandlerCallbackOnAfterClientRequest(final byte iRequestType) { +- final List handlers = OServerMain.server().getHandlers(); +- if (handlers != null) +- for (OServerHandler handler : handlers) { +- handler.onAfterClientRequest(connection, iRequestType); +- } +- } +- +- private void invokeHandlerCallbackOnClientError() { +- final List handlers = OServerMain.server().getHandlers(); +- if (handlers != null) +- for (OServerHandler handler : handlers) { +- handler.onClientError(connection); +- } +- } +- + protected ODatabaseDocumentTx openDatabase(final String dbName, final String iUser, final String iPassword) { + // SEARCH THE DB IN MEMORY FIRST + ODatabaseDocumentTx db = 
(ODatabaseDocumentTx) OServerMain.server().getMemoryDatabases().get(dbName); +@@ -853,8 +816,6 @@ protected void createDatabase(final ODatabaseDocumentTx iDatabase) { + } + + underlyingDatabase = ((ODatabaseRaw) ((ODatabaseComplex) iDatabase.getUnderlying()).getUnderlying()); +- +- invokeHandlerCallbackOnClientConnection(); + } + + protected ODatabaseDocumentTx getDatabaseInstance(final String iDbName, final String iStorageMode) { +diff --git a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/distributed/ONetworkProtocolDistributed.java b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/distributed/ONetworkProtocolDistributed.java +index bf8cd470dd1..6bbaa7bfd48 100644 +--- a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/distributed/ONetworkProtocolDistributed.java ++++ b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/distributed/ONetworkProtocolDistributed.java +@@ -20,20 +20,16 @@ + import com.orientechnologies.common.log.OLogManager; + import com.orientechnologies.orient.core.command.OCommandOutputListener; + import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx; +-import com.orientechnologies.orient.core.db.tool.ODatabaseExport; + import com.orientechnologies.orient.core.db.tool.ODatabaseImport; + import com.orientechnologies.orient.core.exception.OConfigurationException; + import com.orientechnologies.orient.core.metadata.security.OUser; + import com.orientechnologies.orient.core.record.impl.ODocument; + import com.orientechnologies.orient.core.storage.impl.local.OStorageLocal; + import com.orientechnologies.orient.enterprise.channel.binary.OChannelBinaryInputStream; +-import com.orientechnologies.orient.enterprise.channel.binary.OChannelBinaryOutputStream; + import com.orientechnologies.orient.enterprise.channel.distributed.OChannelDistributedProtocol; + import com.orientechnologies.orient.server.OServerMain; + import com.orientechnologies.orient.server.handler.distributed.ODistributedServerManager; + import com.orientechnologies.orient.server.handler.distributed.ODistributedServerNode; +-import com.orientechnologies.orient.server.handler.distributed.ODistributedServerNode.STATUS; +-import com.orientechnologies.orient.server.handler.distributed.ODistributedSynchronizationException; + import com.orientechnologies.orient.server.network.protocol.binary.ONetworkProtocolBinary; + + /** +@@ -88,6 +84,7 @@ protected void parseCommand() throws IOException { + final String dbUser = channel.readString(); + final String dbPassword = channel.readString(); + final String remoteServerName = channel.readString(); ++ final String mode = channel.readString(); + + checkServerAccess(""database.share""); + +@@ -96,34 +93,8 @@ protected void parseCommand() throws IOException { + final String engineName = db.getStorage() instanceof OStorageLocal ? 
""local"" : ""memory""; + + final ODistributedServerNode remoteServerNode = manager.getNode(remoteServerName); +- if (remoteServerNode.getStatus() == STATUS.DISCONNECTED) +- throw new ODistributedSynchronizationException(""Can't share database '"" + dbName + ""' on remote server node '"" +- + remoteServerName + ""' because is disconnected""); + +- try { +- remoteServerNode.channel.acquireExclusiveLock(); +- +- OLogManager.instance().info(this, ""Sharing database '"" + dbName + ""' to remote server "" + remoteServerName + ""...""); +- +- // EXECUTE THE REQUEST ON REMOTE SERVER NODE +- remoteServerNode.channel.writeByte(OChannelDistributedProtocol.REQUEST_DISTRIBUTED_DB_SHARE_RECEIVER); +- remoteServerNode.channel.writeInt(0); +- remoteServerNode.channel.writeString(dbName); +- remoteServerNode.channel.writeString(engineName); +- +- OLogManager.instance().info(this, ""Exporting database '%s' via streaming to remote server node: %s..."", dbName, +- remoteServerName); +- +- // START THE EXPORT GIVING AS OUTPUTSTREAM THE CHANNEL TO STREAM THE EXPORT +- new ODatabaseExport(db, new OChannelBinaryOutputStream(remoteServerNode.channel), this).exportDatabase(); +- +- OLogManager.instance().info(this, ""Database exported correctly""); +- +- remoteServerNode.channel.readStatus(); +- +- } finally { +- remoteServerNode.channel.releaseExclusiveLock(); +- } ++ remoteServerNode.shareDatabase(db, remoteServerName, engineName, mode); + + sendOk(0); + +diff --git a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/http/ONetworkProtocolHttpAbstract.java b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/http/ONetworkProtocolHttpAbstract.java +index 2eb8225aaba..f2d4a6a2e9c 100644 +--- a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/http/ONetworkProtocolHttpAbstract.java ++++ b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/http/ONetworkProtocolHttpAbstract.java +@@ -44,6 +44,7 @@ + import com.orientechnologies.orient.server.OClientConnectionManager; + import com.orientechnologies.orient.server.OServer; + import com.orientechnologies.orient.server.config.OServerConfiguration; ++import com.orientechnologies.orient.server.handler.OServerHandlerHelper; + import com.orientechnologies.orient.server.network.protocol.ONetworkProtocol; + import com.orientechnologies.orient.server.network.protocol.http.command.OServerCommand; + +@@ -120,10 +121,15 @@ public void service() throws ONetworkProtocolException, IOException { + + if (cmd != null) + try { ++ OServerHandlerHelper.invokeHandlerCallbackOnBeforeClientRequest(connection, cmd); ++ + if (cmd.beforeExecute(request)) { + // EXECUTE THE COMMAND + cmd.execute(request); + } ++ ++ OServerHandlerHelper.invokeHandlerCallbackOnAfterClientRequest(connection, cmd); ++ + } catch (Exception e) { + handleError(e); + }" +4f4829ba44ea261c80e6d7971be664c157b48e9b,Valadoc,"Embedded: Search images relative to the file +",a,https://github.com/GNOME/vala/,"diff --git a/src/libvaladoc/content/blockcontent.vala b/src/libvaladoc/content/blockcontent.vala +index e466177978..d3d8a984ab 100755 +--- a/src/libvaladoc/content/blockcontent.vala ++++ b/src/libvaladoc/content/blockcontent.vala +@@ -1,6 +1,7 @@ + /* blockcontent.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU 
Lesser General Public +@@ -34,9 +35,9 @@ public abstract class Valadoc.Content.BlockContent : ContentElement { + public override void configure (Settings settings, ResourceLocator locator) { + } + +- public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { ++ public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { + foreach (Block element in _content) { +- element.check (api_root, container, reporter, settings); ++ element.check (api_root, container, file_path, reporter, settings); + } + } + +diff --git a/src/libvaladoc/content/comment.vala b/src/libvaladoc/content/comment.vala +index 6978b8f117..0c6f4496fe 100755 +--- a/src/libvaladoc/content/comment.vala ++++ b/src/libvaladoc/content/comment.vala +@@ -1,6 +1,7 @@ + /* comment.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -36,11 +37,11 @@ public class Valadoc.Content.Comment : BlockContent { + public override void configure (Settings settings, ResourceLocator locator) { + } + +- public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { +- base.check (api_root, container, reporter, settings); ++ public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { ++ base.check (api_root, container, file_path, reporter, settings); + + foreach (Taglet element in _taglets) { +- element.check (api_root, container, reporter, settings); ++ element.check (api_root, container, file_path, reporter, settings); + } + } + +diff --git a/src/libvaladoc/content/contentelement.vala b/src/libvaladoc/content/contentelement.vala +index 427cf66071..be15e5bbdd 100755 +--- a/src/libvaladoc/content/contentelement.vala ++++ b/src/libvaladoc/content/contentelement.vala +@@ -1,6 +1,7 @@ + /* contentelement.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -28,7 +29,7 @@ public abstract class Valadoc.Content.ContentElement : Object { + public virtual void configure (Settings settings, ResourceLocator locator) { + } + +- public abstract void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings); ++ public abstract void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings); + + public abstract void accept (ContentVisitor visitor); + +diff --git a/src/libvaladoc/content/contentfactory.vala b/src/libvaladoc/content/contentfactory.vala +index 19cf41be21..dda8a87636 100755 +--- a/src/libvaladoc/content/contentfactory.vala ++++ b/src/libvaladoc/content/contentfactory.vala +@@ -1,6 +1,7 @@ + /* contentfactory.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +diff --git 
a/src/libvaladoc/content/contentvisitor.vala b/src/libvaladoc/content/contentvisitor.vala +index 3eb3a4b3d4..d1b14c4538 100755 +--- a/src/libvaladoc/content/contentvisitor.vala ++++ b/src/libvaladoc/content/contentvisitor.vala +@@ -1,6 +1,7 @@ + /* contentvisitor.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +diff --git a/src/libvaladoc/content/embedded.vala b/src/libvaladoc/content/embedded.vala +index f158800aae..b8f4222589 100755 +--- a/src/libvaladoc/content/embedded.vala ++++ b/src/libvaladoc/content/embedded.vala +@@ -1,6 +1,7 @@ + /* embedded.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -42,7 +43,18 @@ public class Valadoc.Content.Embedded : ContentElement, Inline, StyleAttributes + _locator = locator; + } + +- public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { ++ public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { ++ // search relative to our file ++ if (!Path.is_absolute (url)) { ++ string relative_to_file = Path.build_path (Path.DIR_SEPARATOR_S, Path.get_dirname (file_path), url); ++ if (FileUtils.test (relative_to_file, FileTest.EXISTS | FileTest.IS_REGULAR)) { ++ url = (owned) relative_to_file; ++ package = container.package; ++ return ; ++ } ++ } ++ ++ // search relative to the current directory / absoulte path + if (!FileUtils.test (url, FileTest.EXISTS | FileTest.IS_REGULAR)) { + reporter.simple_error (""%s does not exist"", url); + } else { +diff --git a/src/libvaladoc/content/headline.vala b/src/libvaladoc/content/headline.vala +index c7fcb29ce9..36b19c23b0 100755 +--- a/src/libvaladoc/content/headline.vala ++++ b/src/libvaladoc/content/headline.vala +@@ -1,6 +1,7 @@ + /* headline.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -31,12 +32,12 @@ public class Valadoc.Content.Headline : Block, InlineContent { + _level = 0; + } + +- public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { ++ public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { + // TODO report error if level == 0 ? + // TODO: content.size == 0? 
+ + // Check inline content +- base.check (api_root, container, reporter, settings); ++ base.check (api_root, container, file_path, reporter, settings); + } + + public override void accept (ContentVisitor visitor) { +diff --git a/src/libvaladoc/content/inlinecontent.vala b/src/libvaladoc/content/inlinecontent.vala +index 389c66b96b..119f0569d8 100755 +--- a/src/libvaladoc/content/inlinecontent.vala ++++ b/src/libvaladoc/content/inlinecontent.vala +@@ -1,6 +1,7 @@ + /* inlinecontent.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -35,9 +36,9 @@ public abstract class Valadoc.Content.InlineContent : ContentElement { + internal InlineContent () { + } + +- public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { ++ public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { + foreach (Inline element in _content) { +- element.check (api_root, container, reporter, settings); ++ element.check (api_root, container, file_path, reporter, settings); + } + } + +diff --git a/src/libvaladoc/content/inlinetaglet.vala b/src/libvaladoc/content/inlinetaglet.vala +index 813bcf3390..151395095f 100755 +--- a/src/libvaladoc/content/inlinetaglet.vala ++++ b/src/libvaladoc/content/inlinetaglet.vala +@@ -1,6 +1,7 @@ + /* taglet.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -48,9 +49,9 @@ public abstract class Valadoc.Content.InlineTaglet : ContentElement, Taglet, Inl + this.locator = locator; + } + +- public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { ++ public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { + ContentElement element = get_content (); +- element.check (api_root, container, reporter, settings); ++ element.check (api_root, container, file_path, reporter, settings); + } + + public override void accept (ContentVisitor visitor) { +diff --git a/src/libvaladoc/content/link.vala b/src/libvaladoc/content/link.vala +index e114e96947..e2648981ad 100755 +--- a/src/libvaladoc/content/link.vala ++++ b/src/libvaladoc/content/link.vala +@@ -1,6 +1,7 @@ + /* link.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -33,7 +34,7 @@ public class Valadoc.Content.Link : InlineContent, Inline { + public override void configure (Settings settings, ResourceLocator locator) { + } + +- public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { ++ public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { + //TODO: check url + } + +diff --git a/src/libvaladoc/content/list.vala 
b/src/libvaladoc/content/list.vala +index c7bae59a85..e55489fe1e 100755 +--- a/src/libvaladoc/content/list.vala ++++ b/src/libvaladoc/content/list.vala +@@ -1,6 +1,7 @@ + /* list.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -108,10 +109,10 @@ public class Valadoc.Content.List : ContentElement, Block { + _items = new ArrayList (); + } + +- public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { ++ public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { + // Check individual list items + foreach (ListItem element in _items) { +- element.check (api_root, container, reporter, settings); ++ element.check (api_root, container, file_path, reporter, settings); + } + } + +diff --git a/src/libvaladoc/content/listitem.vala b/src/libvaladoc/content/listitem.vala +index f6907964e8..ef5f589f7c 100755 +--- a/src/libvaladoc/content/listitem.vala ++++ b/src/libvaladoc/content/listitem.vala +@@ -1,6 +1,7 @@ + /* listitem.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -30,12 +31,12 @@ public class Valadoc.Content.ListItem : InlineContent { + base (); + } + +- public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { ++ public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { + // Check inline content +- base.check (api_root, container, reporter, settings); ++ base.check (api_root, container, file_path, reporter, settings); + + if (sub_list != null) { +- sub_list.check (api_root, container, reporter, settings); ++ sub_list.check (api_root, container, file_path, reporter, settings); + } + } + +diff --git a/src/libvaladoc/content/note.vala b/src/libvaladoc/content/note.vala +index d2b16c557c..40bb930198 100755 +--- a/src/libvaladoc/content/note.vala ++++ b/src/libvaladoc/content/note.vala +@@ -1,6 +1,7 @@ + /* note.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -28,9 +29,9 @@ public class Valadoc.Content.Note : BlockContent, Block { + base (); + } + +- public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { ++ public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { + // Check inline content +- base.check (api_root, container, reporter, settings); ++ base.check (api_root, container, file_path, reporter, settings); + } + + public override void accept (ContentVisitor visitor) { +diff --git a/src/libvaladoc/content/paragraph.vala b/src/libvaladoc/content/paragraph.vala +index 8f89b48b37..4998952d40 100755 +--- a/src/libvaladoc/content/paragraph.vala ++++ 
b/src/libvaladoc/content/paragraph.vala +@@ -1,6 +1,7 @@ + /* paragraph.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -32,9 +33,9 @@ public class Valadoc.Content.Paragraph : InlineContent, Block, StyleAttributes { + base (); + } + +- public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { ++ public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { + // Check inline content +- base.check (api_root, container, reporter, settings); ++ base.check (api_root, container, file_path, reporter, settings); + } + + public override void accept (ContentVisitor visitor) { +diff --git a/src/libvaladoc/content/run.vala b/src/libvaladoc/content/run.vala +index 519b424fe2..146d9c29e2 100755 +--- a/src/libvaladoc/content/run.vala ++++ b/src/libvaladoc/content/run.vala +@@ -1,6 +1,7 @@ + /* run.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -117,9 +118,9 @@ public class Valadoc.Content.Run : InlineContent, Inline { + _style = style; + } + +- public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { ++ public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { + // Check inline content +- base.check (api_root, container, reporter, settings); ++ base.check (api_root, container, file_path, reporter, settings); + } + + public override void accept (ContentVisitor visitor) { +diff --git a/src/libvaladoc/content/sourcecode.vala b/src/libvaladoc/content/sourcecode.vala +index fb2bd625e9..338d23b24f 100755 +--- a/src/libvaladoc/content/sourcecode.vala ++++ b/src/libvaladoc/content/sourcecode.vala +@@ -1,6 +1,7 @@ + /* sourcecode.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -64,7 +65,7 @@ public class Valadoc.Content.SourceCode : ContentElement, Inline{ + _language = Language.VALA; + } + +- public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { ++ public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { + } + + public override void accept (ContentVisitor visitor) { +diff --git a/src/libvaladoc/content/symbollink.vala b/src/libvaladoc/content/symbollink.vala +index ef52b30101..aff9476e26 100755 +--- a/src/libvaladoc/content/symbollink.vala ++++ b/src/libvaladoc/content/symbollink.vala +@@ -1,6 +1,7 @@ + /* symbollink.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU 
Lesser General Public +@@ -36,7 +37,7 @@ public class Valadoc.Content.SymbolLink : ContentElement, Inline { + public override void configure (Settings settings, ResourceLocator locator) { + } + +- public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { ++ public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { + } + + public override void accept (ContentVisitor visitor) { +diff --git a/src/libvaladoc/content/table.vala b/src/libvaladoc/content/table.vala +index dc232ae208..17fa589e8b 100755 +--- a/src/libvaladoc/content/table.vala ++++ b/src/libvaladoc/content/table.vala +@@ -1,6 +1,7 @@ + /* table.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -33,12 +34,12 @@ public class Valadoc.Content.Table : ContentElement, Block { + _rows = new ArrayList (); + } + +- public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { ++ public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { + // Check the table consistency in term of row/column number + + // Check individual rows + foreach (var row in _rows) { +- row.check (api_root, container, reporter, settings); ++ row.check (api_root, container, file_path, reporter, settings); + } + } + +diff --git a/src/libvaladoc/content/tablecell.vala b/src/libvaladoc/content/tablecell.vala +index 01e732f090..733aad1ca6 100755 +--- a/src/libvaladoc/content/tablecell.vala ++++ b/src/libvaladoc/content/tablecell.vala +@@ -1,6 +1,7 @@ + /* tablecell.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -36,9 +37,9 @@ public class Valadoc.Content.TableCell : InlineContent, StyleAttributes { + _rowspan = 1; + } + +- public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { ++ public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { + // Check inline content +- base.check (api_root, container, reporter, settings); ++ base.check (api_root, container, file_path, reporter, settings); + } + + public override void accept (ContentVisitor visitor) { +diff --git a/src/libvaladoc/content/tablerow.vala b/src/libvaladoc/content/tablerow.vala +index 943c95f646..5cd59fd4bb 100755 +--- a/src/libvaladoc/content/tablerow.vala ++++ b/src/libvaladoc/content/tablerow.vala +@@ -1,6 +1,7 @@ + /* tablerow.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -33,10 +34,10 @@ public class Valadoc.Content.TableRow : ContentElement { + _cells = new ArrayList (); + } + +- public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { ++ 
public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { + // Check individual cells + foreach (var cell in _cells) { +- cell.check (api_root, container, reporter, settings); ++ cell.check (api_root, container, file_path, reporter, settings); + } + } + +diff --git a/src/libvaladoc/content/text.vala b/src/libvaladoc/content/text.vala +index 2b147235d5..6d3877e87e 100755 +--- a/src/libvaladoc/content/text.vala ++++ b/src/libvaladoc/content/text.vala +@@ -1,6 +1,7 @@ + /* text.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -36,7 +37,7 @@ public class Valadoc.Content.Text : ContentElement, Inline { + } + } + +- public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { ++ public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { + } + + public override void accept (ContentVisitor visitor) { +diff --git a/src/libvaladoc/content/warning.vala b/src/libvaladoc/content/warning.vala +index e848c43f30..3ab9b3c736 100755 +--- a/src/libvaladoc/content/warning.vala ++++ b/src/libvaladoc/content/warning.vala +@@ -1,6 +1,7 @@ + /* warning.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -28,9 +29,9 @@ public class Valadoc.Content.Warning : BlockContent, Block { + base (); + } + +- public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { ++ public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { + // Check inline content +- base.check (api_root, container, reporter, settings); ++ base.check (api_root, container, file_path, reporter, settings); + } + + public override void accept (ContentVisitor visitor) { +diff --git a/src/libvaladoc/content/wikilink.vala b/src/libvaladoc/content/wikilink.vala +index e88bc18f71..e08719f6ab 100755 +--- a/src/libvaladoc/content/wikilink.vala ++++ b/src/libvaladoc/content/wikilink.vala +@@ -1,6 +1,7 @@ + /* link.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -34,7 +35,7 @@ public class Valadoc.Content.WikiLink : InlineContent, Inline { + public override void configure (Settings settings, ResourceLocator locator) { + } + +- public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { ++ public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { + page = api_root.wikitree.search (name); + if (page == null) { + reporter.simple_warning (""%s does not exist"".printf (name)); +diff --git a/src/libvaladoc/documentation/documentationparser.vala 
b/src/libvaladoc/documentation/documentationparser.vala +index f4a9667cd1..5fa736a5a6 100755 +--- a/src/libvaladoc/documentation/documentationparser.vala ++++ b/src/libvaladoc/documentation/documentationparser.vala +@@ -1,6 +1,7 @@ + /* documentationparser.vala + * +- * Copyright (C) 2008-2011 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -76,7 +77,7 @@ public class Valadoc.DocumentationParser : Object, ResourceLocator { + public Comment? parse_comment_str (Api.Node element, string content, string filename, int first_line, int first_column) { + try { + Comment doc_comment = parse_comment (content, filename, first_line, first_column); +- doc_comment.check (_tree, element, _reporter, _settings); ++ doc_comment.check (_tree, element, filename, _reporter, _settings); + return doc_comment; + } catch (ParserError error) { + return null; +@@ -94,7 +95,7 @@ public class Valadoc.DocumentationParser : Object, ResourceLocator { + + try { + Page documentation = parse_wiki (page.documentation_str, page.get_filename ()); +- documentation.check (_tree, pkg, _reporter, _settings); ++ documentation.check (_tree, pkg, page.path, _reporter, _settings); + return documentation; + } catch (ParserError error) { + return null; +diff --git a/src/libvaladoc/documentation/gtkdoccommentparser.vala b/src/libvaladoc/documentation/gtkdoccommentparser.vala +index 574e65fabb..b1486069d4 100644 +--- a/src/libvaladoc/documentation/gtkdoccommentparser.vala ++++ b/src/libvaladoc/documentation/gtkdoccommentparser.vala +@@ -1,6 +1,6 @@ + /* gtkcommentparser.vala + * +- * Copyright (C) 2011 Florian Brosch ++ * Copyright (C) 2011-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -260,7 +260,7 @@ public class Valadoc.Gtkdoc.Parser : Object, ResourceLocator { + first = false; + } + +- comment.check (tree, element, reporter, settings); ++ comment.check (tree, element, gir_comment.file.relative_path, reporter, settings); + return comment; + } + +diff --git a/src/libvaladoc/taglets/tagletdeprecated.vala b/src/libvaladoc/taglets/tagletdeprecated.vala +index f51adc174f..468f6ec55f 100755 +--- a/src/libvaladoc/taglets/tagletdeprecated.vala ++++ b/src/libvaladoc/taglets/tagletdeprecated.vala +@@ -1,6 +1,7 @@ + /* tagletdeprecated.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -28,8 +29,8 @@ public class Valadoc.Taglets.Deprecated : InlineContent, Taglet, Block { + return run_rule; + } + +- public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { +- base.check (api_root, container, reporter, settings); ++ public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { ++ base.check (api_root, container, file_path, reporter, settings); + } + + public override void accept (ContentVisitor visitor) { +diff --git a/src/libvaladoc/taglets/tagletinheritdoc.vala b/src/libvaladoc/taglets/tagletinheritdoc.vala +index 329db68696..653a448a10 100755 +--- 
a/src/libvaladoc/taglets/tagletinheritdoc.vala ++++ b/src/libvaladoc/taglets/tagletinheritdoc.vala +@@ -1,6 +1,7 @@ + /* tagletinheritdoc.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -30,7 +31,7 @@ public class Valadoc.Taglets.InheritDoc : InlineTaglet { + return null; + } + +- public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { ++ public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { + // TODO Check that the container is an override of an abstract symbol + // Also retrieve that abstract symbol _inherited + +diff --git a/src/libvaladoc/taglets/tagletlink.vala b/src/libvaladoc/taglets/tagletlink.vala +index 3ebb6de3eb..cbe8dc499e 100755 +--- a/src/libvaladoc/taglets/tagletlink.vala ++++ b/src/libvaladoc/taglets/tagletlink.vala +@@ -1,6 +1,7 @@ + /* taglet.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -50,7 +51,7 @@ public class Valadoc.Taglets.Link : InlineTaglet { + }); + } + +- public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { ++ public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { + if (symbol_name.has_prefix (""c::"")) { + _symbol_name = _symbol_name.substring (3); + _symbol = api_root.search_symbol_cstr (container, symbol_name); +@@ -75,7 +76,7 @@ public class Valadoc.Taglets.Link : InlineTaglet { + reporter.simple_warning (""%s: %s does not exist"", container.get_full_name (), symbol_name); + } + +- base.check (api_root, container, reporter, settings); ++ base.check (api_root, container, file_path, reporter, settings); + } + + public override ContentElement produce_content () { +diff --git a/src/libvaladoc/taglets/tagletparam.vala b/src/libvaladoc/taglets/tagletparam.vala +index 6dfb235ef2..bbff4030b5 100755 +--- a/src/libvaladoc/taglets/tagletparam.vala ++++ b/src/libvaladoc/taglets/tagletparam.vala +@@ -1,6 +1,7 @@ + /* taglet.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -40,7 +41,7 @@ public class Valadoc.Taglets.Param : InlineContent, Taglet, Block { + } + + +- public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { ++ public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { + // Check for the existence of such a parameter + this.parameter = null; + +@@ -72,7 +73,7 @@ public class Valadoc.Taglets.Param : InlineContent, Taglet, Block { + reporter.simple_warning (""%s: Unknown parameter `%s'"", container.get_full_name (), parameter_name); + } + +- base.check (api_root, container, reporter, settings); ++ base.check (api_root, 
container, file_path, reporter, settings); + } + + public override void accept (ContentVisitor visitor) { +diff --git a/src/libvaladoc/taglets/tagletreturn.vala b/src/libvaladoc/taglets/tagletreturn.vala +index a4beee0c6a..5a2797737c 100755 +--- a/src/libvaladoc/taglets/tagletreturn.vala ++++ b/src/libvaladoc/taglets/tagletreturn.vala +@@ -1,6 +1,7 @@ + /* taglet.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -29,10 +30,10 @@ public class Valadoc.Taglets.Return : InlineContent, Taglet, Block { + return run_rule; + } + +- public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { ++ public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { + // TODO check for the existence of a return type + +- base.check (api_root, container, reporter, settings); ++ base.check (api_root, container, file_path, reporter, settings); + } + + public override void accept (ContentVisitor visitor) { +diff --git a/src/libvaladoc/taglets/tagletsee.vala b/src/libvaladoc/taglets/tagletsee.vala +index 68ed636ed6..895e3c2a22 100755 +--- a/src/libvaladoc/taglets/tagletsee.vala ++++ b/src/libvaladoc/taglets/tagletsee.vala +@@ -1,6 +1,7 @@ + /* tagletsee.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -38,7 +39,7 @@ public class Valadoc.Taglets.See : ContentElement, Taglet, Block { + }); + } + +- public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { ++ public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { + if (symbol_name.has_prefix (""c::"")) { + symbol_name = symbol_name.substring (3); + symbol = api_root.search_symbol_cstr (container, symbol_name); +diff --git a/src/libvaladoc/taglets/tagletsince.vala b/src/libvaladoc/taglets/tagletsince.vala +index 5b68ecfe82..49acafbc0d 100755 +--- a/src/libvaladoc/taglets/tagletsince.vala ++++ b/src/libvaladoc/taglets/tagletsince.vala +@@ -1,6 +1,7 @@ + /* tagletsince.vala + * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -37,7 +38,7 @@ public class Valadoc.Taglets.Since : ContentElement, Taglet, Block { + }); + } + +- public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { ++ public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { + } + + public override void accept (ContentVisitor visitor) { +diff --git a/src/libvaladoc/taglets/tagletthrows.vala b/src/libvaladoc/taglets/tagletthrows.vala +index b592f6ca1e..547fbc7ca6 100755 +--- a/src/libvaladoc/taglets/tagletthrows.vala ++++ b/src/libvaladoc/taglets/tagletthrows.vala +@@ -1,6 +1,7 @@ + /* tagletthrows.vala 
+ * +- * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * Copyright (C) 2008-2009 Didier Villevalois ++ * Copyright (C) 2008-2012 Florian Brosch + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public +@@ -35,14 +36,14 @@ public class Valadoc.Taglets.Throws : InlineContent, Taglet, Block { + }); + } + +- public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { ++ public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { + error_domain = api_root.search_symbol_str (container, error_domain_name); + if (error_domain == null) { + // TODO use ContentElement's source reference + reporter.simple_error (""%s does not exist"", error_domain_name); + } + +- base.check (api_root, container, reporter, settings); ++ base.check (api_root, container, file_path, reporter, settings); + } + + public override void accept (ContentVisitor visitor) {" +1ec7b1afab5fd7c98cda53520c46eeec884fb672,Vala,"gdk-2.0: update to 2.18.6 + +Fixes bug 609293. +",a,https://github.com/GNOME/vala/,"diff --git a/vapi/gdk-2.0.vapi b/vapi/gdk-2.0.vapi +index efe8e7e811..332b11eb01 100644 +--- a/vapi/gdk-2.0.vapi ++++ b/vapi/gdk-2.0.vapi +@@ -90,7 +90,12 @@ namespace Gdk { + public weak Gdk.Device core_pointer; + public uint double_click_distance; + public uint double_click_time; ++ public uint ignore_core_events; ++ public weak Gdk.KeyboardGrabInfo keyboard_grab; ++ public uint32 last_event_time; ++ public weak GLib.List pointer_grabs; + public weak Gdk.DisplayPointerHooks pointer_hooks; ++ public weak Gdk.PointerWindowInfo pointer_info; + public weak GLib.List queued_events; + public weak GLib.List queued_tail; + public void add_client_message_filter (Gdk.Atom message_type, Gdk.FilterFunc func); +@@ -170,6 +175,8 @@ namespace Gdk { + public class Drawable : GLib.Object { + public unowned Gdk.Image copy_to_image (Gdk.Image image, int src_x, int src_y, int dest_x, int dest_y, int width, int height); + [NoWrapper] ++ public virtual unowned Cairo.Surface create_cairo_surface (int width, int height); ++ [NoWrapper] + public virtual unowned Gdk.GC create_gc (Gdk.GCValues values, Gdk.GCValuesMask mask); + [CCode (cname = ""gdk_draw_arc"")] + public virtual void draw_arc (Gdk.GC gc, bool filled, int x, int y, int width, int height, int angle1, int angle2); +@@ -208,10 +215,14 @@ namespace Gdk { + public virtual unowned Gdk.Image get_image (int x, int y, int width, int height); + public virtual unowned Gdk.Screen get_screen (); + public virtual void get_size (out int width, out int height); ++ [NoWrapper] ++ public virtual unowned Gdk.Drawable get_source_drawable (); + public virtual unowned Gdk.Region get_visible_region (); + public virtual unowned Gdk.Visual get_visual (); + [NoWrapper] + public virtual unowned Cairo.Surface ref_cairo_surface (); ++ [NoWrapper] ++ public virtual void set_cairo_clip (Cairo.Context cr); + public virtual void set_colormap (Gdk.Colormap colormap); + } + [Compact] +@@ -244,7 +255,6 @@ namespace Gdk { + public static unowned Gdk.Event @get (); + public bool get_axis (Gdk.AxisUse axis_use, out double value); + public bool get_coords (out double x_win, out double y_win); +- public static unowned Gdk.Event get_graphics_expose (Gdk.Window window); + public bool get_root_coords (out double x_root, out double y_root); + public unowned Gdk.Screen get_screen (); + public bool get_state (out 
Gdk.ModifierType state); +@@ -326,6 +336,16 @@ namespace Gdk { + public void put_pixel (int x, int y, uint32 pixel); + public void set_colormap (Gdk.Colormap colormap); + } ++ [Compact] ++ [CCode (cheader_filename = ""gdk/gdk.h"")] ++ public class KeyboardGrabInfo { ++ public weak Gdk.Window native_window; ++ public bool owner_events; ++ public ulong serial; ++ public uint32 time; ++ public weak Gdk.Window window; ++ public static bool libgtk_only (Gdk.Display display, out unowned Gdk.Window grab_window, bool owner_events); ++ } + [CCode (cheader_filename = ""gdk/gdk.h"")] + public class Keymap : GLib.Object { + public weak Gdk.Display display; +@@ -376,6 +396,17 @@ namespace Gdk { + public weak GLib.Callback window_at_pointer; + } + [Compact] ++ [CCode (cheader_filename = ""gdk/gdk.h"")] ++ public class PointerWindowInfo { ++ public uint32 button; ++ public ulong motion_hint_serial; ++ public uint32 state; ++ public weak Gdk.Window toplevel_under_pointer; ++ public double toplevel_x; ++ public double toplevel_y; ++ public weak Gdk.Window window_under_pointer; ++ } ++ [Compact] + [CCode (copy_function = ""gdk_region_copy"", free_function = ""gdk_region_destroy"", cheader_filename = ""gdk/gdk.h"")] + public class Region { + [CCode (has_construct_function = false)] +@@ -389,6 +420,7 @@ namespace Gdk { + public void offset (int dx, int dy); + public bool point_in (int x, int y); + public static Gdk.Region polygon (Gdk.Point[] points, Gdk.FillRule fill_rule); ++ public bool rect_equal (Gdk.Rectangle rectangle); + public Gdk.OverlapType rect_in (Gdk.Rectangle rectangle); + public static Gdk.Region rectangle (Gdk.Rectangle rectangle); + public void shrink (int dx, int dy); +@@ -405,6 +437,8 @@ namespace Gdk { + public weak Gdk.GC[] exposure_gcs; + [CCode (array_length = false)] + public weak Gdk.GC[] normal_gcs; ++ [CCode (array_length = false)] ++ public weak Gdk.GC[] subwindow_gcs; + public void broadcast_client_message (Gdk.Event event); + public unowned Gdk.Window get_active_window (); + public static unowned Gdk.Screen get_default (); +@@ -493,17 +527,21 @@ namespace Gdk { + public static void constrain_size (Gdk.Geometry geometry, uint flags, int width, int height, out int new_width, out int new_height); + public void deiconify (); + public void destroy (); +- public void destroy_notify (); + public void enable_synchronized_configure (); + public void end_paint (); ++ public bool ensure_native (); ++ public void flush (); + public void focus (uint32 timestamp); + public static unowned Gdk.Window foreign_new (Gdk.NativeWindow anid); + public static unowned Gdk.Window foreign_new_for_display (Gdk.Display display, Gdk.NativeWindow anid); + public void freeze_toplevel_updates_libgtk_only (); + public void freeze_updates (); + public void fullscreen (); ++ public void geometry_changed (); + public unowned GLib.List get_children (); ++ public unowned Gdk.Cursor? 
get_cursor (); + public bool get_decorations (out Gdk.WMDecoration decorations); ++ public bool get_deskrelative_origin (out int x, out int y); + public Gdk.EventMask get_events (); + public void get_frame_extents (out Gdk.Rectangle rect); + public void get_geometry (out int x, out int y, out int width, out int height, out int depth); +@@ -513,6 +551,7 @@ namespace Gdk { + public unowned Gdk.Window get_parent (); + public unowned Gdk.Window get_pointer (out int x, out int y, out Gdk.ModifierType mask); + public void get_position (out int x, out int y); ++ public void get_root_coords (int x, int y, int root_x, int root_y); + public void get_root_origin (out int x, out int y); + public Gdk.WindowState get_state (); + public unowned Gdk.Window get_toplevel (); +@@ -527,6 +566,7 @@ namespace Gdk { + public void invalidate_maybe_recurse (Gdk.Region region, GLib.Callback child_func); + public void invalidate_rect (Gdk.Rectangle? rect, bool invalidate_children); + public void invalidate_region (Gdk.Region region, bool invalidate_children); ++ public bool is_destroyed (); + public bool is_viewable (); + public bool is_visible (); + public static unowned Gdk.Window lookup (Gdk.NativeWindow anid); +@@ -547,6 +587,7 @@ namespace Gdk { + public void remove_redirection (); + public void reparent (Gdk.Window new_parent, int x, int y); + public void resize (int width, int height); ++ public void restack (Gdk.Window sibling, bool above); + public void scroll (int dx, int dy); + public void set_accept_focus (bool accept_focus); + public void set_back_pixmap (Gdk.Pixmap? pixmap, bool parent_relative); +@@ -591,6 +632,10 @@ namespace Gdk { + public void unmaximize (); + public void unstick (); + public void withdraw (); ++ public Gdk.Cursor cursor { get; set; } ++ public virtual signal void from_embedder (double p0, double p1, void* p2, void* p3); ++ public virtual signal unowned Gdk.Window pick_embedded_child (double p0, double p1); ++ public virtual signal void to_embedder (double p0, double p1, void* p2, void* p3); + } + [CCode (cheader_filename = ""gdk/gdk.h"")] + [SimpleType] +@@ -1172,7 +1217,8 @@ namespace Gdk { + SETTING, + OWNER_CHANGE, + GRAB_BROKEN, +- DAMAGE ++ DAMAGE, ++ EVENT_LAST + } + [CCode (cprefix = ""GDK_EXTENSION_EVENTS_"", cheader_filename = ""gdk/gdk.h"")] + public enum ExtensionMode { +@@ -1485,7 +1531,8 @@ namespace Gdk { + CHILD, + DIALOG, + TEMP, +- FOREIGN ++ FOREIGN, ++ OFFSCREEN + } + [CCode (cprefix = ""GDK_WINDOW_TYPE_HINT_"", cheader_filename = ""gdk/gdk.h"")] + public enum WindowTypeHint { +@@ -1539,6 +1586,8 @@ namespace Gdk { + [CCode (cheader_filename = ""gdk/gdk.h"")] + public static void cairo_region (Cairo.Context cr, Gdk.Region region); + [CCode (cheader_filename = ""gdk/gdk.h"")] ++ public static void cairo_reset_clip (Cairo.Context cr, Gdk.Drawable drawable); ++ [CCode (cheader_filename = ""gdk/gdk.h"")] + public static void cairo_set_source_color (Cairo.Context cr, Gdk.Color color); + [CCode (cheader_filename = ""gdk/gdk.h"")] + public static void cairo_set_source_pixbuf (Cairo.Context cr, Gdk.Pixbuf pixbuf, double pixbuf_x, double pixbuf_y); +@@ -1661,8 +1710,6 @@ namespace Gdk { + [CCode (cheader_filename = ""gdk/gdk.h"")] + public static Gdk.GrabStatus keyboard_grab (Gdk.Window window, bool owner_events, uint32 time_); + [CCode (cheader_filename = ""gdk/gdk.h"")] +- public static bool keyboard_grab_info_libgtk_only (Gdk.Display display, out unowned Gdk.Window grab_window, bool owner_events); +- [CCode (cheader_filename = ""gdk/gdk.h"")] + public static void 
keyboard_ungrab (uint32 time_); + [CCode (cheader_filename = ""gdk/gdk.h"")] + public static void keyval_convert_case (uint symbol, uint lower, uint upper); +@@ -1687,6 +1734,12 @@ namespace Gdk { + [CCode (cheader_filename = ""gdk/gdk.h"")] + public static void notify_startup_complete_with_id (string startup_id); + [CCode (cheader_filename = ""gdk/gdk.h"")] ++ public static unowned Gdk.Window? offscreen_window_get_embedder (Gdk.Window window); ++ [CCode (cheader_filename = ""gdk/gdk.h"")] ++ public static unowned Gdk.Pixmap? offscreen_window_get_pixmap (Gdk.Window window); ++ [CCode (cheader_filename = ""gdk/gdk.h"")] ++ public static void offscreen_window_set_embedder (Gdk.Window window, Gdk.Window embedder); ++ [CCode (cheader_filename = ""gdk/gdk.h"")] + public static unowned Pango.Context pango_context_get (); + [CCode (cheader_filename = ""gdk/gdk.h"")] + public static unowned Pango.Context pango_context_get_for_screen (Gdk.Screen screen); +@@ -1753,7 +1806,7 @@ namespace Gdk { + [CCode (cheader_filename = ""gdk/gdk.h"")] + public static bool selection_owner_set_for_display (Gdk.Display display, Gdk.Window owner, Gdk.Atom selection, uint32 time_, bool send_event); + [CCode (cheader_filename = ""gdk/gdk.h"")] +- public static bool selection_property_get (Gdk.Window requestor, uchar[] data, Gdk.Atom prop_type, int prop_format); ++ public static int selection_property_get (Gdk.Window requestor, uchar[] data, Gdk.Atom prop_type, int prop_format); + [CCode (cheader_filename = ""gdk/gdk.h"")] + public static void selection_send_notify (Gdk.NativeWindow requestor, Gdk.Atom selection, Gdk.Atom target, Gdk.Atom property, uint32 time_); + [CCode (cheader_filename = ""gdk/gdk.h"")] +@@ -1783,8 +1836,6 @@ namespace Gdk { + [CCode (cheader_filename = ""gdk/gdk.h"")] + public static int string_to_compound_text_for_display (Gdk.Display display, string str, Gdk.Atom encoding, int format, uchar[] ctext, int length); + [CCode (cheader_filename = ""gdk/gdk.h"")] +- public static void synthesize_window_state (Gdk.Window window, Gdk.WindowState unset_flags, Gdk.WindowState set_flags); +- [CCode (cheader_filename = ""gdk/gdk.h"")] + public static void test_render_sync (Gdk.Window window); + [CCode (cheader_filename = ""gdk/gdk.h"")] + public static bool test_simulate_button (Gdk.Window window, int x, int y, uint button, Gdk.ModifierType modifiers, Gdk.EventType button_pressrelease); +diff --git a/vapi/packages/gdk-2.0/gdk-2.0.excludes b/vapi/packages/gdk-2.0/gdk-2.0.excludes +index e44260e8fc..9ca941ac4a 100644 +--- a/vapi/packages/gdk-2.0/gdk-2.0.excludes ++++ b/vapi/packages/gdk-2.0/gdk-2.0.excludes +@@ -1,3 +1,6 @@ + gdkalias.h + gdkkeysyms.h + gdkx.h ++gdkdirectfb.h ++gdkprivate.h ++gdkprivate-directfb.h +diff --git a/vapi/packages/gdk-2.0/gdk-2.0.gi b/vapi/packages/gdk-2.0/gdk-2.0.gi +index e2f5c77807..ed82bd46b4 100644 +--- a/vapi/packages/gdk-2.0/gdk-2.0.gi ++++ b/vapi/packages/gdk-2.0/gdk-2.0.gi +@@ -38,6 +38,13 @@ + + + ++ ++ ++ ++ ++ ++ ++ + + + +@@ -587,14 +594,6 @@ + + + +- +- +- +- +- +- +- +- + + + +@@ -663,6 +662,25 @@ + + + ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ + + + +@@ -921,7 +939,7 @@ + + + +- ++ + + + +@@ -1050,14 +1068,6 @@ + + + +- +- +- +- +- +- +- +- + + + +@@ -1523,6 +1533,21 @@ + + + ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ + + + +@@ -1576,6 +1601,15 @@ + + + ++ ++ ++ ++ ++ ++ ++ ++ ++ + + + +@@ -1651,6 +1685,13 @@ + + + ++ ++ ++ ++ ++ ++ ++ + + + +@@ -1775,35 +1816,6 @@ + + + +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- 
+- +- + + + +@@ -1969,12 +1981,6 @@ + + + +- +- +- +- +- +- + + + +@@ -2305,6 +2311,7 @@ + + + ++ + + + +@@ -2481,6 +2488,7 @@ + + + ++ + + + +@@ -3111,9 +3119,14 @@ + + + ++ + + + ++ ++ ++ ++ + + + +@@ -3243,6 +3256,14 @@ + + + ++ ++ ++ ++ ++ ++ ++ ++ + + + +@@ -3279,6 +3300,21 @@ + + + ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ + + + +@@ -3469,6 +3505,12 @@ + + + ++ ++ ++ ++ ++ ++ + + + +@@ -3487,6 +3529,13 @@ + + + ++ ++ ++ ++ ++ ++ ++ + + + +@@ -4254,6 +4303,7 @@ + + + ++ + + + +@@ -4422,19 +4472,25 @@ + + + +- ++ + + + + + +- ++ + + + + + +- ++ ++ ++ ++ ++ ++ ++ + + + +@@ -4478,12 +4534,24 @@ + + + ++ ++ ++ ++ ++ ++ + + + + + + ++ ++ ++ ++ ++ ++ + + + +@@ -4491,6 +4559,14 @@ + + + ++ ++ ++ ++ ++ ++ ++ ++ + + + +@@ -4561,6 +4637,16 @@ + + + ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ + + + +@@ -4661,6 +4747,12 @@ + + + ++ ++ ++ ++ ++ ++ + + + +@@ -4804,6 +4896,14 @@ + + + ++ ++ ++ ++ ++ ++ ++ ++ + + + +@@ -5109,6 +5209,35 @@ + + + ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ + + + +diff --git a/vapi/packages/gdk-2.0/gdk-2.0.metadata b/vapi/packages/gdk-2.0/gdk-2.0.metadata +index 261ea0e141..68669dd3ba 100644 +--- a/vapi/packages/gdk-2.0/gdk-2.0.metadata ++++ b/vapi/packages/gdk-2.0/gdk-2.0.metadata +@@ -63,6 +63,8 @@ gdk_keymap_get_entries_for_keycode.keyvals is_array=""1"" is_out=""1"" + gdk_keymap_get_entries_for_keyval.keys is_array=""1"" is_out=""1"" + GdkKeymapKey is_value_type=""1"" + GdkNativeWindow is_value_type=""1"" simple_type=""1"" ++gdk_offscreen_window_get_embedder nullable=""1"" ++gdk_offscreen_window_get_pixmap nullable=""1"" + GdkPangoAttr* is_value_type=""1"" + gdk_pixbuf_get_from_drawable.dest nullable=""1"" + gdk_pixbuf_get_from_drawable.cmap nullable=""1"" +@@ -126,6 +128,7 @@ GdkWindowClass common_prefix=""GDK_"" + GdkWindowObject hidden=""1"" + GdkWindowObjectClass hidden=""1"" + GdkWindowRedirect is_value_type=""1"" ++gdk_window_get_cursor nullable=""1"" + gdk_window_get_geometry.x is_out=""1"" + gdk_window_get_geometry.y is_out=""1"" + gdk_window_get_geometry.width is_out=""1""" +494289e5b21e6f48c2ef108f61f1626a3ef9c59e,csemike$oneswarm,"refactor out the abstract class OverlayEndpoint and use it to share functionality between overlaytransport and service connection +",p,https://github.com/csemike/oneswarm,"diff --git a/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/OSF2FAzSwtUi.java b/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/OSF2FAzSwtUi.java +index 1539bbf8..07584769 100644 +--- a/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/OSF2FAzSwtUi.java ++++ b/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/OSF2FAzSwtUi.java +@@ -31,8 +31,9 @@ + import org.gudy.azureus2.ui.swt.plugins.UISWTViewEventListener; + + import edu.washington.cs.oneswarm.f2f.network.FriendConnection; +-import edu.washington.cs.oneswarm.f2f.network.OverlayTransport; + import edu.washington.cs.oneswarm.f2f.network.FriendConnection.OverlayForward; ++import edu.washington.cs.oneswarm.f2f.network.OverlayEndpoint; ++import edu.washington.cs.oneswarm.f2f.network.OverlayTransport; + + public class OSF2FAzSwtUi { + public static final String KEY_OVERLAY_TRANSPORT = ""key_peer_transport""; +@@ -220,7 +221,7 @@ public void refresh(TableCell cell) { + cell.setText(""""); + return; + } +- OverlayTransport tr = (OverlayTransport) peer.getPEPeer().getData(KEY_OVERLAY_TRANSPORT); ++ OverlayEndpoint tr = (OverlayEndpoint) peer.getPEPeer().getData(KEY_OVERLAY_TRANSPORT); + if (tr == null) { + cell.setText(""""); + return; +@@ -550,9 +551,9 @@ private void 
addOverlays(FriendConnection sel) { + item.setText(new String[] { ""forward"", f.getChannelId() + """", sel.getRemoteFriend().getNick(), sel.getRemoteIp().getHostAddress() + "":"" + sel.getRemotePort(), f.getRemoteFriend().getNick(), f.getRemoteIpPort(), formatter.formatTimeFromSeconds(f.getAge() / 1000), formatter.formatTimeFromSeconds(f.getLastMsgTime() / 1000), formatter.formatByteCountToKiBEtc(f.getBytesForwarded()), formatter.formatByteCountToKiBEtc(f.getBytesForwarded()), f.getSourceMessage().getDescription() }); + } + +- Map transports = sel.getOverlayTransports(); ++ Map transports = sel.getOverlayTransports(); + for (Integer id : transports.keySet()) { +- OverlayTransport f = transports.get(id); ++ OverlayEndpoint f = transports.get(id); + TableItem item = new TableItem(overlayConnectionTable, SWT.NONE); + item.setText(new String[] { ""transport"", f.getPathID() + """", ""Me"", ""N/A"", sel.getRemoteFriend().getNick(), sel.getRemoteIp().getHostAddress() + "":"" + sel.getRemotePort(), formatter.formatTimeFromSeconds(f.getAge() / 1000), formatter.formatTimeFromSeconds(f.getLastMsgTime() / 1000), formatter.formatByteCountToKiBEtc(f.getBytesIn()), formatter.formatByteCountToKiBEtc(f.getBytesOut()) }); + } +diff --git a/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/OSF2FPlugin.java b/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/OSF2FPlugin.java +index d712973e..ea8eb383 100644 +--- a/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/OSF2FPlugin.java ++++ b/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/OSF2FPlugin.java +@@ -42,9 +42,10 @@ + import edu.washington.cs.oneswarm.f2f.messaging.OSF2FSearch; + import edu.washington.cs.oneswarm.f2f.messaging.OSF2FTextSearch; + import edu.washington.cs.oneswarm.f2f.network.FriendConnection; ++import edu.washington.cs.oneswarm.f2f.network.FriendConnection.OverlayForward; ++import edu.washington.cs.oneswarm.f2f.network.OverlayEndpoint; + import edu.washington.cs.oneswarm.f2f.network.OverlayManager; + import edu.washington.cs.oneswarm.f2f.network.OverlayTransport; +-import edu.washington.cs.oneswarm.f2f.network.FriendConnection.OverlayForward; + import edu.washington.cs.oneswarm.f2f.permissions.PermissionsDAO; + import edu.washington.cs.oneswarm.plugins.PluginCallback; + import edu.washington.cs.publickey.PublicKeyFriend; +@@ -605,11 +606,11 @@ public String getDebugInfo() { + for (OverlayForward of : overlayForwards.values()) { + b.append("" channel="" + Integer.toHexString(of.getChannelId()) + "" "" + of.getRemoteFriend().getNick() + "" lastSent="" + of.getLastMsgTime() + "" src="" + of.getSourceMessage().getDescription() + ""\n""); + } +- Collection transports = f.getOverlayTransports().values(); ++ Collection transports = f.getOverlayTransports().values(); + if (transports.size() > 0) { + b.append("" Transports: \n""); + } +- for (OverlayTransport ot : transports) { ++ for (OverlayEndpoint ot : transports) { + b.append("" channel="" + Integer.toHexString(ot.getChannelId()) + "" path="" + Integer.toHexString(ot.getPathID()) + "" lastSent="" + ot.getLastMsgTime() + ""\n""); + } + } +diff --git a/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/network/FriendConnection.java b/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/network/FriendConnection.java +index 2e8e8eaa..7b5b5c4a 100644 +--- a/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/network/FriendConnection.java ++++ b/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/network/FriendConnection.java +@@ -154,7 +154,7 @@ public class FriendConnection { + + private final ConcurrentHashMap 
overlayTransportPathsId = new ConcurrentHashMap(); + +- private final ConcurrentHashMap overlayTransports = new ConcurrentHashMap(); ++ private final ConcurrentHashMap overlayTransports = new ConcurrentHashMap(); + + /* + * map to keep track of received searches to avoid sending the search back +@@ -447,9 +447,9 @@ public void close() { + } + + // we need to terminate all overlay transports +- List transports = new LinkedList( ++ List transports = new LinkedList( + overlayTransports.values()); +- for (OverlayTransport overlayTransport : transports) { ++ for (OverlayEndpoint overlayTransport : transports) { + overlayTransport.closeConnectionClosed(""friend closed connection""); + } + +@@ -539,12 +539,12 @@ private void deregisterOverlayForward(int channelId, boolean sendReset) { + + } + +- void deregisterOverlayTransport(OverlayTransport transport) { ++ void deregisterOverlayTransport(OverlayEndpoint transport) { + lock.lock(); + try { + int channelId = transport.getChannelId(); + +- OverlayTransport exists = overlayTransports.remove(channelId); ++ OverlayEndpoint exists = overlayTransports.remove(channelId); + recentlyClosedChannels.put(channelId, System.currentTimeMillis()); + int pathID = transport.getPathID(); + overlayTransportPathsId.remove(pathID); +@@ -643,7 +643,7 @@ public Map getOverlayForwards() { + return overlayForwards; + } + +- public Map getOverlayTransports() { ++ public Map getOverlayTransports() { + return overlayTransports; + } + +@@ -685,7 +685,7 @@ private void handleChannelMsg(Message message) { + + if (overlayTransports.containsKey(channelId)) { + // ok, this is a msg to us +- OverlayTransport t = overlayTransports.get(channelId); ++ OverlayEndpoint t = overlayTransports.get(channelId); + msg.setForward(false); + // this might we the first message we get in this channel + // means that the other side responded to our channel setup +@@ -1235,7 +1235,7 @@ void registerOverlayForward(OSF2FSearchResp currentSetupMsg, FriendConnection co + } + } + +- void registerOverlayTransport(OverlayTransport transport) throws OverlayRegistrationError { ++ void registerOverlayTransport(OverlayEndpoint transport) throws OverlayRegistrationError { + lock.lock(); + try { + int channelId = transport.getChannelId(); +diff --git a/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/network/OverlayEndpoint.java b/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/network/OverlayEndpoint.java new file mode 100644 -index 00000000..544d52bf +index 00000000..d186b29e --- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/ResourceSelector.java -@@ -0,0 +1,233 @@ -+package org.eclipselabs.tapiji.tools.core.ui.widgets; ++++ b/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/network/OverlayEndpoint.java +@@ -0,0 +1,222 @@ ++package edu.washington.cs.oneswarm.f2f.network; + -+import java.util.HashSet; -+import java.util.Iterator; -+import java.util.Locale; -+import java.util.Set; ++import java.util.TimerTask; ++import java.util.logging.Logger; + -+import org.eclipse.babel.editor.api.KeyTreeFactory; -+import org.eclipse.jface.layout.TreeColumnLayout; -+import org.eclipse.jface.viewers.ColumnWeightData; -+import org.eclipse.jface.viewers.IElementComparer; -+import org.eclipse.jface.viewers.ISelection; -+import org.eclipse.jface.viewers.ISelectionChangedListener; -+import org.eclipse.jface.viewers.IStructuredSelection; -+import org.eclipse.jface.viewers.SelectionChangedEvent; -+import org.eclipse.jface.viewers.StyledCellLabelProvider; -+import 
org.eclipse.jface.viewers.TreeViewer; -+import org.eclipse.swt.SWT; -+import org.eclipse.swt.widgets.Composite; -+import org.eclipse.swt.widgets.Tree; -+import org.eclipse.swt.widgets.TreeColumn; -+import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; -+import org.eclipselabs.tapiji.tools.core.ui.widgets.event.ResourceSelectionEvent; -+import org.eclipselabs.tapiji.tools.core.ui.widgets.listener.IResourceSelectionListener; -+import org.eclipselabs.tapiji.tools.core.ui.widgets.provider.ResKeyTreeContentProvider; -+import org.eclipselabs.tapiji.tools.core.ui.widgets.provider.ResKeyTreeLabelProvider; -+import org.eclipselabs.tapiji.tools.core.ui.widgets.provider.ValueKeyTreeLabelProvider; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.IAbstractKeyTreeModel; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.IKeyTreeNode; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.IMessagesBundleGroup; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.TreeType; ++import org.gudy.azureus2.core3.util.Average; ++import org.gudy.azureus2.core3.util.DirectByteBuffer; + ++import com.aelitis.azureus.core.networkmanager.NetworkManager; + -+public class ResourceSelector extends Composite { ++import edu.washington.cs.oneswarm.f2f.Friend; ++import edu.washington.cs.oneswarm.f2f.messaging.OSF2FChannelDataMsg; ++import edu.washington.cs.oneswarm.f2f.messaging.OSF2FChannelReset; ++import edu.washington.cs.oneswarm.f2f.messaging.OSF2FMessage; ++import edu.washington.cs.oneswarm.f2f.network.DelayedExecutorService.DelayedExecutor; + -+ public static final int DISPLAY_KEYS = 0; -+ public static final int DISPLAY_TEXT = 1; -+ -+ private Locale displayLocale; -+ private int displayMode; -+ private String resourceBundle; -+ private ResourceBundleManager manager; -+ private boolean showTree; -+ -+ private TreeViewer viewer; -+ private TreeColumnLayout basicLayout; -+ private TreeColumn entries; -+ private Set listeners = new HashSet(); -+ -+ // Viewer model -+ private TreeType treeType = TreeType.Tree; -+ private StyledCellLabelProvider labelProvider; -+ -+ public ResourceSelector(Composite parent, -+ int style, -+ ResourceBundleManager manager, -+ String resourceBundle, -+ int displayMode, -+ Locale displayLocale, -+ boolean showTree) { -+ super(parent, style); -+ this.manager = manager; -+ this.resourceBundle = resourceBundle; -+ this.displayMode = displayMode; -+ this.displayLocale = displayLocale; -+ this.showTree = showTree; -+ this.treeType = showTree ? 
TreeType.Tree : TreeType.Flat; -+ -+ initLayout (this); -+ initViewer (this); -+ -+ updateViewer (true); -+ } ++public abstract class OverlayEndpoint { ++ private final static Logger logger = Logger.getLogger(OverlayEndpoint.class.getName()); ++ /* ++ * max number of ms that a message can be delivered earlier than ++ * overlayDelayMs if that avoids a call to Thread.sleep() ++ */ ++ private final static int INCOMING_MESSAGE_DELAY_SLACK = 10; + -+ protected void updateContentProvider (IMessagesBundleGroup group) { -+ // define input of treeviewer -+ if (!showTree || displayMode == DISPLAY_TEXT) { -+ treeType = TreeType.Flat; -+ } -+ -+ IAbstractKeyTreeModel model = KeyTreeFactory.createModel(manager.getResourceBundle(resourceBundle)); -+ ((ResKeyTreeContentProvider)viewer.getContentProvider()).setBundleGroup(manager.getResourceBundle(resourceBundle)); -+ ((ResKeyTreeContentProvider)viewer.getContentProvider()).setTreeType(treeType); -+ if (viewer.getInput() == null) { -+ viewer.setUseHashlookup(true); -+ } -+ -+// viewer.setAutoExpandLevel(AbstractTreeViewer.ALL_LEVELS); -+ org.eclipse.jface.viewers.TreePath[] expandedTreePaths = viewer.getExpandedTreePaths(); -+ viewer.setInput(model); -+ viewer.refresh(); -+ viewer.setExpandedTreePaths(expandedTreePaths); -+ } -+ -+ protected void updateViewer (boolean updateContent) { -+ IMessagesBundleGroup group = manager.getResourceBundle(resourceBundle); -+ -+ if (group == null) -+ return; -+ -+ if (displayMode == DISPLAY_TEXT) { -+ labelProvider = new ValueKeyTreeLabelProvider(group.getMessagesBundle(displayLocale)); -+ treeType = TreeType.Flat; -+ ((ResKeyTreeContentProvider)viewer.getContentProvider()).setTreeType(treeType); -+ } else { -+ labelProvider = new ResKeyTreeLabelProvider(null); -+ treeType = TreeType.Tree; -+ ((ResKeyTreeContentProvider)viewer.getContentProvider()).setTreeType(treeType); -+ } -+ -+ viewer.setLabelProvider(labelProvider); -+ if (updateContent) -+ updateContentProvider(group); -+ } -+ -+ protected void initLayout (Composite parent) { -+ basicLayout = new TreeColumnLayout(); -+ parent.setLayout(basicLayout); -+ } -+ -+ protected void initViewer (Composite parent) { -+ viewer = new TreeViewer (parent, SWT.BORDER | SWT.SINGLE | SWT.FULL_SELECTION); -+ Tree table = viewer.getTree(); -+ -+ // Init table-columns -+ entries = new TreeColumn (table, SWT.NONE); -+ basicLayout.setColumnData(entries, new ColumnWeightData(1)); -+ -+ viewer.setContentProvider(new ResKeyTreeContentProvider()); -+ viewer.addSelectionChangedListener(new ISelectionChangedListener() { -+ -+ @Override -+ public void selectionChanged(SelectionChangedEvent event) { -+ ISelection selection = event.getSelection(); -+ String selectionSummary = """"; -+ String selectedKey = """"; -+ -+ if (selection instanceof IStructuredSelection) { -+ Iterator itSel = ((IStructuredSelection) selection).iterator(); -+ if (itSel.hasNext()) { -+ IKeyTreeNode selItem = itSel.next(); -+ IMessagesBundleGroup group = manager.getResourceBundle(resourceBundle); -+ selectedKey = selItem.getMessageKey(); -+ -+ if (group == null) -+ return; -+ Iterator itLocales = manager.getProvidedLocales(resourceBundle).iterator(); -+ while (itLocales.hasNext()) { -+ Locale l = itLocales.next(); -+ try { -+ selectionSummary += (l == null ? 
ResourceBundleManager.defaultLocaleTag : l.getDisplayLanguage()) + "":\n""; -+ selectionSummary += ""\t"" + group.getMessagesBundle(l).getMessage(selItem.getMessageKey()).getValue() + ""\n""; -+ } catch (Exception e) {} -+ } -+ } -+ } -+ -+ // construct ResourceSelectionEvent -+ ResourceSelectionEvent e = new ResourceSelectionEvent(selectedKey, selectionSummary); -+ fireSelectionChanged(e); -+ } -+ }); -+ -+ // we need this to keep the tree expanded -+ viewer.setComparer(new IElementComparer() { -+ -+ @Override -+ public int hashCode(Object element) { -+ final int prime = 31; -+ int result = 1; -+ result = prime * result -+ + ((toString() == null) ? 0 : toString().hashCode()); -+ return result; -+ } -+ -+ @Override -+ public boolean equals(Object a, Object b) { -+ if (a == b) { -+ return true; -+ } -+ if (a instanceof IKeyTreeNode && b instanceof IKeyTreeNode) { -+ IKeyTreeNode nodeA = (IKeyTreeNode) a; -+ IKeyTreeNode nodeB = (IKeyTreeNode) b; -+ return nodeA.equals(nodeB); -+ } -+ return false; -+ } -+ }); -+ } -+ -+ public Locale getDisplayLocale() { -+ return displayLocale; -+ } ++ protected long bytesIn = 0; + -+ public void setDisplayLocale(Locale displayLocale) { -+ this.displayLocale = displayLocale; -+ updateViewer(false); -+ } ++ protected long bytesOut = 0; ++ protected boolean started = false; + -+ public int getDisplayMode() { -+ return displayMode; -+ } ++ protected final int channelId; + -+ public void setDisplayMode(int displayMode) { -+ this.displayMode = displayMode; -+ updateViewer(true); -+ } ++ protected boolean closed = false; ++ protected String closeReason = """"; + -+ public void setResourceBundle(String resourceBundle) { -+ this.resourceBundle = resourceBundle; -+ updateViewer(true); -+ } ++ private String desc = null; ++ private final DelayedExecutor delayedOverlayMessageTimer; + -+ public String getResourceBundle() { -+ return resourceBundle; -+ } -+ -+ public void addSelectionChangedListener (IResourceSelectionListener l) { -+ listeners.add(l); -+ } -+ -+ public void removeSelectionChangedListener (IResourceSelectionListener l) { -+ listeners.remove(l); -+ } -+ -+ private void fireSelectionChanged (ResourceSelectionEvent event) { -+ Iterator itResList = listeners.iterator(); -+ while (itResList.hasNext()) { -+ itResList.next().selectionChanged(event); -+ } -+ } -+ -+} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/event/ResourceSelectionEvent.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/event/ResourceSelectionEvent.java -new file mode 100644 -index 00000000..349c385c ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/event/ResourceSelectionEvent.java -@@ -0,0 +1,31 @@ -+package org.eclipselabs.tapiji.tools.core.ui.widgets.event; -+ -+public class ResourceSelectionEvent { -+ -+ private String selectionSummary; -+ private String selectedKey; -+ -+ public ResourceSelectionEvent (String selectedKey, String selectionSummary) { -+ this.setSelectionSummary(selectionSummary); -+ this.setSelectedKey(selectedKey); -+ } -+ -+ public void setSelectedKey (String key) { -+ selectedKey = key; -+ } -+ -+ public void setSelectionSummary(String selectionSummary) { -+ this.selectionSummary = selectionSummary; -+ } -+ -+ public String getSelectionSummary() { -+ return selectionSummary; -+ } -+ -+ public String getSelectedKey() { -+ return selectedKey; -+ } -+ -+ -+ -+} -diff --git 
a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/filter/ExactMatcher.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/filter/ExactMatcher.java -new file mode 100644 -index 00000000..6d83fc3c ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/filter/ExactMatcher.java -@@ -0,0 +1,77 @@ -+package org.eclipselabs.tapiji.tools.core.ui.widgets.filter; -+ -+import java.util.Locale; -+ -+import org.eclipse.jface.viewers.StructuredViewer; -+import org.eclipse.jface.viewers.Viewer; -+import org.eclipse.jface.viewers.ViewerFilter; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.IValuedKeyTreeNode; -+ -+public class ExactMatcher extends ViewerFilter { -+ -+ protected final StructuredViewer viewer; -+ protected String pattern = """"; -+ protected StringMatcher matcher; -+ -+ public ExactMatcher (StructuredViewer viewer) { -+ this.viewer = viewer; -+ } -+ -+ public String getPattern () { -+ return pattern; -+ } -+ -+ public void setPattern (String p) { -+ boolean filtering = matcher != null; -+ if (p != null && p.trim().length() > 0) { -+ pattern = p; -+ matcher = new StringMatcher (""*"" + pattern + ""*"", true, false); -+ if (!filtering) -+ viewer.addFilter(this); -+ else -+ viewer.refresh(); -+ } else { -+ pattern = """"; -+ matcher = null; -+ if (filtering) { -+ viewer.removeFilter(this); -+ } -+ } -+ } -+ -+ @Override -+ public boolean select(Viewer viewer, Object parentElement, Object element) { -+ IValuedKeyTreeNode vEle = (IValuedKeyTreeNode) element; -+ FilterInfo filterInfo = new FilterInfo(); -+ boolean selected = matcher.match(vEle.getMessageKey()); -+ -+ if (selected) { -+ int start = -1; -+ while ((start = vEle.getMessageKey().toLowerCase().indexOf(pattern.toLowerCase(), start+1)) >= 0) { -+ filterInfo.addKeyOccurrence(start, pattern.length()); -+ } -+ filterInfo.setFoundInKey(selected); -+ filterInfo.setFoundInKey(true); -+ } else -+ filterInfo.setFoundInKey(false); -+ -+ // Iterate translations -+ for (Locale l : vEle.getLocales()) { -+ String value = vEle.getValue(l); -+ if (matcher.match(value)) { -+ filterInfo.addFoundInLocale(l); -+ filterInfo.addSimilarity(l, 1d); -+ int start = -1; -+ while ((start = value.toLowerCase().indexOf(pattern.toLowerCase(), start+1)) >= 0) { -+ filterInfo.addFoundInLocaleRange(l, start, pattern.length()); -+ } -+ selected = true; -+ } -+ } -+ -+ vEle.setInfo(filterInfo); -+ return selected; -+ } -+ -+ -+} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/filter/FilterInfo.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/filter/FilterInfo.java -new file mode 100644 -index 00000000..cb02d50f ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/filter/FilterInfo.java -@@ -0,0 +1,85 @@ -+package org.eclipselabs.tapiji.tools.core.ui.widgets.filter; -+ -+import java.util.ArrayList; -+import java.util.HashMap; -+import java.util.List; -+import java.util.Locale; -+import java.util.Map; -+ -+import org.eclipse.jface.text.IRegion; -+import org.eclipse.jface.text.Region; -+ -+public class FilterInfo { -+ -+ private boolean foundInKey; -+ private List foundInLocales = new ArrayList (); -+ private List keyOccurrences = new ArrayList (); -+ private Double keySimilarity; -+ private Map> occurrences = new HashMap>(); -+ private Map localeSimilarity = new HashMap(); -+ -+ 
public FilterInfo() { -+ -+ } -+ -+ public void setKeySimilarity (Double similarity) { -+ keySimilarity = similarity; -+ } -+ -+ public Double getKeySimilarity () { -+ return keySimilarity; -+ } -+ -+ public void addSimilarity (Locale l, Double similarity) { -+ localeSimilarity.put (l, similarity); -+ } -+ -+ public Double getSimilarityLevel (Locale l) { -+ return localeSimilarity.get(l); -+ } -+ -+ public void setFoundInKey(boolean foundInKey) { -+ this.foundInKey = foundInKey; -+ } -+ -+ public boolean isFoundInKey() { -+ return foundInKey; -+ } -+ -+ public void addFoundInLocale (Locale loc) { -+ foundInLocales.add(loc); -+ } -+ -+ public void removeFoundInLocale (Locale loc) { -+ foundInLocales.remove(loc); -+ } -+ -+ public void clearFoundInLocale () { -+ foundInLocales.clear(); -+ } -+ -+ public boolean hasFoundInLocale (Locale l) { -+ return foundInLocales.contains(l); -+ } -+ -+ public List getFoundInLocaleRanges (Locale locale) { -+ List reg = occurrences.get(locale); -+ return (reg == null ? new ArrayList() : reg); -+ } -+ -+ public void addFoundInLocaleRange (Locale locale, int start, int length) { -+ List regions = occurrences.get(locale); -+ if (regions == null) -+ regions = new ArrayList(); -+ regions.add(new Region(start, length)); -+ occurrences.put(locale, regions); -+ } -+ -+ public List getKeyOccurrences () { -+ return keyOccurrences; -+ } -+ -+ public void addKeyOccurrence (int start, int length) { -+ keyOccurrences.add(new Region (start, length)); -+ } -+} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/filter/FuzzyMatcher.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/filter/FuzzyMatcher.java -new file mode 100644 -index 00000000..227a2ba0 ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/filter/FuzzyMatcher.java -@@ -0,0 +1,54 @@ -+package org.eclipselabs.tapiji.tools.core.ui.widgets.filter; -+ -+import java.util.Locale; -+ -+import org.eclipse.babel.editor.api.AnalyzerFactory; -+import org.eclipse.jface.viewers.StructuredViewer; -+import org.eclipse.jface.viewers.Viewer; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.IValuedKeyTreeNode; -+import org.eclipselabs.tapiji.translator.rbe.model.analyze.ILevenshteinDistanceAnalyzer; -+ -+public class FuzzyMatcher extends ExactMatcher { -+ -+ protected ILevenshteinDistanceAnalyzer lvda; -+ protected float minimumSimilarity = 0.75f; -+ -+ public FuzzyMatcher(StructuredViewer viewer) { -+ super(viewer); -+ lvda = AnalyzerFactory.getLevenshteinDistanceAnalyzer();; -+ } -+ -+ public double getMinimumSimilarity () { -+ return minimumSimilarity; -+ } -+ -+ public void setMinimumSimilarity (float similarity) { -+ this.minimumSimilarity = similarity; -+ } -+ -+ @Override -+ public boolean select(Viewer viewer, Object parentElement, Object element) { -+ boolean exactMatch = super.select(viewer, parentElement, element); -+ boolean match = exactMatch; -+ -+ IValuedKeyTreeNode vkti = (IValuedKeyTreeNode) element; -+ FilterInfo filterInfo = (FilterInfo) vkti.getInfo(); -+ -+ for (Locale l : vkti.getLocales()) { -+ String value = vkti.getValue(l); -+ if (filterInfo.hasFoundInLocale(l)) -+ continue; -+ double dist = lvda.analyse(value, getPattern()); -+ if (dist >= minimumSimilarity) { -+ filterInfo.addFoundInLocale(l); -+ filterInfo.addSimilarity(l, dist); -+ match = true; -+ filterInfo.addFoundInLocaleRange(l, 0, value.length()); -+ } -+ } -+ -+ vkti.setInfo(filterInfo); 
-+ return match; -+ } -+ -+} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/filter/StringMatcher.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/filter/StringMatcher.java -new file mode 100644 -index 00000000..d48ea0e9 ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/filter/StringMatcher.java -@@ -0,0 +1,441 @@ -+package org.eclipselabs.tapiji.tools.core.ui.widgets.filter; -+ -+import java.util.Vector; -+ -+/** -+ * A string pattern matcher, suppporting ""*"" and ""?"" wildcards. -+ */ -+public class StringMatcher { -+ protected String fPattern; ++ protected Average downloadRateAverage = Average.getInstance(1000, 10); + -+ protected int fLength; // pattern length ++ protected final FriendConnection friendConnection; ++ protected long lastMsgTime; ++ private final long overlayDelayMs; ++ protected final int pathID; ++ private boolean sentReset = false; + -+ protected boolean fIgnoreWildCards; ++ private final long startTime; ++ private final int TIMEOUT = 2 * 60 * 1000; + -+ protected boolean fIgnoreCase; ++ protected Average uploadRateAverage = Average.getInstance(1000, 10); + -+ protected boolean fHasLeadingStar; -+ -+ protected boolean fHasTrailingStar; -+ -+ protected String fSegments[]; //the given pattern is split into * separated segments -+ -+ /* boundary value beyond which we don't need to search in the text */ -+ protected int fBound = 0; -+ -+ protected static final char fSingleWildCard = '\u0000'; -+ -+ public static class Position { -+ int start; //inclusive -+ -+ int end; //exclusive -+ -+ public Position(int start, int end) { -+ this.start = start; -+ this.end = end; -+ } -+ -+ public int getStart() { -+ return start; -+ } -+ -+ public int getEnd() { -+ return end; -+ } ++ public OverlayEndpoint(FriendConnection friendConnection, int channelId, int pathID, ++ long overlayDelayMs) { ++ this.friendConnection = friendConnection; ++ this.channelId = channelId; ++ this.pathID = pathID; ++ this.overlayDelayMs = overlayDelayMs; ++ this.lastMsgTime = System.currentTimeMillis(); ++ this.startTime = System.currentTimeMillis(); ++ delayedOverlayMessageTimer = DelayedExecutorService.getInstance().getFixedDelayExecutor( ++ overlayDelayMs); + } + -+ /** -+ * StringMatcher constructor takes in a String object that is a simple -+ * pattern which may contain '*' for 0 and many characters and -+ * '?' for exactly one character. -+ * -+ * Literal '*' and '?' characters must be escaped in the pattern -+ * e.g., ""\*"" means literal ""*"", etc. -+ * -+ * Escaping any other character (including the escape character itself), -+ * just results in that character in the pattern. -+ * e.g., ""\a"" means ""a"" and ""\\"" means ""\"" -+ * -+ * If invoking the StringMatcher with string literals in Java, don't forget -+ * escape characters are represented by ""\\"". -+ * -+ * @param pattern the pattern to match text against -+ * @param ignoreCase if true, case is ignored -+ * @param ignoreWildCards if true, wild cards and their escape sequences are ignored -+ * (everything is taken literally). 
-+ */ -+ public StringMatcher(String pattern, boolean ignoreCase, -+ boolean ignoreWildCards) { -+ if (pattern == null) { -+ throw new IllegalArgumentException(); -+ } -+ fIgnoreCase = ignoreCase; -+ fIgnoreWildCards = ignoreWildCards; -+ fPattern = pattern; -+ fLength = pattern.length(); ++ protected abstract void cleanup(); + -+ if (fIgnoreWildCards) { -+ parseNoWildCards(); -+ } else { -+ parseWildCards(); -+ } ++ private void deregister() { ++ // remove it from the friend connection ++ friendConnection.deregisterOverlayTransport(this); ++ cleanup(); + } + + /** -+ * Find the first occurrence of the pattern between startend(exclusive). -+ * @param text the String object to search in -+ * @param start the starting index of the search range, inclusive -+ * @param end the ending index of the search range, exclusive -+ * @return an StringMatcher.Position object that keeps the starting -+ * (inclusive) and ending positions (exclusive) of the first occurrence of the -+ * pattern in the specified range of the text; return null if not found or subtext -+ * is empty (start==end). A pair of zeros is returned if pattern is empty string -+ * Note that for pattern like ""*abc*"" with leading and trailing stars, position of ""abc"" -+ * is returned. For a pattern like""*??*"" in text ""abcdf"", (1,3) is returned ++ * This method is called ""from above"", when the peer connection is ++ * terminated, send a reset to other side + */ -+ public StringMatcher.Position find(String text, int start, int end) { -+ if (text == null) { -+ throw new IllegalArgumentException(); -+ } -+ -+ int tlen = text.length(); -+ if (start < 0) { -+ start = 0; -+ } -+ if (end > tlen) { -+ end = tlen; -+ } -+ if (end < 0 || start >= end) { -+ return null; -+ } -+ if (fLength == 0) { -+ return new Position(start, start); -+ } -+ if (fIgnoreWildCards) { -+ int x = posIn(text, start, end); -+ if (x < 0) { -+ return null; -+ } -+ return new Position(x, x + fLength); -+ } -+ -+ int segCount = fSegments.length; -+ if (segCount == 0) { -+ return new Position(start, end); -+ } ++ public void close(String reason) { ++ if (!closed) { ++ closeReason = ""peer - "" + reason; ++ logger.fine(getDescription() + "": OverlayTransport closed, reason:"" + closeReason); + -+ int curPos = start; -+ int matchStart = -1; -+ int i; -+ for (i = 0; i < segCount && curPos < end; ++i) { -+ String current = fSegments[i]; -+ int nextMatch = regExpPosIn(text, curPos, end, current); -+ if (nextMatch < 0) { -+ return null; -+ } -+ if (i == 0) { -+ matchStart = nextMatch; -+ } -+ curPos = nextMatch + current.length(); ++ closed = true; ++ this.sendReset(); + } -+ if (i < segCount) { -+ return null; -+ } -+ return new Position(matchStart, curPos); -+ } ++ // we don't expect anyone to read whatever we have left in the buffer ++ this.destroyBufferedMessages(); + -+ /** -+ * match the given text with the pattern -+ * @return true if matched otherwise false -+ * @param text a String object -+ */ -+ public boolean match(String text) { -+ if(text == null) { -+ return false; -+ } -+ return match(text, 0, text.length()); ++ deregister(); + } + + /** -+ * Given the starting (inclusive) and the ending (exclusive) positions in the -+ * text, determine if the given substring matches with aPattern -+ * @return true if the specified portion of the text matches the pattern -+ * @param text a String object that contains the substring to match -+ * @param start marks the starting position (inclusive) of the substring -+ * @param end marks the ending index (exclusive) of the 
substring ++ * this method is called from below when a reset is received ++ * ++ * @param reason + */ -+ public boolean match(String text, int start, int end) { -+ if (null == text) { -+ throw new IllegalArgumentException(); -+ } -+ -+ if (start > end) { -+ return false; -+ } -+ -+ if (fIgnoreWildCards) { -+ return (end - start == fLength) -+ && fPattern.regionMatches(fIgnoreCase, 0, text, start, -+ fLength); -+ } -+ int segCount = fSegments.length; -+ if (segCount == 0 && (fHasLeadingStar || fHasTrailingStar)) { -+ return true; -+ } -+ if (start == end) { -+ return fLength == 0; -+ } -+ if (fLength == 0) { -+ return start == end; -+ } -+ -+ int tlen = text.length(); -+ if (start < 0) { -+ start = 0; -+ } -+ if (end > tlen) { -+ end = tlen; -+ } -+ -+ int tCurPos = start; -+ int bound = end - fBound; -+ if (bound < 0) { -+ return false; -+ } -+ int i = 0; -+ String current = fSegments[i]; -+ int segLength = current.length(); -+ -+ /* process first segment */ -+ if (!fHasLeadingStar) { -+ if (!regExpRegionMatches(text, start, current, 0, segLength)) { -+ return false; -+ } else { -+ ++i; -+ tCurPos = tCurPos + segLength; -+ } -+ } -+ if ((fSegments.length == 1) && (!fHasLeadingStar) -+ && (!fHasTrailingStar)) { -+ // only one segment to match, no wildcards specified -+ return tCurPos == end; -+ } -+ /* process middle segments */ -+ while (i < segCount) { -+ current = fSegments[i]; -+ int currentMatch; -+ int k = current.indexOf(fSingleWildCard); -+ if (k < 0) { -+ currentMatch = textPosIn(text, tCurPos, end, current); -+ if (currentMatch < 0) { -+ return false; -+ } -+ } else { -+ currentMatch = regExpPosIn(text, tCurPos, end, current); -+ if (currentMatch < 0) { -+ return false; -+ } ++ public void closeChannelReset() { ++ if (sentReset) { ++ // ok, this is the response to our previous close ++ deregister(); ++ } else { ++ if (!closed) { ++ closeReason = ""remote host closed overlay channel""; ++ logger.fine(getDescription() + "": OverlayTransport closed, reason:"" + closeReason); ++ // this is the remote side saying that the connection is closed ++ // send a reset back to confirm ++ closed = true; ++ sendReset(); + } -+ tCurPos = currentMatch + current.length(); -+ i++; -+ } -+ -+ /* process final segment */ -+ if (!fHasTrailingStar && tCurPos != end) { -+ int clen = current.length(); -+ return regExpRegionMatches(text, end - clen, current, 0, clen); + } -+ return i == segCount; -+ } -+ -+ /** -+ * This method parses the given pattern into segments seperated by wildcard '*' characters. -+ * Since wildcards are not being used in this case, the pattern consists of a single segment. -+ */ -+ private void parseNoWildCards() { -+ fSegments = new String[1]; -+ fSegments[0] = fPattern; -+ fBound = fLength; + } + + /** -+ * Parses the given pattern into segments seperated by wildcard '*' characters. -+ * @param p, a String object that is a simple regular expression with '*' and/or '?' 
++ * this method is called from below if the friend connection dies ++ * ++ * @param reason + */ -+ private void parseWildCards() { -+ if (fPattern.startsWith(""*"")) { //$NON-NLS-1$ -+ fHasLeadingStar = true; -+ } -+ if (fPattern.endsWith(""*"")) {//$NON-NLS-1$ -+ /* make sure it's not an escaped wildcard */ -+ if (fLength > 1 && fPattern.charAt(fLength - 2) != '\\') { -+ fHasTrailingStar = true; -+ } -+ } -+ -+ Vector temp = new Vector(); -+ -+ int pos = 0; -+ StringBuffer buf = new StringBuffer(); -+ while (pos < fLength) { -+ char c = fPattern.charAt(pos++); -+ switch (c) { -+ case '\\': -+ if (pos >= fLength) { -+ buf.append(c); -+ } else { -+ char next = fPattern.charAt(pos++); -+ /* if it's an escape sequence */ -+ if (next == '*' || next == '?' || next == '\\') { -+ buf.append(next); -+ } else { -+ /* not an escape sequence, just insert literally */ -+ buf.append(c); -+ buf.append(next); -+ } -+ } -+ break; -+ case '*': -+ if (buf.length() > 0) { -+ /* new segment */ -+ temp.addElement(buf.toString()); -+ fBound += buf.length(); -+ buf.setLength(0); -+ } -+ break; -+ case '?': -+ /* append special character representing single match wildcard */ -+ buf.append(fSingleWildCard); -+ break; -+ default: -+ buf.append(c); -+ } -+ } -+ -+ /* add last buffer to segment list */ -+ if (buf.length() > 0) { -+ temp.addElement(buf.toString()); -+ fBound += buf.length(); -+ } ++ public void closeConnectionClosed(String reason) { ++ closeReason = reason; ++ logger.fine(getDescription() + "": OverlayTransport closed, reason:"" + closeReason); + -+ fSegments = new String[temp.size()]; -+ temp.copyInto(fSegments); ++ closed = true; ++ deregister(); + } + -+ /** -+ * @param text a string which contains no wildcard -+ * @param start the starting index in the text for search, inclusive -+ * @param end the stopping point of search, exclusive -+ * @return the starting index in the text of the pattern , or -1 if not found -+ */ -+ protected int posIn(String text, int start, int end) {//no wild card in pattern -+ int max = end - fLength; -+ -+ if (!fIgnoreCase) { -+ int i = text.indexOf(fPattern, start); -+ if (i == -1 || i > max) { -+ return -1; -+ } -+ return i; -+ } -+ -+ for (int i = start; i <= max; ++i) { -+ if (text.regionMatches(true, i, fPattern, 0, fLength)) { -+ return i; -+ } -+ } ++ protected abstract void destroyBufferedMessages(); + -+ return -1; ++ public long getAge() { ++ return System.currentTimeMillis() - startTime; + } + -+ /** -+ * @param text a simple regular expression that may only contain '?'(s) -+ * @param start the starting index in the text for search, inclusive -+ * @param end the stopping point of search, exclusive -+ * @param p a simple regular expression that may contains '?' -+ * @return the starting index in the text of the pattern , or -1 if not found -+ */ -+ protected int regExpPosIn(String text, int start, int end, String p) { -+ int plen = p.length(); -+ -+ int max = end - plen; -+ for (int i = start; i <= max; ++i) { -+ if (regExpRegionMatches(text, i, p, 0, plen)) { -+ return i; -+ } -+ } -+ return -1; ++ public long getArtificialDelay() { ++ return overlayDelayMs; + } + -+ /** -+ * -+ * @return boolean -+ * @param text a String to match -+ * @param start int that indicates the starting index of match, inclusive -+ * @param end int that indicates the ending index of match, exclusive -+ * @param p String, String, a simple regular expression that may contain '?' 
-+ * @param ignoreCase boolean indicating wether code>p is case sensitive -+ */ -+ protected boolean regExpRegionMatches(String text, int tStart, String p, -+ int pStart, int plen) { -+ while (plen-- > 0) { -+ char tchar = text.charAt(tStart++); -+ char pchar = p.charAt(pStart++); -+ -+ /* process wild cards */ -+ if (!fIgnoreWildCards) { -+ /* skip single wild cards */ -+ if (pchar == fSingleWildCard) { -+ continue; -+ } -+ } -+ if (pchar == tchar) { -+ continue; -+ } -+ if (fIgnoreCase) { -+ if (Character.toUpperCase(tchar) == Character -+ .toUpperCase(pchar)) { -+ continue; -+ } -+ // comparing after converting to upper case doesn't handle all cases; -+ // also compare after converting to lower case -+ if (Character.toLowerCase(tchar) == Character -+ .toLowerCase(pchar)) { -+ continue; -+ } -+ } -+ return false; -+ } -+ return true; ++ public long getBytesIn() { ++ return bytesIn; + } + -+ /** -+ * @param text the string to match -+ * @param start the starting index in the text for search, inclusive -+ * @param end the stopping point of search, exclusive -+ * @param p a pattern string that has no wildcard -+ * @return the starting index in the text of the pattern , or -1 if not found -+ */ -+ protected int textPosIn(String text, int start, int end, String p) { -+ -+ int plen = p.length(); -+ int max = end - plen; -+ -+ if (!fIgnoreCase) { -+ int i = text.indexOf(p, start); -+ if (i == -1 || i > max) { -+ return -1; -+ } -+ return i; -+ } -+ -+ for (int i = start; i <= max; ++i) { -+ if (text.regionMatches(true, i, p, 0, plen)) { -+ return i; -+ } -+ } -+ -+ return -1; ++ public long getBytesOut() { ++ return bytesOut; + } -+} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/listener/IResourceSelectionListener.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/listener/IResourceSelectionListener.java -new file mode 100644 -index 00000000..7cb6f96d ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/listener/IResourceSelectionListener.java -@@ -0,0 +1,9 @@ -+package org.eclipselabs.tapiji.tools.core.ui.widgets.listener; -+ -+import org.eclipselabs.tapiji.tools.core.ui.widgets.event.ResourceSelectionEvent; -+ -+public interface IResourceSelectionListener { -+ -+ public void selectionChanged (ResourceSelectionEvent e); -+ -+} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/provider/KeyTreeLabelProvider.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/provider/KeyTreeLabelProvider.java -new file mode 100644 -index 00000000..62d1aa3b ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/provider/KeyTreeLabelProvider.java -@@ -0,0 +1,166 @@ -+ /* -++ * Copyright (C) 2003, 2004 Pascal Essiembre, Essiembre Consultant Inc. -+ * -+ * This file is part of Essiembre ResourceBundle Editor. -+ * -+ * Essiembre ResourceBundle Editor is free software; you can redistribute it -+ * and/or modify it under the terms of the GNU Lesser General Public -+ * License as published by the Free Software Foundation; either -+ * version 2.1 of the License, or (at your option) any later version. -+ * -+ * Essiembre ResourceBundle Editor is distributed in the hope that it will be -+ * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU -+ * Lesser General Public License for more details. -+ * -+ * You should have received a copy of the GNU Lesser General Public -+ * License along with Essiembre ResourceBundle Editor; if not, write to the -+ * Free Software Foundation, Inc., 59 Temple Place, Suite 330, -+ * Boston, MA 02111-1307 USA -+ */ -+package org.eclipselabs.tapiji.tools.core.ui.widgets.provider; + -+import org.eclipse.jface.resource.ImageRegistry; -+import org.eclipse.jface.viewers.ILabelProvider; -+import org.eclipse.jface.viewers.StyledCellLabelProvider; -+import org.eclipse.jface.viewers.ViewerCell; -+import org.eclipse.swt.SWT; -+import org.eclipse.swt.graphics.Color; -+import org.eclipse.swt.graphics.Font; -+import org.eclipse.swt.graphics.Image; -+import org.eclipselabs.tapiji.tools.core.Activator; -+import org.eclipselabs.tapiji.tools.core.util.FontUtils; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.IKeyTreeNode; -+ -+/** -+ * Label provider for key tree viewer. -+ * @author Pascal Essiembre (essiembre@users.sourceforge.net) -+ * @version $Author: nl_carnage $ $Revision: 1.11 $ $Date: 2007/09/11 16:11:09 $ -+ */ -+public class KeyTreeLabelProvider -+ extends StyledCellLabelProvider /*implements IFontProvider, IColorProvider*/ { -+ -+ private static final int KEY_DEFAULT = 1 << 1; -+ private static final int KEY_COMMENTED = 1 << 2; -+ private static final int KEY_NOT = 1 << 3; -+ private static final int WARNING = 1 << 4; -+ private static final int WARNING_GREY = 1 << 5; -+ -+ /** Registry instead of UIUtils one for image not keyed by file name. */ -+ private static ImageRegistry imageRegistry = new ImageRegistry(); -+ -+ private Color commentedColor = FontUtils.getSystemColor(SWT.COLOR_GRAY); -+ -+ /** Group font. */ -+ private Font groupFontKey = FontUtils.createFont(SWT.BOLD); -+ private Font groupFontNoKey = FontUtils.createFont(SWT.BOLD | SWT.ITALIC); -+ -+ -+ /** -+ * @see ILabelProvider#getImage(Object) -+ */ -+ public Image getImage(Object element) { -+ IKeyTreeNode treeItem = ((IKeyTreeNode) element); -+ -+ int iconFlags = 0; ++ public int getChannelId() { ++ return channelId; ++ } + -+ // Figure out background icon -+ if (treeItem.getMessagesBundleGroup() != null && -+ treeItem.getMessagesBundleGroup().isKey(treeItem.getMessageKey())) { -+ iconFlags += KEY_DEFAULT; -+ } else { -+ iconFlags += KEY_NOT; ++ public String getDescription() { ++ if (desc == null) { ++ desc = NetworkManager.OSF2F_TRANSPORT_PREFIX + "": "" ++ + friendConnection.getRemoteFriend().getNick() + "":"" ++ + Integer.toHexString(channelId); + } -+ -+ return generateImage(iconFlags); ++ return desc; + } + -+ /** -+ * @see ILabelProvider#getText(Object) -+ */ -+ public String getText(Object element) { -+ return ((IKeyTreeNode) element).getName(); ++ public int getDownloadRate() { ++ return (int) downloadRateAverage.getAverage(); + } + -+ /** -+ * @see org.eclipse.jface.viewers.IBaseLabelProvider#dispose() -+ */ -+ public void dispose() { -+ groupFontKey.dispose(); -+ groupFontNoKey.dispose(); ++ public long getLastMsgTime() { ++ return System.currentTimeMillis() - lastMsgTime; + } + -+ /** -+ * @see org.eclipse.jface.viewers.IFontProvider#getFont(java.lang.Object) -+ */ -+ public Font getFont(Object element) { -+ IKeyTreeNode item = (IKeyTreeNode) element; -+ if (item.getChildren().length > 0 && item.getMessagesBundleGroup() != null) { -+ if (item.getMessagesBundleGroup().isKey(item.getMessageKey())) { -+ return groupFontKey; -+ } -+ return groupFontNoKey; -+ } -+ return null; ++ public int getPathID() { 
++ return pathID; + } + -+ /** -+ * @see org.eclipse.jface.viewers.IColorProvider#getForeground(java.lang.Object) -+ */ -+ public Color getForeground(Object element) { -+ IKeyTreeNode treeItem = (IKeyTreeNode) element; -+ return null; ++ public Friend getRemoteFriend() { ++ return friendConnection.getRemoteFriend(); + } + -+ /** -+ * @see org.eclipse.jface.viewers.IColorProvider#getBackground(java.lang.Object) -+ */ -+ public Color getBackground(Object element) { -+ // TODO Auto-generated method stub -+ return null; -+ } -+ -+ /** -+ * Generates an image based on icon flags. -+ * @param iconFlags -+ * @return generated image -+ */ -+ private Image generateImage(int iconFlags) { -+ Image image = imageRegistry.get("""" + iconFlags); //$NON-NLS-1$ -+ if (image == null) { -+ // Figure background image -+ if ((iconFlags & KEY_COMMENTED) != 0) { -+ image = getRegistryImage(""keyCommented.gif""); //$NON-NLS-1$ -+ } else if ((iconFlags & KEY_NOT) != 0) { -+ image = getRegistryImage(""key.gif""); //$NON-NLS-1$ -+ } else { -+ image = getRegistryImage(""key.gif""); //$NON-NLS-1$ -+ } -+ -+ } -+ return image; ++ public String getRemoteIP() { ++ return friendConnection.getRemoteIp().getHostAddress(); + } + -+ -+ private Image getRegistryImage(String imageName) { -+ Image image = imageRegistry.get(imageName); -+ if (image == null) { -+ image = Activator.getImageDescriptor(imageName).createImage(); -+ imageRegistry.put(imageName, image); -+ } -+ return image; ++ public int getUploadRate() { ++ return (int) uploadRateAverage.getAverage(); + } + -+ @Override -+ public void update(ViewerCell cell) { -+ cell.setBackground(getBackground(cell.getElement())); -+ cell.setFont(getFont(cell.getElement())); -+ cell.setForeground(getForeground(cell.getElement())); -+ -+ cell.setText(getText(cell.getElement())); -+ cell.setImage(getImage(cell.getElement())); -+ super.update(cell); -+ } -+ -+ -+} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/provider/LightKeyTreeLabelProvider.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/provider/LightKeyTreeLabelProvider.java -new file mode 100644 -index 00000000..b718f1c7 ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/provider/LightKeyTreeLabelProvider.java -@@ -0,0 +1,16 @@ -+package org.eclipselabs.tapiji.tools.core.ui.widgets.provider; -+ -+import org.eclipse.jface.viewers.ITableLabelProvider; -+import org.eclipse.swt.graphics.Image; -+ -+public class LightKeyTreeLabelProvider extends KeyTreeLabelProvider implements ITableLabelProvider { -+ @Override -+ public Image getColumnImage(Object element, int columnIndex) { -+ return null; -+ } -+ -+ @Override -+ public String getColumnText(Object element, int columnIndex) { -+ return super.getText(element); -+ } -+} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/provider/ResKeyTreeContentProvider.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/provider/ResKeyTreeContentProvider.java -new file mode 100644 -index 00000000..1d8ac669 ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/provider/ResKeyTreeContentProvider.java -@@ -0,0 +1,246 @@ -+package org.eclipselabs.tapiji.tools.core.ui.widgets.provider; -+ -+import java.util.ArrayList; -+import java.util.Collection; -+import java.util.List; -+import java.util.Locale; -+ -+import 
org.eclipse.babel.editor.api.KeyTreeFactory; -+import org.eclipse.jface.viewers.IStructuredSelection; -+import org.eclipse.jface.viewers.ITreeContentProvider; -+import org.eclipse.jface.viewers.TreeViewer; -+import org.eclipse.jface.viewers.Viewer; -+import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.IAbstractKeyTreeModel; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.IKeyTreeNode; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.IKeyTreeVisitor; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.IMessage; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.IMessagesBundleGroup; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.IValuedKeyTreeNode; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.TreeType; -+ -+ -+ -+public class ResKeyTreeContentProvider implements ITreeContentProvider { -+ -+ private IAbstractKeyTreeModel keyTreeModel; -+ private Viewer viewer; -+ -+ private TreeType treeType = TreeType.Tree; -+ -+ /** Represents empty objects. */ -+ private static Object[] EMPTY_ARRAY = new Object[0]; -+ /** Viewer this provided act upon. */ -+ protected TreeViewer treeViewer; -+ -+ private IMessagesBundleGroup bundle; -+ private List locales; -+ private ResourceBundleManager manager; -+ private String bundleId; -+ -+ -+ public ResKeyTreeContentProvider (IMessagesBundleGroup iBundleGroup, List locales, -+ ResourceBundleManager manager, String bundleId, TreeType treeType) { -+ this.bundle = iBundleGroup; -+ this.locales = locales; -+ this.manager = manager; -+ this.bundleId = bundleId; -+ this.treeType = treeType; -+ } -+ -+ public void setBundleGroup (IMessagesBundleGroup iBundleGroup) { -+ this.bundle = iBundleGroup; -+ } -+ -+ public ResKeyTreeContentProvider() { -+ locales = new ArrayList(); -+ } -+ -+ public void setLocales (List locales) { -+ this.locales = locales; -+ } -+ -+ @Override -+ public Object[] getChildren(Object parentElement) { -+ // brauche sie als VKTI -+// if(parentElement instanceof IAbstractKeyTreeModel) { -+// IAbstractKeyTreeModel model = (IAbstractKeyTreeModel) parentElement; -+// return convertKTItoVKTI(model.getRootNodes()); -+// } else if (parentElement instanceof IValuedKeyTreeNode) { // convert because we hold the children as IKeyTreeNodes -+// return convertKTItoVKTI(((IValuedKeyTreeNode) parentElement).getChildren()); -+// } -+ //new code -+ IKeyTreeNode parentNode = (IKeyTreeNode) parentElement; -+ switch (treeType) { -+ case Tree: -+ return convertKTItoVKTI(keyTreeModel.getChildren(parentNode)); -+ case Flat: -+ return new IKeyTreeNode[0]; -+ default: -+ // Should not happen -+ return new IKeyTreeNode[0]; -+ } -+ //new code -+// return EMPTY_ARRAY; -+ } -+ -+ protected Object[] convertKTItoVKTI (Object[] children) { -+ Collection items = new ArrayList(); -+ -+ for (Object o : children) { -+ if (o instanceof IValuedKeyTreeNode) -+ items.add((IValuedKeyTreeNode)o); -+ else { -+ IKeyTreeNode kti = (IKeyTreeNode) o; -+ IValuedKeyTreeNode vkti = KeyTreeFactory.createKeyTree(kti.getParent(), kti.getName(), kti.getMessageKey(), bundle); -+ -+ for (IKeyTreeNode k : kti.getChildren()) { -+ vkti.addChild(k); -+ } -+ -+ // init translations -+ for (Locale l : locales) { -+ try { -+ IMessage message = bundle.getMessagesBundle(l).getMessage(kti.getMessageKey()); -+ if (message != null) { -+ vkti.addValue(l, message.getValue()); -+ } -+ } catch (Exception e) {} -+ } -+ items.add(vkti); -+ } -+ } -+ -+ return 
items.toArray(); -+ } -+ -+ @Override -+ public Object[] getElements(Object inputElement) { -+// return getChildren(inputElement); -+ switch (treeType) { -+ case Tree: -+ return convertKTItoVKTI(keyTreeModel.getRootNodes()); -+ case Flat: -+ final Collection actualKeys = new ArrayList(); -+ IKeyTreeVisitor visitor = new IKeyTreeVisitor() { -+ public void visitKeyTreeNode(IKeyTreeNode node) { -+ if (node.isUsedAsKey()) { -+ actualKeys.add(node); -+ } -+ } -+ }; -+ keyTreeModel.accept(visitor, keyTreeModel.getRootNode()); -+ -+ return actualKeys.toArray(); -+ default: -+ // Should not happen -+ return new IKeyTreeNode[0]; -+ } -+ } ++ protected abstract void handleDelayedOverlayMessage(final OSF2FChannelDataMsg msg); + -+ @Override -+ public Object getParent(Object element) { -+// Object[] parent = new Object[1]; -+// -+// if(element instanceof IKeyTreeNode) { -+// return ((IKeyTreeNode) element).getParent(); -+// } -+// -+// if (parent[0] == null) -+// return null; -+// -+// Object[] result = convertKTItoVKTI(parent); -+// if (result.length > 0) -+// return result[0]; -+// else -+// return null; -+ -+ // new code -+ IKeyTreeNode node = (IKeyTreeNode) element; -+ switch (treeType) { -+ case Tree: -+ return keyTreeModel.getParent(node); -+ case Flat: -+ return keyTreeModel; -+ default: -+ // Should not happen -+ return null; -+ } -+ // new code -+ } -+ -+ /** -+ * @see ITreeContentProvider#hasChildren(Object) -+ */ -+ public boolean hasChildren(Object element) { -+// return countChildren(element) > 0; -+ -+ // new code -+ switch (treeType) { -+ case Tree: -+ return keyTreeModel.getChildren((IKeyTreeNode) element).length > 0; -+ case Flat: -+ return false; -+ default: -+ // Should not happen -+ return false; -+ } -+ // new code -+ } -+ -+ public int countChildren(Object element) { -+ -+ if (element instanceof IKeyTreeNode) { -+ return ((IKeyTreeNode)element).getChildren().length; -+ } else if (element instanceof IValuedKeyTreeNode) { -+ return ((IValuedKeyTreeNode)element).getChildren().length; -+ } else { -+ System.out.println(""wait a minute""); -+ return 1; ++ public void incomingOverlayMsg(final OSF2FChannelDataMsg msg) { ++ lastMsgTime = System.currentTimeMillis(); ++ if (closed) { ++ return; + } -+ } -+ -+ /** -+ * Gets the selected key tree item. 
-+ * @return key tree item -+ */ -+ private IKeyTreeNode getTreeSelection() { -+ IStructuredSelection selection = (IStructuredSelection) treeViewer.getSelection(); -+ return ((IKeyTreeNode) selection.getFirstElement()); ++ delayedOverlayMessageTimer.queue(overlayDelayMs, INCOMING_MESSAGE_DELAY_SLACK, ++ new TimerTask() { ++ @Override ++ public void run() { ++ handleDelayedOverlayMessage(msg); ++ } ++ }); + } + -+ @Override -+ public void inputChanged(Viewer viewer, Object oldInput, Object newInput) { -+ this.viewer = (TreeViewer) viewer; -+ this.keyTreeModel = (IAbstractKeyTreeModel) newInput; ++ public boolean isLANLocal() { ++ return friendConnection.getNetworkConnection().isLANLocal(); + } -+ -+ public IMessagesBundleGroup getBundle() { -+ return bundle; -+ } -+ -+ public ResourceBundleManager getManager() { -+ return manager; -+ } -+ -+ public String getBundleId() { -+ return bundleId; -+ } + -+ @Override -+ public void dispose() { -+ // TODO Auto-generated method stub -+ -+ } -+ -+ public TreeType getTreeType() { -+ return treeType; ++ public boolean isStarted() { ++ return started; + } + -+ public void setTreeType(TreeType treeType) { -+ if (this.treeType != treeType) { -+ this.treeType = treeType; -+ viewer.refresh(); -+ } ++ public boolean isTimedOut() { ++ return System.currentTimeMillis() - lastMsgTime > TIMEOUT; + } -+} -diff --git a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/provider/ResKeyTreeLabelProvider.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/provider/ResKeyTreeLabelProvider.java -new file mode 100644 -index 00000000..8ad7424d ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/provider/ResKeyTreeLabelProvider.java -@@ -0,0 +1,152 @@ -+package org.eclipselabs.tapiji.tools.core.ui.widgets.provider; -+ -+import java.util.ArrayList; -+import java.util.List; -+import java.util.Locale; -+ -+import org.eclipse.jface.text.Region; -+import org.eclipse.jface.viewers.ViewerCell; -+import org.eclipse.swt.SWT; -+import org.eclipse.swt.custom.StyleRange; -+import org.eclipse.swt.graphics.Color; -+import org.eclipse.swt.graphics.Font; -+import org.eclipse.swt.graphics.Image; -+import org.eclipselabs.tapiji.tools.core.ui.widgets.filter.FilterInfo; -+import org.eclipselabs.tapiji.tools.core.util.FontUtils; -+import org.eclipselabs.tapiji.tools.core.util.ImageUtils; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.IKeyTreeNode; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.IMessage; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.IValuedKeyTreeNode; -+ -+ -+public class ResKeyTreeLabelProvider extends KeyTreeLabelProvider { -+ -+ private List locales; -+ private boolean searchEnabled = false; -+ -+ /*** COLORS ***/ -+ private Color gray = FontUtils.getSystemColor(SWT.COLOR_GRAY); -+ private Color black = FontUtils.getSystemColor(SWT.COLOR_BLACK); -+ private Color info_color = FontUtils.getSystemColor(SWT.COLOR_YELLOW); -+ -+ /*** FONTS ***/ -+ private Font bold = FontUtils.createFont(SWT.BOLD); -+ private Font bold_italic = FontUtils.createFont(SWT.BOLD | SWT.ITALIC); -+ -+ public ResKeyTreeLabelProvider (List locales) { -+ this.locales = locales; -+ } -+ -+ //@Override -+ public Image getColumnImage(Object element, int columnIndex) { -+ if (columnIndex == 0) { -+ IKeyTreeNode kti = (IKeyTreeNode) element; -+ IMessage[] be = kti.getMessagesBundleGroup().getMessages(kti.getMessageKey()); -+ boolean 
incomplete = false; -+ -+ if (be.length != kti.getMessagesBundleGroup().getMessagesBundleCount()) -+ incomplete = true; -+ else { -+ for (IMessage b : be) { -+ if (b.getValue() == null || b.getValue().trim().length() == 0) { -+ incomplete = true; -+ break; -+ } -+ } -+ } -+ -+ if (incomplete) -+ return ImageUtils.getImage(ImageUtils.ICON_RESOURCE_INCOMPLETE); -+ else -+ return ImageUtils.getImage(ImageUtils.ICON_RESOURCE); -+ } -+ return null; -+ } -+ -+ //@Override -+ public String getColumnText(Object element, int columnIndex) { -+ if (columnIndex == 0) -+ return super.getText(element); -+ -+ if (columnIndex <= locales.size()) { -+ IValuedKeyTreeNode item = (IValuedKeyTreeNode) element; -+ String entry = item.getValue(locales.get(columnIndex-1)); -+ if (entry != null) -+ return entry; -+ } -+ return """"; -+ } -+ -+ public void setSearchEnabled (boolean enabled) { -+ this.searchEnabled = enabled; -+ } -+ -+ public boolean isSearchEnabled () { -+ return this.searchEnabled; -+ } + -+ public void setLocales(List visibleLocales) { -+ locales = visibleLocales; -+ } -+ -+ protected boolean isMatchingToPattern (Object element, int columnIndex) { -+ boolean matching = false; -+ -+ if (element instanceof IValuedKeyTreeNode) { -+ IValuedKeyTreeNode vkti = (IValuedKeyTreeNode) element; -+ -+ if (vkti.getInfo() == null) -+ return false; -+ -+ FilterInfo filterInfo = (FilterInfo) vkti.getInfo(); -+ -+ if (columnIndex == 0) { -+ matching = filterInfo.isFoundInKey(); -+ } else { -+ matching = filterInfo.hasFoundInLocale(locales.get(columnIndex-1)); -+ } -+ } -+ -+ return matching; -+ } ++ private void sendReset() { ++ sentReset = true; ++ friendConnection.sendChannelRst(new OSF2FChannelReset(OSF2FChannelReset.CURRENT_VERSION, ++ channelId)); ++ } + -+ protected boolean isSearchEnabled (Object element) { -+ return (element instanceof IValuedKeyTreeNode && searchEnabled ); -+ } -+ -+ @Override -+ public void update(ViewerCell cell) { -+ Object element = cell.getElement(); -+ int columnIndex = cell.getColumnIndex(); -+ -+ if (isSearchEnabled(element)) { -+ if (isMatchingToPattern(element, columnIndex) ) { -+ List styleRanges = new ArrayList(); -+ FilterInfo filterInfo = (FilterInfo) ((IValuedKeyTreeNode)element).getInfo(); -+ -+ if (columnIndex > 0) { -+ for (Region reg : filterInfo.getFoundInLocaleRanges(locales.get(columnIndex-1))) { -+ styleRanges.add(new StyleRange(reg.getOffset(), reg.getLength(), black, info_color, SWT.BOLD)); -+ } -+ } else { -+ // check if the pattern has been found within the key section -+ if (filterInfo.isFoundInKey()) { -+ for (Region reg : filterInfo.getKeyOccurrences()) { -+ StyleRange sr = new StyleRange(reg.getOffset(), reg.getLength(), black, info_color, SWT.BOLD); -+ styleRanges.add(sr); -+ } -+ } -+ } -+ cell.setStyleRanges(styleRanges.toArray(new StyleRange[styleRanges.size()])); -+ } else { -+ cell.setForeground(gray); -+ } -+ } else if (columnIndex == 0) -+ super.update(cell); -+ -+ cell.setImage(this.getColumnImage(element, columnIndex)); -+ cell.setText(this.getColumnText(element, columnIndex)); -+ } -+ ++ abstract void start(); + ++ protected long writeMessageToFriendConnection(DirectByteBuffer msgBuffer) { ++ OSF2FChannelDataMsg msg = new OSF2FChannelDataMsg(OSF2FMessage.CURRENT_VERSION, channelId, ++ msgBuffer); ++ long totalWritten = msgBuffer.remaining(DirectByteBuffer.SS_MSG); ++ msg.setForward(false); ++ friendConnection.sendChannelMsg(msg, true); ++ return totalWritten; ++ } +} -diff --git 
a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/provider/ValueKeyTreeLabelProvider.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/provider/ValueKeyTreeLabelProvider.java -new file mode 100644 -index 00000000..1241bab4 ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/provider/ValueKeyTreeLabelProvider.java -@@ -0,0 +1,69 @@ -+package org.eclipselabs.tapiji.tools.core.ui.widgets.provider; -+ -+import org.eclipse.jface.viewers.ITableColorProvider; -+import org.eclipse.jface.viewers.ITableFontProvider; -+import org.eclipse.jface.viewers.ViewerCell; -+import org.eclipse.swt.graphics.Color; -+import org.eclipse.swt.graphics.Font; -+import org.eclipse.swt.graphics.Image; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.IKeyTreeNode; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.IMessage; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.IMessagesBundle; -+ -+ -+public class ValueKeyTreeLabelProvider extends KeyTreeLabelProvider implements -+ ITableColorProvider, ITableFontProvider { -+ -+ private IMessagesBundle locale; -+ -+ public ValueKeyTreeLabelProvider(IMessagesBundle iBundle) { -+ this.locale = iBundle; -+ } -+ -+ //@Override -+ public Image getColumnImage(Object element, int columnIndex) { -+ return null; -+ } +diff --git a/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/network/OverlayManager.java b/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/network/OverlayManager.java +index f933377c..c6505ad3 100644 +--- a/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/network/OverlayManager.java ++++ b/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/network/OverlayManager.java +@@ -56,837 +56,902 @@ + + public class OverlayManager { + +- /** +- * make sure to not call any az functions when holding this lock +- */ +- public static BigFatLock lock = BigFatLock.getInstance(false); +- +- // private Friend me; +- public static boolean logToStdOut = false; +- +- private int mMIN_DELAY_LINK_LATENCY = COConfigurationManager.getIntParameter(""f2f_search_emulate_hops_min"") * COConfigurationManager.getIntParameter(""f2f_overlay_emulate_link_latency_max""); +- private int mMAX_DELAY_LINK_LATENCY = COConfigurationManager.getIntParameter(""f2f_search_emulate_hops_max"") * COConfigurationManager.getIntParameter(""f2f_overlay_emulate_link_latency_max""); +- private int mMIN_RESPONSE_DELAY = COConfigurationManager.getIntParameter(""f2f_search_emulate_hops_min"") * COConfigurationManager.getIntParameter(""f2f_search_forward_delay""); +- private int mMAX_RESPONSE_DELAY = COConfigurationManager.getIntParameter(""f2f_search_emulate_hops_max"") * COConfigurationManager.getIntParameter(""f2f_search_forward_delay""); +- +- private double mForwardSearchProbability = COConfigurationManager.getFloatParameter(""f2f_forward_search_probability""); +- +- // this is just a way to treat requests for the local file list a bit +- // differently +- public final static int OWN_CONNECTION_ID_MAGIC_NUMBER = 0; +- +- private final static String RESPONSE_DELAY_SEED_SETTING_KEY = ""response_delay_seed""; +- +- private static final int TIMEOUT_CHECK_PERIOD = 5 * 1000; +- private final ConcurrentHashMap connections; +- private final FileListManager filelistManager; +- private final LinkedList friendConnectListeners = new LinkedList(); +- +- private final FriendManager friendManager; +- private long lastConnectionCheckRun = System.currentTimeMillis(); +- +- 
private final AZInstance myInstance; +- private final PublicKey ownPublicKey; +- private final QueueManager queueManager = new QueueManager(); +- private final RandomnessManager randomnessManager; +- private final RandomnessManager responseDelayRandomnesManager; +- private final SearchManager searchManager; +- +- public RotatingLogger searchTimingsLogger = new RotatingLogger(""search_timing""); +- +- private final GlobalManagerStats stats; +- +- private boolean stopped = false; +- private final Timer t = new Timer(""FriendConnectionInitialChecker"", true); +- +- public OverlayManager(FriendManager _friendManager, PublicKey _ownPublicKey, FileListManager _fileListManager, GlobalManagerStats _stats) { +- stats = _stats; +- myInstance = AzureusCoreImpl.getSingleton().getInstanceManager().getMyInstance(); +- +- this.randomnessManager = new RandomnessManager(); +- this.friendManager = _friendManager; +- this.filelistManager = _fileListManager; +- this.ownPublicKey = _ownPublicKey; +- this.connections = new ConcurrentHashMap(); +- this.searchManager = new SearchManager(this, filelistManager, randomnessManager, stats); +- +- byte[] seedBytes = COConfigurationManager.getByteParameter(RESPONSE_DELAY_SEED_SETTING_KEY); +- if (seedBytes != null) { +- responseDelayRandomnesManager = new RandomnessManager(seedBytes); +- } else { +- responseDelayRandomnesManager = new RandomnessManager(); +- COConfigurationManager.setParameter(RESPONSE_DELAY_SEED_SETTING_KEY, randomnessManager.getSecretBytes()); +- } +- +- COConfigurationManager.addAndFireParameterListeners(new String[] { ""f2f_overlay_emulate_link_latency_max"", +- ""f2f_search_emulate_hops_min"", ""f2f_search_emulate_hops_max"", ""f2f_search_forward_delay"", +- ""f2f_forward_search_probability"" }, new ParameterListener() { +- public void parameterChanged(String parameterName) { +- mMIN_DELAY_LINK_LATENCY = COConfigurationManager.getIntParameter(""f2f_search_emulate_hops_min"") * COConfigurationManager.getIntParameter(""f2f_overlay_emulate_link_latency_max""); +- mMAX_DELAY_LINK_LATENCY = COConfigurationManager.getIntParameter(""f2f_search_emulate_hops_max"") * COConfigurationManager.getIntParameter(""f2f_overlay_emulate_link_latency_max""); +- mMIN_RESPONSE_DELAY = COConfigurationManager.getIntParameter(""f2f_search_emulate_hops_min"") * COConfigurationManager.getIntParameter(""f2f_search_forward_delay""); +- mMAX_RESPONSE_DELAY = COConfigurationManager.getIntParameter(""f2f_search_emulate_hops_max"") * COConfigurationManager.getIntParameter(""f2f_search_forward_delay""); +- mForwardSearchProbability = COConfigurationManager.getFloatParameter(""f2f_forward_search_probability""); +- +- if( mForwardSearchProbability <= 0 ) { +- COConfigurationManager.setParameter(""f2f_forward_search_probability"", 0.5f); +- mForwardSearchProbability = 0.5; +- } +- System.err.println(""f2f_search_fwd_p: "" + mForwardSearchProbability); +- } +- }); +- +- OSF2FMessageFactory.init(); +- +- Timer timeoutTimer = new Timer(""OS Overlay Timeout checker"", true); +- timeoutTimer.schedule(new ConnectionChecker(), 0, TIMEOUT_CHECK_PERIOD); +- +- } +- +- public void closeAllConnections() { +- for (FriendConnection c : connections.values()) { +- c.close(); +- } +- +- stopped = true; +- } +- +- public boolean createIncomingConnection(byte[] publicKey, NetworkConnection netConn) { +- +- if (isConnectionAllowed(netConn.getEndpoint().getNotionalAddress().getAddress(), publicKey)) { +- Friend friend = friendManager.getFriend(publicKey); +- new FriendConnection(stats, queueManager, 
netConn, friend, filelistManager, new FriendConnectionListener()); +- +- return true; +- } +- return false; +- } +- +- public boolean createOutgoingConnection(ConnectionEndpoint remoteFriendAddr, Friend friend) { +- if (isConnectionAllowed(remoteFriendAddr.getNotionalAddress().getAddress(), friend.getPublicKey())) { +- final FriendConnection fc = new FriendConnection(stats, queueManager, remoteFriendAddr, friend, filelistManager, new FriendConnectionListener()); +- /* +- * create a check for this connection to verify that we actually get +- * connected within a reasonable time frame +- */ +- t.schedule(new TimerTask() { +- @Override +- public void run() { +- if (fc.isTimedOut()) { +- fc.close(); +- } +- } +- }, FriendConnection.INITIAL_HANDSHAKE_TIMEOUT + 10 * 1000); +- +- return true; +- } +- return false; +- } +- +- private boolean deregisterConnection(FriendConnection connection) { +- lock.lock(); +- try { +- Log.log(""deregistered connection: "" + connection.toString() + "" "" + connections.containsKey(connection.hashCode()) + "" "", logToStdOut); +- boolean res = null != connections.remove(connection.hashCode()); +- Friend remoteFriend = connection.getRemoteFriend(); +- +- /* +- * check if there are any active connections to the friend, if not, +- * mark as disconnected +- * +- * this check is needed if there are 2 concurrent connections to the +- * same friend , and one is denied to register because of the other +- * one already connected +- */ +- +- FriendConnection connectedConn = null; +- for (FriendConnection c : connections.values()) { +- if (c.getRemoteFriend().equals(remoteFriend)) { +- connectedConn = c; +- } +- } +- /* +- * if the connection id != null, verify that the friend actually +- * think it is connected to the right connection id +- */ +- if (connectedConn != null && connectedConn.isHandshakeReceived()) { +- int friendConnId = remoteFriend.getConnectionId(); +- if (connectedConn.hashCode() != friendConnId) { +- // fix it... +- boolean fileListReceived = connectedConn.isFileListReceived(); +- if (!fileListReceived) { +- Log.log(""connection closed, existing connection found, set to handshaking: "" + remoteFriend.getNick(), logToStdOut); +- remoteFriend.setStatus(Friend.STATUS_HANDSHAKING); +- remoteFriend.setConnectionId(Friend.NOT_CONNECTED_CONNECTION_ID); +- } else { +- Log.log(""connection closed, existing connection found, set to connected: "" + remoteFriend.getNick(), logToStdOut); +- remoteFriend.setConnectionId(connectedConn.hashCode()); +- remoteFriend.setStatus(Friend.STATUS_ONLINE); +- } +- } +- } else { +- // ok, no existing connections, mark as disconnected. 
+- remoteFriend.disconnected(connection.hashCode()); +- } +- +- return res; +- } finally { +- lock.unlock(); +- } +- } +- +- public void disconnectFriend(Friend f) { +- for (FriendConnection conn : connections.values()) { +- if (conn.getRemoteFriend().equals(f)) { +- conn.close(); +- } +- } +- } +- +- void forwardSearchOrCancel(FriendConnection ignoreConn, OSF2FSearch msg) { +- for (FriendConnection conn : connections.values()) { +- if (ignoreConn.hashCode() == conn.hashCode()) { +- Log.log(""not forwarding search/cancel to: "" + conn + "" (source friend)"", logToStdOut); +- continue; +- } +- Log.log(""forwarding search/cancel to: "" + conn, logToStdOut); +- if (shouldForwardSearch(msg, ignoreConn)) { +- conn.sendSearch(msg.clone(), false); +- } +- } +- } +- +- public int getConnectCount() { +- return connections.size(); +- } +- +- public Map getConnectedFriends() { +- // sanity checks +- for (int connectionId : connections.keySet()) { +- FriendConnection c = connections.get(connectionId); +- // check status just to make sure +- final Friend remoteFriend = c.getRemoteFriend(); +- int status = remoteFriend.getStatus(); +- if (status == Friend.STATUS_OFFLINE && c.isHandshakeReceived()) { +- // fix it... +- boolean handshakeCompletedFully = c.isFileListReceived(); +- if (!handshakeCompletedFully) { +- Debug.out(""getConnectedFriends, existing connection found, settings to handshaking: "" + remoteFriend.getNick()); +- remoteFriend.setStatus(Friend.STATUS_HANDSHAKING); +- } else { +- Debug.out(""getConnectedFriends, existing connection found, settings to connected: "" + remoteFriend.getNick()); +- remoteFriend.setConnectionId(c.hashCode()); +- remoteFriend.setStatus(Friend.STATUS_ONLINE); +- } +- } +- } +- +- Map l = new HashMap(connections.size()); +- /* +- * we don't show me in friends list anymore +- */ +- // l.put(me.getConnectionIds().get(0), me); +- Friend[] friends = friendManager.getFriends(); +- for (Friend friend : friends) { +- if (friend.getStatus() == Friend.STATUS_ONLINE) { +- // System.out.println(""online: "" + friend.getNick()); +- l.put(friend.getConnectionId(), friend); +- } +- } +- +- return l; +- } +- +- public int getSearchDelayForInfohash(Friend destination, byte[] infohash) { +- if (destination.isCanSeeFileList()) { +- return 0; +- } else { +- int searchDelay = responseDelayRandomnesManager.getDeterministicNextInt(infohash, mMIN_RESPONSE_DELAY, mMAX_RESPONSE_DELAY); +- int latencyDelay = getLatencyDelayForInfohash(destination, infohash); +- return searchDelay + latencyDelay; +- } +- } +- +- public int getLatencyDelayForInfohash(Friend destination, byte[] infohash) { +- if (destination.isCanSeeFileList()) { +- return 0; +- } else { +- return responseDelayRandomnesManager.getDeterministicNextInt(infohash, mMIN_DELAY_LINK_LATENCY, mMAX_DELAY_LINK_LATENCY); +- } +- } +- +- +- public List getDisconnectedFriends() { +- List l = new ArrayList(); +- Friend[] friends = friendManager.getFriends(); +- for (Friend friend : friends) { +- if (friend.getStatus() != Friend.STATUS_ONLINE) { +- l.add(friend); +- } +- } +- return l; +- } +- +- public FileListManager getFilelistManager() { +- return filelistManager; +- } +- +- public List getFriendConnections() { +- return new ArrayList(connections.values()); +- } +- +- // private int parallelConnectCount(InetAddress remoteIP, byte[] +- // remotePubKey) { +- // int count = 0; +- // for (FriendConnection overlayConnection : connections.values()) { +- // if (overlayConnection.getRemoteIp().equals(remoteIP) +- // && 
Arrays.equals(overlayConnection.getRemotePublicKey(), +- // remotePubKey)) { +- // count++; +- // } +- // } +- // +- // return count; +- // } +- +- public long getLastConnectionCheckRun() { +- return lastConnectionCheckRun; +- } +- +- public PublicKey getOwnPublicKey() { +- return ownPublicKey; +- } +- +- public QueueManager getQueueManager() { +- return queueManager; +- } +- +- public SearchManager getSearchManager() { +- return searchManager; +- } +- +- public double getTransportDownloadKBps() { +- long totalDownloadSpeed = 0; +- +- LinkedList conns = new LinkedList(); +- conns.addAll(connections.values()); +- +- for (FriendConnection fc : conns) { +- final Map ot = fc.getOverlayTransports(); +- +- for (OverlayTransport o : ot.values()) { +- totalDownloadSpeed += o.getDownloadRate(); +- } +- } +- +- return totalDownloadSpeed / 1024.0; +- } +- +- public long getTransportSendRate(boolean includeLan) { +- long totalUploadSpeed = 0; +- +- LinkedList conns = new LinkedList(); +- conns.addAll(connections.values()); +- +- for (FriendConnection fc : conns) { +- final Map ot = fc.getOverlayTransports(); +- +- for (OverlayTransport o : ot.values()) { +- if (!includeLan && o.isLANLocal()) { +- // not including lan local peers +- } else { +- totalUploadSpeed += o.getUploadRate(); +- } +- } +- } +- +- return totalUploadSpeed; +- } +- +- public boolean isConnectionAllowed(InetAddress remoteIP, byte[] remotePubKey) { +- Friend friend = friendManager.getFriend(remotePubKey); +- if (stopped) { +- Log.log(""connection denied: (f2f transfers disabled)"", logToStdOut); +- return false; +- } +- // check if we should allow this public key to connect +- if (Arrays.equals(remotePubKey, ownPublicKey.getEncoded()) && remoteIP.equals(myInstance.getExternalAddress())) { +- Log.log(LogEvent.LT_INFORMATION, ""connection from self not allowed (if same ip)"", logToStdOut); +- return false; +- } else if (friend == null) { +- Log.log(LogEvent.LT_WARNING, "" access denied (not friend): "" + remoteIP, logToStdOut); +- return false; +- } else if (friend.isBlocked()) { +- Log.log(LogEvent.LT_WARNING, "" access denied (friend blocked): "" + remoteIP, logToStdOut); +- return false; +- } else if (friend.getFriendBannedUntil() > System.currentTimeMillis()) { +- double minutesLeft = friend.getFriendBannedUntil() - System.currentTimeMillis() / (60 * 1000.0); +- friend.updateConnectionLog(true, ""incoming connection denied, friend blocked for "" + minutesLeft + "" more minutes because of: "" + friend.getBannedReason()); +- Log.log(LogEvent.LT_WARNING, "" access denied (friend blocked for "" + minutesLeft + "" more minutes): "" + remoteIP, logToStdOut); +- return false; +- } +- +- for (FriendConnection c : connections.values()) { +- if (c.getRemoteFriend().equals(friend)) { +- Log.log(LogEvent.LT_WARNING, "" access denied (friend already connected): "" + remoteIP, logToStdOut); +- return false; +- } +- } +- Log.log(LogEvent.LT_INFORMATION, ""friend connection ok: "" + remoteIP + "" :: "" + friend, logToStdOut); +- return true; +- } +- +- /** +- * make sure to synchronize before calling this function +- */ +- private void notifyConnectionListeners(Friend f, boolean connected) { +- lock.lock(); +- try { +- for (FriendConnectListener cb : friendConnectListeners) { +- if (connected) { +- cb.friendConnected(f); +- } else { +- cb.friendDisconnected(f); +- } +- } +- } finally { +- lock.unlock(); +- } +- } +- +- private boolean registerConnection(FriendConnection connection) { +- lock.lock(); +- try { +- if 
(isConnectionAllowed(connection.getRemoteIp(), connection.getRemotePublicKey())) { +- connections.put(connection.hashCode(), connection); +- /* +- * don't mark remote friend as connected until after the +- * oneswarm handshake message is received +- */ +- // connection.getRemoteFriend().connected(connection.hashCode()); +- Log.log(""registered connection: "" + connection, logToStdOut); +- return true; +- } else { +- return false; +- } +- } finally { +- lock.unlock(); +- } +- } +- +- public void registerForConnectNotifications(FriendConnectListener callback) { +- lock.lock(); +- try { +- friendConnectListeners.add(callback); +- } finally { +- lock.unlock(); +- } +- } +- +- public void restartAllConnections() { +- stopped = false; +- } +- +- public void sendChatMessage(int connectionId, String inPlaintextMessage) { +- FriendConnection conn = connections.get(connectionId); +- conn.sendChat(inPlaintextMessage); +- } +- +- void sendDirectedSearch(FriendConnection target, OSF2FHashSearch search) { +- Log.log(""sending search to "" + target, logToStdOut); +- target.sendSearch(search, true); +- } +- +- public boolean sendFileListRequest(int connectionId, long maxCacheAge, PluginCallback callback) { +- +- // just check if the request is for the local list +- if (connectionId == OWN_CONNECTION_ID_MAGIC_NUMBER) { +- callback.requestCompleted(filelistManager.getOwnFileList()); +- } +- +- FriendConnection conn = connections.get(connectionId); +- +- if (conn != null) { +- +- FileList oldList = filelistManager.getFriendsList(conn.getRemoteFriend()); +- if (oldList != null && (System.currentTimeMillis() - oldList.getCreated()) < maxCacheAge) { +- callback.requestCompleted(oldList); +- } +- +- Log.log(""sending filelist request to "" + conn); +- conn.sendFileListRequest(OSF2FMessage.FILE_LIST_TYPE_COMPLETE, callback); +- return true; +- } else { +- System.err.println(""tried to get filelist for unknown connection id (friend just went offline?)!, stack trace is:""); +- new RuntimeException().printStackTrace(); +- } +- return false; +- } +- +- // public void startDownload(byte type, byte[] metainfo, +- // boolean createIfNotExist) { +- // if (type == OSF2FMessage.METAINFO_TYPE_BITTORRENT) { +- // +- +- public boolean sendMetaInfoRequest(int connectionId, int channelId, byte[] infohash, int lengthHint, PluginCallback callback) { +- FriendConnection conn = connections.get(connectionId); +- Log.log(""sending metainfo request to "" + conn); +- if (conn != null) { +- conn.sendMetaInfoRequest(OSF2FMessage.METAINFO_TYPE_BITTORRENT, channelId, infohash, lengthHint, callback); +- return true; +- } +- return false; +- +- } +- +- void sendSearchOrCancel(OSF2FSearch search, boolean skipQueue, boolean forceSend) { +- Log.log(""sending search/cancel to "" + connections.size(), logToStdOut); +- int numSent = 0; +- for (FriendConnection conn : connections.values()) { +- +- boolean shouldSend = true; +- if (!forceSend) { +- shouldSend = shouldForwardSearch(search, conn); +- } +- if (shouldSend) { +- conn.sendSearch(search.clone(), skipQueue); +- numSent++; +- } +- if (search instanceof OSF2FHashSearch) { +- OSF2FHashSearch hs = (OSF2FHashSearch) search; +- searchTimingsLogger.log(System.currentTimeMillis() + "", send_search, "" + conn.getRemoteFriend().getNick() + "", "" + hs.getSearchID() + "", "" + hs.getInfohashhash()); +- } +- } +- /* +- * for searches sent by us, if we didn't send it to anyone try again but +- * without the randomness linitng who we are sending to +- */ +- if (numSent == 0 && !forceSend) { +- 
sendSearchOrCancel(search, skipQueue, true); +- } +- } +- +- /** +- * to protect against colluding friends we are only forwarding searches with +- * 95% probability +- * +- * if forcesend = true the search will be forwarded anyway even if the +- * randomness says that friend shouldn't be forwarded +- */ +- private boolean shouldForwardSearch(OSF2FSearch search, FriendConnection conn) { +- boolean shouldSend = true; +- if (search instanceof OSF2FHashSearch) { +- shouldSend = shouldForwardSearch(((OSF2FHashSearch) search).getInfohashhash(), conn); +- } else if (search instanceof OSF2FSearchCancel) { +- long infohash = searchManager.getInfoHashHashFromSearchId(search.getSearchID()); +- if (infohash != -1) { +- shouldSend = shouldForwardSearch(infohash, conn); +- } else { +- shouldSend = false; +- } +- } +- return shouldSend; +- } +- +- private boolean shouldForwardSearch(long infohashhash, FriendConnection conn) { +- if (conn.getRemoteFriend().isCanSeeFileList()) { +- return true; +- } +- byte[] infohashbytes = RandomnessManager.getBytes(infohashhash); +- byte[] friendHash = RandomnessManager.getBytes(conn.getRemotePublicKeyHash()); +- byte[] all = new byte[infohashbytes.length + friendHash.length]; +- System.arraycopy(infohashbytes, 0, all, 0, infohashbytes.length); +- System.arraycopy(friendHash, 0, all, infohashbytes.length, friendHash.length); +- +- int randomVal = randomnessManager.getDeterministicRandomInt(all); +- if (randomVal < 0) { +- randomVal = -randomVal; +- } +- if (randomVal < Integer.MAX_VALUE * mForwardSearchProbability) { +- return true; +- } else { +- return false; +- } +- } +- +- public void triggerFileListUpdates() { +- List conns = new LinkedList(); +- lock.lock(); +- try { +- conns.addAll(connections.values()); +- } finally { +- lock.unlock(); +- } +- for (FriendConnection conn : connections.values()) { +- conn.triggerFileListSend(); +- } +- } +- +- private class ConnectionChecker extends TimerTask { +- +- @Override +- public void run() { +- +- try { +- // first, check if we have any overlays that are timed out +- for (FriendConnection connection : connections.values()) { +- connection.clearTimedOutForwards(); +- connection.clearTimedOutTransports(); +- connection.clearTimedOutSearchRecords(); +- connection.clearOldMetainfoRequests(); +- } +- } catch (Throwable t) { +- Debug.out(""F2F Connection Checker: got error when clearing transports/forwards"", t); +- } +- try { +- // then, check if we need to send any keepalives +- for (FriendConnection connection : connections.values()) { +- connection.doKeepAliveCheck(); +- } +- } catch (Throwable t) { +- Debug.out(""F2F Connection Checker: got error when sending keep alives"", t); +- } +- +- try { +- // check if we have any timed out connections +- List timedOut = new ArrayList(); +- for (FriendConnection connection : connections.values()) { +- if (connection.isTimedOut()) { +- timedOut.add(connection); +- } +- } +- // and close them +- for (FriendConnection friendConnection : timedOut) { +- friendConnection.close(); +- } +- } catch (Throwable t) { +- Debug.out(""F2F Connection Checker: got error when clearing friend connections"", t); +- } +- +- try { +- // then, recycle the search IDs +- searchManager.clearTimedOutSearches(); +- } catch (Throwable t) { +- Debug.out(""F2F Connection Checker: got error when clearing timed out searches"", t); +- } +- lastConnectionCheckRun = System.currentTimeMillis(); +- } +- } +- +- class FriendConnectionListener { +- public boolean connectSuccess(FriendConnection friendConnection) { +- if 
(!registerConnection(friendConnection)) { +- Log.log(""Unable to register connection, "" + ""connect count to high, closing connection"", logToStdOut); +- friendConnection.close(); +- return false; +- } +- return true; +- } +- +- public void disconnected(FriendConnection friendConnection) { +- if (friendConnection.isFileListReceived()) { +- notifyConnectionListeners(friendConnection.getRemoteFriend(), false); +- } +- deregisterConnection(friendConnection); +- } +- +- public void gotSearchCancel(FriendConnection friendConnection, OSF2FSearchCancel msg) { +- searchManager.handleIncomingSearchCancel(friendConnection, msg); +- } +- +- public void gotSearchMessage(FriendConnection friendConnection, OSF2FSearch msg) { +- if (msg instanceof OSF2FHashSearch) { +- OSF2FHashSearch hs = (OSF2FHashSearch) msg; +- searchTimingsLogger.log(System.currentTimeMillis() + "", search, "" + friendConnection.getRemoteFriend().getNick() + "", "" + hs.getSearchID() + "", "" + hs.getInfohashhash() + "", "" + friendConnection.getRemoteIp().getHostAddress()); +- } +- searchManager.handleIncomingSearch(friendConnection, msg); +- } +- +- public void gotSearchResponse(FriendConnection friendConnection, OSF2FSearchResp msg) { +- if (msg instanceof OSF2FHashSearchResp) { +- OSF2FHashSearchResp hsr = (OSF2FHashSearchResp) msg; +- searchTimingsLogger.log(System.currentTimeMillis() + "", response, "" + friendConnection.getRemoteFriend().getNick() + "", "" + hsr.getSearchID() + "", "" + hsr.getChannelID() + "", "" + friendConnection.getRemoteIp().getHostAddress() + "", "" + hsr.getPathID()); +- } +- searchManager.handleIncomingSearchResponse(friendConnection, msg); +- } +- +- @SuppressWarnings(""unchecked"") +- public void handshakeCompletedFully(final FriendConnection friendConnection) { +- +- notifyConnectionListeners(friendConnection.getRemoteFriend(), true); +- +- /* +- * check if we have any running downloads that this friend has +- */ +- Log.log(""New friend connected, checking if friend has anything we want"", logToStdOut); +- List runningDownloadHashes = new LinkedList(); +- List dms = AzureusCoreImpl.getSingleton().getGlobalManager().getDownloadManagers(); +- for (DownloadManager dm : dms) { +- if (dm.getState() == DownloadManager.STATE_DOWNLOADING) { +- try { +- TOTorrent torrent = dm.getTorrent(); +- if (torrent != null) { +- runningDownloadHashes.add(torrent.getHash()); +- } +- } catch (TOTorrentException e) { +- e.printStackTrace(); +- } +- } +- } +- Log.log(""found "" + runningDownloadHashes.size() + "" running downloads"", logToStdOut); +- FileList remoteFileList = filelistManager.getFriendsList(friendConnection.getRemoteFriend()); +- for (byte[] hash : runningDownloadHashes) { +- if (remoteFileList.contains(hash)) { +- Log.log(""sending search for '"" + Base32.encode(hash) + ""' to "" + friendConnection.getRemoteFriend(), logToStdOut); +- searchManager.sendDirectedHashSearch(friendConnection, hash); +- } +- } +- +- /** +- * Check if we need to send any pending chat messages to this user. 
(5/sec) +- */ +- Thread queryThread = new Thread(""Queued chat SQL query for: "" + friendConnection.getRemoteFriend().getNick()) { +- @Override +- public void run() { +- +- try { +- Thread.sleep(3*1000); +- } catch( InterruptedException e ) {} +- +- String base64Key = new String(Base64.encode(friendConnection.getRemotePublicKey())); +- ChatDAO dao = ChatDAO.get(); +- List out = dao.getQueuedMessagesForUser(base64Key); +- for( Chat c : out ) { +- if( friendConnection.isTimedOut() || friendConnection.isClosing() ) { +- return; +- } +- +- friendConnection.sendChat(c.getMessage() + "" (sent "" + StringTools.formatDateAppleLike(new Date(c.getTimestamp()), true) + "")""); +- dao.markSent(c.getUID()); +- +- try { +- Thread.sleep(200); +- } catch( InterruptedException e ) {} +- } +- } +- }; +- queryThread.setDaemon(true); +- queryThread.start(); +- } +- } ++ /** ++ * make sure to not call any az functions when holding this lock ++ */ ++ public static BigFatLock lock = BigFatLock.getInstance(false); ++ ++ // private Friend me; ++ public static boolean logToStdOut = false; ++ ++ private int mMIN_DELAY_LINK_LATENCY = COConfigurationManager ++ .getIntParameter(""f2f_search_emulate_hops_min"") ++ * COConfigurationManager.getIntParameter(""f2f_overlay_emulate_link_latency_max""); ++ private int mMAX_DELAY_LINK_LATENCY = COConfigurationManager ++ .getIntParameter(""f2f_search_emulate_hops_max"") ++ * COConfigurationManager.getIntParameter(""f2f_overlay_emulate_link_latency_max""); ++ private int mMIN_RESPONSE_DELAY = COConfigurationManager ++ .getIntParameter(""f2f_search_emulate_hops_min"") ++ * COConfigurationManager.getIntParameter(""f2f_search_forward_delay""); ++ private int mMAX_RESPONSE_DELAY = COConfigurationManager ++ .getIntParameter(""f2f_search_emulate_hops_max"") ++ * COConfigurationManager.getIntParameter(""f2f_search_forward_delay""); ++ ++ private double mForwardSearchProbability = COConfigurationManager ++ .getFloatParameter(""f2f_forward_search_probability""); ++ ++ // this is just a way to treat requests for the local file list a bit ++ // differently ++ public final static int OWN_CONNECTION_ID_MAGIC_NUMBER = 0; ++ ++ private final static String RESPONSE_DELAY_SEED_SETTING_KEY = ""response_delay_seed""; ++ ++ private static final int TIMEOUT_CHECK_PERIOD = 5 * 1000; ++ private final ConcurrentHashMap connections; ++ private final FileListManager filelistManager; ++ private final LinkedList friendConnectListeners = new LinkedList(); ++ ++ private final FriendManager friendManager; ++ private long lastConnectionCheckRun = System.currentTimeMillis(); ++ ++ private final AZInstance myInstance; ++ private final PublicKey ownPublicKey; ++ private final QueueManager queueManager = new QueueManager(); ++ private final RandomnessManager randomnessManager; ++ private final RandomnessManager responseDelayRandomnesManager; ++ private final SearchManager searchManager; ++ ++ public RotatingLogger searchTimingsLogger = new RotatingLogger(""search_timing""); ++ ++ private final GlobalManagerStats stats; ++ ++ private boolean stopped = false; ++ private final Timer t = new Timer(""FriendConnectionInitialChecker"", true); ++ ++ public OverlayManager(FriendManager _friendManager, PublicKey _ownPublicKey, ++ FileListManager _fileListManager, GlobalManagerStats _stats) { ++ stats = _stats; ++ myInstance = AzureusCoreImpl.getSingleton().getInstanceManager().getMyInstance(); ++ ++ this.randomnessManager = new RandomnessManager(); ++ this.friendManager = _friendManager; ++ this.filelistManager = 
_fileListManager; ++ this.ownPublicKey = _ownPublicKey; ++ this.connections = new ConcurrentHashMap(); ++ this.searchManager = new SearchManager(this, filelistManager, randomnessManager, stats); ++ ++ byte[] seedBytes = COConfigurationManager.getByteParameter(RESPONSE_DELAY_SEED_SETTING_KEY); ++ if (seedBytes != null) { ++ responseDelayRandomnesManager = new RandomnessManager(seedBytes); ++ } else { ++ responseDelayRandomnesManager = new RandomnessManager(); ++ COConfigurationManager.setParameter(RESPONSE_DELAY_SEED_SETTING_KEY, ++ randomnessManager.getSecretBytes()); ++ } ++ ++ COConfigurationManager.addAndFireParameterListeners(new String[] { ++ ""f2f_overlay_emulate_link_latency_max"", ""f2f_search_emulate_hops_min"", ++ ""f2f_search_emulate_hops_max"", ""f2f_search_forward_delay"", ++ ""f2f_forward_search_probability"" }, new ParameterListener() { ++ public void parameterChanged(String parameterName) { ++ mMIN_DELAY_LINK_LATENCY = COConfigurationManager ++ .getIntParameter(""f2f_search_emulate_hops_min"") ++ * COConfigurationManager ++ .getIntParameter(""f2f_overlay_emulate_link_latency_max""); ++ mMAX_DELAY_LINK_LATENCY = COConfigurationManager ++ .getIntParameter(""f2f_search_emulate_hops_max"") ++ * COConfigurationManager ++ .getIntParameter(""f2f_overlay_emulate_link_latency_max""); ++ mMIN_RESPONSE_DELAY = COConfigurationManager ++ .getIntParameter(""f2f_search_emulate_hops_min"") ++ * COConfigurationManager.getIntParameter(""f2f_search_forward_delay""); ++ mMAX_RESPONSE_DELAY = COConfigurationManager ++ .getIntParameter(""f2f_search_emulate_hops_max"") ++ * COConfigurationManager.getIntParameter(""f2f_search_forward_delay""); ++ mForwardSearchProbability = COConfigurationManager ++ .getFloatParameter(""f2f_forward_search_probability""); ++ ++ if (mForwardSearchProbability <= 0) { ++ COConfigurationManager.setParameter(""f2f_forward_search_probability"", 0.5f); ++ mForwardSearchProbability = 0.5; ++ } ++ System.err.println(""f2f_search_fwd_p: "" + mForwardSearchProbability); ++ } ++ }); + -+ //@Override -+ public String getColumnText(Object element, int columnIndex) { -+ try { -+ IKeyTreeNode item = (IKeyTreeNode) element; -+ IMessage entry = locale.getMessage(item.getMessageKey()); -+ if (entry != null) { -+ String value = entry.getValue(); -+ if (value.length() > 40) -+ value = value.substring(0, 39) + ""...""; -+ } -+ } catch (Exception e) { -+ } -+ return """"; -+ } ++ OSF2FMessageFactory.init(); + -+ @Override -+ public Color getBackground(Object element, int columnIndex) { -+ return null;//return new Color(Display.getDefault(), 255, 0, 0); -+ } ++ Timer timeoutTimer = new Timer(""OS Overlay Timeout checker"", true); ++ timeoutTimer.schedule(new ConnectionChecker(), 0, TIMEOUT_CHECK_PERIOD); + -+ @Override -+ public Color getForeground(Object element, int columnIndex) { -+ // TODO Auto-generated method stub -+ return null; -+ } ++ } + -+ @Override -+ public Font getFont(Object element, int columnIndex) { -+ return null; //UIUtils.createFont(SWT.BOLD); -+ } ++ public void closeAllConnections() { ++ for (FriendConnection c : connections.values()) { ++ c.close(); ++ } + -+ @Override -+ public void update(ViewerCell cell) { -+ Object element = cell.getElement(); -+ int columnIndex = cell.getColumnIndex(); -+ cell.setImage(this.getColumnImage(element, columnIndex)); -+ cell.setText(this.getColumnText(element, columnIndex)); -+ -+ super.update(cell); -+ } -+ -+} -diff --git 
a/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/sorter/ValuedKeyTreeItemSorter.java b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/sorter/ValuedKeyTreeItemSorter.java -new file mode 100644 -index 00000000..0650b1d7 ---- /dev/null -+++ b/org.eclipselabs.tapiji.tools.core.ui/src/org/eclipselabs/tapiji/tools/core/ui/widgets/sorter/ValuedKeyTreeItemSorter.java -@@ -0,0 +1,65 @@ -+package org.eclipselabs.tapiji.tools.core.ui.widgets.sorter; ++ stopped = true; ++ } + -+import java.util.Locale; ++ public boolean createIncomingConnection(byte[] publicKey, NetworkConnection netConn) { + -+import org.eclipse.jface.viewers.StructuredViewer; -+import org.eclipse.jface.viewers.Viewer; -+import org.eclipse.jface.viewers.ViewerSorter; -+import org.eclipselabs.tapiji.tools.core.model.view.SortInfo; -+import org.eclipselabs.tapiji.translator.rbe.babel.bundle.IValuedKeyTreeNode; ++ if (isConnectionAllowed(netConn.getEndpoint().getNotionalAddress().getAddress(), publicKey)) { ++ Friend friend = friendManager.getFriend(publicKey); ++ new FriendConnection(stats, queueManager, netConn, friend, filelistManager, ++ new FriendConnectionListener()); + -+public class ValuedKeyTreeItemSorter extends ViewerSorter { ++ return true; ++ } ++ return false; ++ } + -+ private StructuredViewer viewer; -+ private SortInfo sortInfo; -+ -+ public ValuedKeyTreeItemSorter (StructuredViewer viewer, -+ SortInfo sortInfo) { -+ this.viewer = viewer; -+ this.sortInfo = sortInfo; -+ } ++ public boolean createOutgoingConnection(ConnectionEndpoint remoteFriendAddr, Friend friend) { ++ if (isConnectionAllowed(remoteFriendAddr.getNotionalAddress().getAddress(), ++ friend.getPublicKey())) { ++ final FriendConnection fc = new FriendConnection(stats, queueManager, remoteFriendAddr, ++ friend, filelistManager, new FriendConnectionListener()); ++ /* ++ * create a check for this connection to verify that we actually get ++ * connected within a reasonable time frame ++ */ ++ t.schedule(new TimerTask() { ++ @Override ++ public void run() { ++ if (fc.isTimedOut()) { ++ fc.close(); ++ } ++ } ++ }, FriendConnection.INITIAL_HANDSHAKE_TIMEOUT + 10 * 1000); + -+ public StructuredViewer getViewer() { -+ return viewer; -+ } ++ return true; ++ } ++ return false; ++ } + -+ public void setViewer(StructuredViewer viewer) { -+ this.viewer = viewer; -+ } ++ private boolean deregisterConnection(FriendConnection connection) { ++ lock.lock(); ++ try { ++ Log.log(""deregistered connection: "" + connection.toString() + "" "" ++ + connections.containsKey(connection.hashCode()) + "" "", logToStdOut); ++ boolean res = null != connections.remove(connection.hashCode()); ++ Friend remoteFriend = connection.getRemoteFriend(); ++ ++ /* ++ * check if there are any active connections to the friend, if not, ++ * mark as disconnected ++ * ++ * this check is needed if there are 2 concurrent connections to the ++ * same friend , and one is denied to register because of the other ++ * one already connected ++ */ + -+ public SortInfo getSortInfo() { -+ return sortInfo; -+ } ++ FriendConnection connectedConn = null; ++ for (FriendConnection c : connections.values()) { ++ if (c.getRemoteFriend().equals(remoteFriend)) { ++ connectedConn = c; ++ } ++ } ++ /* ++ * if the connection id != null, verify that the friend actually ++ * think it is connected to the right connection id ++ */ ++ if (connectedConn != null && connectedConn.isHandshakeReceived()) { ++ int friendConnId = remoteFriend.getConnectionId(); 
++ if (connectedConn.hashCode() != friendConnId) { ++ // fix it... ++ boolean fileListReceived = connectedConn.isFileListReceived(); ++ if (!fileListReceived) { ++ Log.log(""connection closed, existing connection found, set to handshaking: "" ++ + remoteFriend.getNick(), logToStdOut); ++ remoteFriend.setStatus(Friend.STATUS_HANDSHAKING); ++ remoteFriend.setConnectionId(Friend.NOT_CONNECTED_CONNECTION_ID); ++ } else { ++ Log.log(""connection closed, existing connection found, set to connected: "" ++ + remoteFriend.getNick(), logToStdOut); ++ remoteFriend.setConnectionId(connectedConn.hashCode()); ++ remoteFriend.setStatus(Friend.STATUS_ONLINE); ++ } ++ } ++ } else { ++ // ok, no existing connections, mark as disconnected. ++ remoteFriend.disconnected(connection.hashCode()); ++ } + -+ public void setSortInfo(SortInfo sortInfo) { -+ this.sortInfo = sortInfo; -+ } ++ return res; ++ } finally { ++ lock.unlock(); ++ } ++ } + -+ @Override -+ public int compare(Viewer viewer, Object e1, Object e2) { -+ try { -+ if (!(e1 instanceof IValuedKeyTreeNode && e2 instanceof IValuedKeyTreeNode)) -+ return super.compare(viewer, e1, e2); -+ IValuedKeyTreeNode comp1 = (IValuedKeyTreeNode) e1; -+ IValuedKeyTreeNode comp2 = (IValuedKeyTreeNode) e2; -+ -+ int result = 0; -+ -+ if (sortInfo == null) -+ return 0; -+ -+ if (sortInfo.getColIdx() == 0) -+ result = comp1.getMessageKey().compareTo(comp2.getMessageKey()); -+ else { -+ Locale loc = sortInfo.getVisibleLocales().get(sortInfo.getColIdx()-1); -+ result = (comp1.getValue(loc) == null ? """" : comp1.getValue(loc)) -+ .compareTo((comp2.getValue(loc) == null ? """" : comp2.getValue(loc))); -+ } -+ -+ return result * (sortInfo.isDESC() ? -1 : 1); -+ } catch (Exception e) { -+ return 0; -+ } -+ } -+ -+}" -9ebbf1bfcea9942117727c08c6905dd444c230ae,hadoop,YARN-3361. CapacityScheduler side changes to- support non-exclusive node labels. Contributed by Wangda Tan (cherry picked- from commit 0fefda645bca935b87b6bb8ca63e6f18340d59f5)--,a,https://github.com/apache/hadoop,"diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt -index 478d0aebacc5f..059c5a3d39e7d 100644 ---- a/hadoop-yarn-project/CHANGES.txt -+++ b/hadoop-yarn-project/CHANGES.txt -@@ -24,6 +24,9 @@ Release 2.8.0 - UNRELEASED - YARN-3443. Create a 'ResourceHandler' subsystem to ease addition of support - for new resource types on the NM. (Sidharta Seethana via junping_du) - -+ YARN-3361. CapacityScheduler side changes to support non-exclusive node -+ labels. (Wangda Tan via jianhe) ++ public void disconnectFriend(Friend f) { ++ for (FriendConnection conn : connections.values()) { ++ if (conn.getRemoteFriend().equals(f)) { ++ conn.close(); ++ } ++ } ++ } + - IMPROVEMENTS - - YARN-1880. 
Cleanup TestApplicationClientProtocolOnHA -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/utils/BuilderUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/utils/BuilderUtils.java -index 68d4ef9fe77aa..f2146c8b124be 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/utils/BuilderUtils.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/utils/BuilderUtils.java -@@ -313,6 +313,7 @@ public static ResourceRequest newResourceRequest(ResourceRequest r) { - request.setResourceName(r.getResourceName()); - request.setCapability(r.getCapability()); - request.setNumContainers(r.getNumContainers()); -+ request.setNodeLabelExpression(r.getNodeLabelExpression()); - return request; - } - -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java -index 1be1727e86599..1071831263ae1 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java -@@ -146,7 +146,7 @@ public class RMAppAttemptImpl implements RMAppAttempt, Recoverable { - private ConcurrentMap> - finishedContainersSentToAM = - new ConcurrentHashMap>(); -- private Container masterContainer; -+ private volatile Container masterContainer; - - private float progress = 0; - private String host = ""N/A""; -@@ -762,13 +762,7 @@ public List pullJustFinishedContainers() { - - @Override - public Container getMasterContainer() { -- this.readLock.lock(); -- -- try { -- return this.masterContainer; -- } finally { -- this.readLock.unlock(); -- } -+ return this.masterContainer; - } - - @InterfaceAudience.Private -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/AppSchedulingInfo.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/AppSchedulingInfo.java -index 5521d47ed6076..5604f0f33965f 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/AppSchedulingInfo.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/AppSchedulingInfo.java -@@ -73,10 +73,11 @@ public class AppSchedulingInfo { - /* Allocated by scheduler */ - boolean pending = true; // for app metrics - -+ private ResourceUsage appResourceUsage; - - public AppSchedulingInfo(ApplicationAttemptId appAttemptId, - String user, Queue queue, ActiveUsersManager activeUsersManager, -- long epoch) 
{ -+ long epoch, ResourceUsage appResourceUsage) { - this.applicationAttemptId = appAttemptId; - this.applicationId = appAttemptId.getApplicationId(); - this.queue = queue; -@@ -84,6 +85,7 @@ public AppSchedulingInfo(ApplicationAttemptId appAttemptId, - this.user = user; - this.activeUsersManager = activeUsersManager; - this.containerIdCounter = new AtomicLong(epoch << EPOCH_BIT_SHIFT); -+ this.appResourceUsage = appResourceUsage; - } - - public ApplicationId getApplicationId() { -@@ -191,13 +193,19 @@ synchronized public void updateResourceRequests( - lastRequestCapability); - - // update queue: -+ Resource increasedResource = Resources.multiply(request.getCapability(), -+ request.getNumContainers()); - queue.incPendingResource( - request.getNodeLabelExpression(), -- Resources.multiply(request.getCapability(), -- request.getNumContainers())); -+ increasedResource); -+ appResourceUsage.incPending(request.getNodeLabelExpression(), increasedResource); - if (lastRequest != null) { -+ Resource decreasedResource = -+ Resources.multiply(lastRequestCapability, lastRequestContainers); - queue.decPendingResource(lastRequest.getNodeLabelExpression(), -- Resources.multiply(lastRequestCapability, lastRequestContainers)); -+ decreasedResource); -+ appResourceUsage.decPending(lastRequest.getNodeLabelExpression(), -+ decreasedResource); - } - } - } -@@ -385,6 +393,8 @@ synchronized private void decrementOutstanding( - checkForDeactivation(); - } - -+ appResourceUsage.decPending(offSwitchRequest.getNodeLabelExpression(), -+ offSwitchRequest.getCapability()); - queue.decPendingResource(offSwitchRequest.getNodeLabelExpression(), - offSwitchRequest.getCapability()); - } -@@ -492,9 +502,10 @@ public synchronized void recoverContainer(RMContainer rmContainer) { - } - - public ResourceRequest cloneResourceRequest(ResourceRequest request) { -- ResourceRequest newRequest = ResourceRequest.newInstance( -- request.getPriority(), request.getResourceName(), -- request.getCapability(), 1, request.getRelaxLocality()); -+ ResourceRequest newRequest = -+ ResourceRequest.newInstance(request.getPriority(), -+ request.getResourceName(), request.getCapability(), 1, -+ request.getRelaxLocality(), request.getNodeLabelExpression()); - return newRequest; - } - } -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/ResourceUsage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/ResourceUsage.java -index 36ee4daa1edbc..5169b78dd582f 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/ResourceUsage.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/ResourceUsage.java -@@ -27,6 +27,7 @@ - - import org.apache.hadoop.yarn.api.records.Resource; - import org.apache.hadoop.yarn.nodelabels.CommonNodeLabelsManager; -+import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager; - import org.apache.hadoop.yarn.util.resource.Resources; - - /** -@@ -250,6 +251,10 @@ private static Resource normalize(Resource res) { - } - - private Resource _get(String label, ResourceType type) { -+ if (label == null) { -+ label = RMNodeLabelsManager.NO_LABEL; ++ void 
forwardSearchOrCancel(FriendConnection ignoreConn, OSF2FSearch msg) { ++ for (FriendConnection conn : connections.values()) { ++ if (ignoreConn.hashCode() == conn.hashCode()) { ++ Log.log(""not forwarding search/cancel to: "" + conn + "" (source friend)"", ++ logToStdOut); ++ continue; ++ } ++ Log.log(""forwarding search/cancel to: "" + conn, logToStdOut); ++ if (shouldForwardSearch(msg, ignoreConn)) { ++ conn.sendSearch(msg.clone(), false); ++ } ++ } + } -+ - try { - readLock.lock(); - UsageByLabel usage = usages.get(label); -@@ -263,6 +268,9 @@ private Resource _get(String label, ResourceType type) { - } - - private UsageByLabel getAndAddIfMissing(String label) { -+ if (label == null) { -+ label = RMNodeLabelsManager.NO_LABEL; ++ ++ public int getConnectCount() { ++ return connections.size(); + } - if (!usages.containsKey(label)) { - UsageByLabel u = new UsageByLabel(label); - usages.put(label, u); -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/SchedulerApplicationAttempt.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/SchedulerApplicationAttempt.java -index 5e0bbc7f9b48e..fccf7661a2ad4 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/SchedulerApplicationAttempt.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/SchedulerApplicationAttempt.java -@@ -56,6 +56,8 @@ - import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerReservedEvent; - import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerState; - import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeCleanContainerEvent; -+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.SchedulingMode; -+import org.apache.hadoop.yarn.util.resource.ResourceCalculator; - import org.apache.hadoop.yarn.util.resource.Resources; - - import com.google.common.base.Preconditions; -@@ -108,14 +110,24 @@ public class SchedulerApplicationAttempt { - private Set pendingRelease = null; - - /** -- * Count how many times the application has been given an opportunity -- * to schedule a task at each priority. Each time the scheduler -- * asks the application for a task at this priority, it is incremented, -- * and each time the application successfully schedules a task, it -+ * Count how many times the application has been given an opportunity to -+ * schedule a task at each priority. Each time the scheduler asks the -+ * application for a task at this priority, it is incremented, and each time -+ * the application successfully schedules a task (at rack or node local), it - * is reset to 0. - */ - Multiset schedulingOpportunities = HashMultiset.create(); - -+ /** -+ * Count how many times the application has been given an opportunity to -+ * schedule a non-partitioned resource request at each priority. Each time the -+ * scheduler asks the application for a task at this priority, it is -+ * incremented, and each time the application successfully schedules a task, -+ * it is reset to 0 when schedule any task at corresponding priority. 
-+ */ -+ Multiset missedNonPartitionedRequestSchedulingOpportunity = -+ HashMultiset.create(); -+ - // Time of the last container scheduled at the current allowed level - protected Map lastScheduledContainer = - new HashMap(); -@@ -132,7 +144,7 @@ public SchedulerApplicationAttempt(ApplicationAttemptId applicationAttemptId, - this.rmContext = rmContext; - this.appSchedulingInfo = - new AppSchedulingInfo(applicationAttemptId, user, queue, -- activeUsersManager, rmContext.getEpoch()); -+ activeUsersManager, rmContext.getEpoch(), attemptResourceUsage); - this.queue = queue; - this.pendingRelease = new HashSet(); - this.attemptId = applicationAttemptId; -@@ -489,6 +501,18 @@ public boolean isBlacklisted(String resourceName) { - return this.appSchedulingInfo.isBlacklisted(resourceName); - } - -+ public synchronized int addMissedNonPartitionedRequestSchedulingOpportunity( -+ Priority priority) { -+ missedNonPartitionedRequestSchedulingOpportunity.add(priority); -+ return missedNonPartitionedRequestSchedulingOpportunity.count(priority); -+ } + -+ public synchronized void -+ resetMissedNonPartitionedRequestSchedulingOpportunity(Priority priority) { -+ missedNonPartitionedRequestSchedulingOpportunity.setCount(priority, 0); -+ } ++ public Map getConnectedFriends() { ++ // sanity checks ++ for (int connectionId : connections.keySet()) { ++ FriendConnection c = connections.get(connectionId); ++ // check status just to make sure ++ final Friend remoteFriend = c.getRemoteFriend(); ++ int status = remoteFriend.getStatus(); ++ if (status == Friend.STATUS_OFFLINE && c.isHandshakeReceived()) { ++ // fix it... ++ boolean handshakeCompletedFully = c.isFileListReceived(); ++ if (!handshakeCompletedFully) { ++ Debug.out(""getConnectedFriends, existing connection found, settings to handshaking: "" ++ + remoteFriend.getNick()); ++ remoteFriend.setStatus(Friend.STATUS_HANDSHAKING); ++ } else { ++ Debug.out(""getConnectedFriends, existing connection found, settings to connected: "" ++ + remoteFriend.getNick()); ++ remoteFriend.setConnectionId(c.hashCode()); ++ remoteFriend.setStatus(Friend.STATUS_ONLINE); ++ } ++ } ++ } + -+ - public synchronized void addSchedulingOpportunity(Priority priority) { - schedulingOpportunities.setCount(priority, - schedulingOpportunities.count(priority) + 1); -@@ -518,6 +542,7 @@ public synchronized int getSchedulingOpportunities(Priority priority) { - public synchronized void resetSchedulingOpportunities(Priority priority) { - resetSchedulingOpportunities(priority, System.currentTimeMillis()); - } ++ Map l = new HashMap(connections.size()); ++ /* ++ * we don't show me in friends list anymore ++ */ ++ // l.put(me.getConnectionIds().get(0), me); ++ Friend[] friends = friendManager.getFriends(); ++ for (Friend friend : friends) { ++ if (friend.getStatus() == Friend.STATUS_ONLINE) { ++ // System.out.println(""online: "" + friend.getNick()); ++ l.put(friend.getConnectionId(), friend); ++ } ++ } + - // used for continuous scheduling - public synchronized void resetSchedulingOpportunities(Priority priority, - long currentTimeMs) { -@@ -669,4 +694,13 @@ public void recordContainerAllocationTime(long value) { - public Set getBlacklistedNodes() { - return this.appSchedulingInfo.getBlackListCopy(); - } -+ -+ @Private -+ public boolean hasPendingResourceRequest(ResourceCalculator rc, -+ String nodePartition, Resource cluster, -+ SchedulingMode schedulingMode) { -+ return SchedulerUtils.hasPendingResourceRequest(rc, -+ this.attemptResourceUsage, nodePartition, cluster, -+ schedulingMode); -+ } - } 
-diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/SchedulerUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/SchedulerUtils.java -index 248cc08b74853..7a1a5287a9959 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/SchedulerUtils.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/SchedulerUtils.java -@@ -37,11 +37,10 @@ - import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; - import org.apache.hadoop.yarn.security.AccessType; - import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager; -+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.SchedulingMode; - import org.apache.hadoop.yarn.util.resource.ResourceCalculator; - import org.apache.hadoop.yarn.util.resource.Resources; - --import com.google.common.collect.Sets; -- - /** - * Utilities shared by schedulers. - */ -@@ -235,9 +234,13 @@ public static void validateResourceRequest(ResourceRequest resReq, - if (labelExp == null && queueInfo != null - && ResourceRequest.ANY.equals(resReq.getResourceName())) { - labelExp = queueInfo.getDefaultNodeLabelExpression(); -- resReq.setNodeLabelExpression(labelExp); - } - -+ // If labelExp still equals to null, set it to be NO_LABEL -+ resReq -+ .setNodeLabelExpression(labelExp == null ? RMNodeLabelsManager.NO_LABEL -+ : labelExp); -+ - // we don't allow specify label expression other than resourceName=ANY now - if (!ResourceRequest.ANY.equals(resReq.getResourceName()) - && labelExp != null && !labelExp.trim().isEmpty()) { -@@ -273,25 +276,6 @@ public static void validateResourceRequest(ResourceRequest resReq, - } - } - -- public static boolean checkQueueAccessToNode(Set queueLabels, -- Set nodeLabels) { -- // if queue's label is *, it can access any node -- if (queueLabels != null && queueLabels.contains(RMNodeLabelsManager.ANY)) { -- return true; -- } -- // any queue can access to a node without label -- if (nodeLabels == null || nodeLabels.isEmpty()) { -- return true; -- } -- // a queue can access to a node only if it contains any label of the node -- if (queueLabels != null -- && Sets.intersection(queueLabels, nodeLabels).size() > 0) { -- return true; -- } -- // sorry, you cannot access -- return false; -- } -- - public static void checkIfLabelInClusterNodeLabels(RMNodeLabelsManager mgr, - Set labels) throws IOException { - if (mgr == null) { -@@ -311,26 +295,6 @@ public static void checkIfLabelInClusterNodeLabels(RMNodeLabelsManager mgr, - } - } - } -- -- public static boolean checkNodeLabelExpression(Set nodeLabels, -- String labelExpression) { -- // empty label expression can only allocate on node with empty labels -- if (labelExpression == null || labelExpression.trim().isEmpty()) { -- if (!nodeLabels.isEmpty()) { -- return false; -- } -- } -- -- if (labelExpression != null) { -- for (String str : labelExpression.split(""&&"")) { -- if (!str.trim().isEmpty() -- && (nodeLabels == null || !nodeLabels.contains(str.trim()))) { -- return false; -- } -- } -- } -- return true; -- } - - public static boolean checkQueueLabelExpression(Set queueLabels, - String 
labelExpression) { -@@ -360,4 +324,43 @@ public static AccessType toAccessType(QueueACL acl) { - } - return null; - } -+ -+ public static boolean checkResourceRequestMatchingNodePartition( -+ ResourceRequest offswitchResourceRequest, String nodePartition, -+ SchedulingMode schedulingMode) { -+ // We will only look at node label = nodeLabelToLookAt according to -+ // schedulingMode and partition of node. -+ String nodePartitionToLookAt = null; -+ if (schedulingMode == SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY) { -+ nodePartitionToLookAt = nodePartition; -+ } else { -+ nodePartitionToLookAt = RMNodeLabelsManager.NO_LABEL; ++ return l; + } -+ -+ String askedNodePartition = offswitchResourceRequest.getNodeLabelExpression(); -+ if (null == askedNodePartition) { -+ askedNodePartition = RMNodeLabelsManager.NO_LABEL; ++ ++ public int getSearchDelayForInfohash(Friend destination, byte[] infohash) { ++ if (destination.isCanSeeFileList()) { ++ return 0; ++ } else { ++ int searchDelay = responseDelayRandomnesManager.getDeterministicNextInt(infohash, ++ mMIN_RESPONSE_DELAY, mMAX_RESPONSE_DELAY); ++ int latencyDelay = getLatencyDelayForInfohash(destination, infohash); ++ return searchDelay + latencyDelay; ++ } + } -+ return askedNodePartition.equals(nodePartitionToLookAt); -+ } -+ -+ private static boolean hasPendingResourceRequest(ResourceCalculator rc, -+ ResourceUsage usage, String partitionToLookAt, Resource cluster) { -+ if (Resources.greaterThan(rc, cluster, -+ usage.getPending(partitionToLookAt), Resources.none())) { -+ return true; ++ ++ public int getLatencyDelayForInfohash(Friend destination, byte[] infohash) { ++ if (destination.isCanSeeFileList()) { ++ return 0; ++ } else { ++ return responseDelayRandomnesManager.getDeterministicNextInt(infohash, ++ mMIN_DELAY_LINK_LATENCY, mMAX_DELAY_LINK_LATENCY); ++ } + } -+ return false; -+ } + -+ @Private -+ public static boolean hasPendingResourceRequest(ResourceCalculator rc, -+ ResourceUsage usage, String nodePartition, Resource cluster, -+ SchedulingMode schedulingMode) { -+ String partitionToLookAt = nodePartition; -+ if (schedulingMode == SchedulingMode.IGNORE_PARTITION_EXCLUSIVITY) { -+ partitionToLookAt = RMNodeLabelsManager.NO_LABEL; ++ public List getDisconnectedFriends() { ++ List l = new ArrayList(); ++ Friend[] friends = friendManager.getFriends(); ++ for (Friend friend : friends) { ++ if (friend.getStatus() != Friend.STATUS_ONLINE) { ++ l.add(friend); ++ } ++ } ++ return l; + } -+ return hasPendingResourceRequest(rc, usage, partitionToLookAt, cluster); -+ } - } -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/AbstractCSQueue.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/AbstractCSQueue.java -index 42ea089d72afa..d95c45c79be87 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/AbstractCSQueue.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/AbstractCSQueue.java -@@ -20,7 +20,6 @@ - - import java.io.IOException; - import java.util.HashMap; --import java.util.HashSet; - import java.util.Map; - import java.util.Set; - -@@ -38,12 +37,12 @@ 
- import org.apache.hadoop.yarn.conf.YarnConfiguration; - import org.apache.hadoop.yarn.factories.RecordFactory; - import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; --import org.apache.hadoop.yarn.nodelabels.CommonNodeLabelsManager; - import org.apache.hadoop.yarn.security.AccessType; - import org.apache.hadoop.yarn.security.PrivilegedEntity; - import org.apache.hadoop.yarn.security.PrivilegedEntity.EntityType; - import org.apache.hadoop.yarn.security.YarnAuthorizationProvider; - import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager; -+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.NodeType; - import org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics; - import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceLimits; - import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceUsage; -@@ -56,6 +55,11 @@ - public abstract class AbstractCSQueue implements CSQueue { - private static final Log LOG = LogFactory.getLog(AbstractCSQueue.class); - -+ static final CSAssignment NULL_ASSIGNMENT = -+ new CSAssignment(Resources.createResource(0, 0), NodeType.NODE_LOCAL); -+ -+ static final CSAssignment SKIP_ASSIGNMENT = new CSAssignment(true); -+ - CSQueue parent; - final String queueName; - volatile int numContainers; -@@ -343,16 +347,8 @@ public Resource getMinimumAllocation() { - } - - synchronized void allocateResource(Resource clusterResource, -- Resource resource, Set nodeLabels) { -- -- // Update usedResources by labels -- if (nodeLabels == null || nodeLabels.isEmpty()) { -- queueUsage.incUsed(resource); -- } else { -- for (String label : Sets.intersection(accessibleLabels, nodeLabels)) { -- queueUsage.incUsed(label, resource); -- } -- } -+ Resource resource, String nodePartition) { -+ queueUsage.incUsed(nodePartition, resource); - - ++numContainers; - CSQueueUtils.updateQueueStatistics(resourceCalculator, this, getParent(), -@@ -360,15 +356,8 @@ synchronized void allocateResource(Resource clusterResource, - } - - protected synchronized void releaseResource(Resource clusterResource, -- Resource resource, Set nodeLabels) { -- // Update usedResources by labels -- if (null == nodeLabels || nodeLabels.isEmpty()) { -- queueUsage.decUsed(resource); -- } else { -- for (String label : Sets.intersection(accessibleLabels, nodeLabels)) { -- queueUsage.decUsed(label, resource); -- } -- } -+ Resource resource, String nodePartition) { -+ queueUsage.decUsed(nodePartition, resource); - - CSQueueUtils.updateQueueStatistics(resourceCalculator, this, getParent(), - clusterResource, minimumAllocation); -@@ -434,103 +423,108 @@ private boolean isQueueHierarchyPreemptionDisabled(CSQueue q) { - parentQ.getPreemptionDisabled()); - } - -- private Resource getCurrentLimitResource(String nodeLabel, -- Resource clusterResource, ResourceLimits currentResourceLimits) { -- /* -- * Current limit resource: For labeled resource: limit = queue-max-resource -- * (TODO, this part need update when we support labeled-limit) For -- * non-labeled resource: limit = min(queue-max-resource, -- * limit-set-by-parent) -- */ -- Resource queueMaxResource = -- Resources.multiplyAndNormalizeDown(resourceCalculator, -- labelManager.getResourceByLabel(nodeLabel, clusterResource), -- queueCapacities.getAbsoluteMaximumCapacity(nodeLabel), minimumAllocation); -- if (nodeLabel.equals(RMNodeLabelsManager.NO_LABEL)) { -- return Resources.min(resourceCalculator, clusterResource, -- queueMaxResource, currentResourceLimits.getLimit()); -+ private 
Resource getCurrentLimitResource(String nodePartition, -+ Resource clusterResource, ResourceLimits currentResourceLimits, -+ SchedulingMode schedulingMode) { -+ if (schedulingMode == SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY) { -+ /* -+ * Current limit resource: For labeled resource: limit = queue-max-resource -+ * (TODO, this part need update when we support labeled-limit) For -+ * non-labeled resource: limit = min(queue-max-resource, -+ * limit-set-by-parent) -+ */ -+ Resource queueMaxResource = -+ Resources.multiplyAndNormalizeDown(resourceCalculator, -+ labelManager.getResourceByLabel(nodePartition, clusterResource), -+ queueCapacities.getAbsoluteMaximumCapacity(nodePartition), minimumAllocation); -+ if (nodePartition.equals(RMNodeLabelsManager.NO_LABEL)) { -+ return Resources.min(resourceCalculator, clusterResource, -+ queueMaxResource, currentResourceLimits.getLimit()); -+ } -+ return queueMaxResource; -+ } else if (schedulingMode == SchedulingMode.IGNORE_PARTITION_EXCLUSIVITY) { -+ // When we doing non-exclusive resource allocation, maximum capacity of -+ // all queues on this label equals to total resource with the label. -+ return labelManager.getResourceByLabel(nodePartition, clusterResource); - } -- return queueMaxResource; -+ -+ return Resources.none(); - } - - synchronized boolean canAssignToThisQueue(Resource clusterResource, -- Set nodeLabels, ResourceLimits currentResourceLimits, -- Resource nowRequired, Resource resourceCouldBeUnreserved) { -- // Get label of this queue can access, it's (nodeLabel AND queueLabel) -- Set labelCanAccess; -- if (null == nodeLabels || nodeLabels.isEmpty()) { -- labelCanAccess = new HashSet(); -- // Any queue can always access any node without label -- labelCanAccess.add(RMNodeLabelsManager.NO_LABEL); -- } else { -- labelCanAccess = new HashSet( -- accessibleLabels.contains(CommonNodeLabelsManager.ANY) ? nodeLabels -- : Sets.intersection(accessibleLabels, nodeLabels)); -- } -- -- for (String label : labelCanAccess) { -- // New total resource = used + required -- Resource newTotalResource = -- Resources.add(queueUsage.getUsed(label), nowRequired); -- -- Resource currentLimitResource = -- getCurrentLimitResource(label, clusterResource, currentResourceLimits); -- -- // if reservation continous looking enabled, check to see if could we -- // potentially use this node instead of a reserved node if the application -- // has reserved containers. 
-- // TODO, now only consider reservation cases when the node has no label -- if (this.reservationsContinueLooking -- && label.equals(RMNodeLabelsManager.NO_LABEL) -- && Resources.greaterThan(resourceCalculator, clusterResource, -- resourceCouldBeUnreserved, Resources.none())) { -- // resource-without-reserved = used - reserved -- Resource newTotalWithoutReservedResource = -- Resources.subtract(newTotalResource, resourceCouldBeUnreserved); -- -- // when total-used-without-reserved-resource < currentLimit, we still -- // have chance to allocate on this node by unreserving some containers -- if (Resources.lessThan(resourceCalculator, clusterResource, -- newTotalWithoutReservedResource, currentLimitResource)) { -- if (LOG.isDebugEnabled()) { -- LOG.debug(""try to use reserved: "" + getQueueName() -- + "" usedResources: "" + queueUsage.getUsed() -- + "", clusterResources: "" + clusterResource -- + "", reservedResources: "" + resourceCouldBeUnreserved -- + "", capacity-without-reserved: "" -- + newTotalWithoutReservedResource + "", maxLimitCapacity: "" -- + currentLimitResource); -- } -- return true; -+ String nodePartition, ResourceLimits currentResourceLimits, -+ Resource nowRequired, Resource resourceCouldBeUnreserved, -+ SchedulingMode schedulingMode) { -+ // New total resource = used + required -+ Resource newTotalResource = -+ Resources.add(queueUsage.getUsed(nodePartition), nowRequired); + -+ // Get current limited resource: -+ // - When doing RESPECT_PARTITION_EXCLUSIVITY allocation, we will respect -+ // queues' max capacity. -+ // - When doing IGNORE_PARTITION_EXCLUSIVITY allocation, we will not respect -+ // queue's max capacity, queue's max capacity on the partition will be -+ // considered to be 100%. Which is a queue can use all resource in the -+ // partition. -+ // Doing this because: for non-exclusive allocation, we make sure there's -+ // idle resource on the partition, to avoid wastage, such resource will be -+ // leveraged as much as we can, and preemption policy will reclaim it back -+ // when partitoned-resource-request comes back. -+ Resource currentLimitResource = -+ getCurrentLimitResource(nodePartition, clusterResource, -+ currentResourceLimits, schedulingMode); ++ public FileListManager getFilelistManager() { ++ return filelistManager; ++ } + -+ // if reservation continous looking enabled, check to see if could we -+ // potentially use this node instead of a reserved node if the application -+ // has reserved containers. 
-+ // TODO, now only consider reservation cases when the node has no label -+ if (this.reservationsContinueLooking -+ && nodePartition.equals(RMNodeLabelsManager.NO_LABEL) -+ && Resources.greaterThan(resourceCalculator, clusterResource, -+ resourceCouldBeUnreserved, Resources.none())) { -+ // resource-without-reserved = used - reserved -+ Resource newTotalWithoutReservedResource = -+ Resources.subtract(newTotalResource, resourceCouldBeUnreserved); ++ public List getFriendConnections() { ++ return new ArrayList(connections.values()); ++ } + -+ // when total-used-without-reserved-resource < currentLimit, we still -+ // have chance to allocate on this node by unreserving some containers -+ if (Resources.lessThan(resourceCalculator, clusterResource, -+ newTotalWithoutReservedResource, currentLimitResource)) { -+ if (LOG.isDebugEnabled()) { -+ LOG.debug(""try to use reserved: "" + getQueueName() -+ + "" usedResources: "" + queueUsage.getUsed() -+ + "", clusterResources: "" + clusterResource -+ + "", reservedResources: "" + resourceCouldBeUnreserved -+ + "", capacity-without-reserved: "" -+ + newTotalWithoutReservedResource + "", maxLimitCapacity: "" -+ + currentLimitResource); - } -+ return true; - } -- -- // Otherwise, if any of the label of this node beyond queue limit, we -- // cannot allocate on this node. Consider a small epsilon here. -- if (Resources.greaterThan(resourceCalculator, clusterResource, -- newTotalResource, currentLimitResource)) { -- return false; -- } ++ // private int parallelConnectCount(InetAddress remoteIP, byte[] ++ // remotePubKey) { ++ // int count = 0; ++ // for (FriendConnection overlayConnection : connections.values()) { ++ // if (overlayConnection.getRemoteIp().equals(remoteIP) ++ // && Arrays.equals(overlayConnection.getRemotePublicKey(), ++ // remotePubKey)) { ++ // count++; ++ // } ++ // } ++ // ++ // return count; ++ // } ++ ++ public long getLastConnectionCheckRun() { ++ return lastConnectionCheckRun; + } - -- if (LOG.isDebugEnabled()) { -- LOG.debug(getQueueName() -- + ""Check assign to queue, label="" + label -- + "" usedResources: "" + queueUsage.getUsed(label) -- + "" clusterResources: "" + clusterResource -- + "" currentUsedCapacity "" -- + Resources.divide(resourceCalculator, clusterResource, -- queueUsage.getUsed(label), -- labelManager.getResourceByLabel(label, clusterResource)) -- + "" max-capacity: "" -- + queueCapacities.getAbsoluteMaximumCapacity(label) -- + "")""); -- } -- return true; -+ // Check if we over current-resource-limit computed. 
-+ if (Resources.greaterThan(resourceCalculator, clusterResource, -+ newTotalResource, currentLimitResource)) { -+ return false; - } -- -- // Actually, this will not happen, since labelCanAccess will be always -- // non-empty -- return false; + -+ if (LOG.isDebugEnabled()) { -+ LOG.debug(getQueueName() -+ + ""Check assign to queue, nodePartition="" -+ + nodePartition -+ + "" usedResources: "" -+ + queueUsage.getUsed(nodePartition) -+ + "" clusterResources: "" -+ + clusterResource -+ + "" currentUsedCapacity "" -+ + Resources.divide(resourceCalculator, clusterResource, -+ queueUsage.getUsed(nodePartition), -+ labelManager.getResourceByLabel(nodePartition, clusterResource)) -+ + "" max-capacity: "" -+ + queueCapacities.getAbsoluteMaximumCapacity(nodePartition) + "")""); ++ public PublicKey getOwnPublicKey() { ++ return ownPublicKey; + } -+ return true; - } - - @Override -@@ -556,4 +550,33 @@ public void decPendingResource(String nodeLabel, Resource resourceToDec) { - parent.decPendingResource(nodeLabel, resourceToDec); - } - } -+ -+ /** -+ * Return if the queue has pending resource on given nodePartition and -+ * schedulingMode. -+ */ -+ boolean hasPendingResourceRequest(String nodePartition, -+ Resource cluster, SchedulingMode schedulingMode) { -+ return SchedulerUtils.hasPendingResourceRequest(resourceCalculator, -+ queueUsage, nodePartition, cluster, schedulingMode); -+ } -+ -+ boolean accessibleToPartition(String nodePartition) { -+ // if queue's label is *, it can access any node -+ if (accessibleLabels != null -+ && accessibleLabels.contains(RMNodeLabelsManager.ANY)) { -+ return true; ++ ++ public QueueManager getQueueManager() { ++ return queueManager; + } -+ // any queue can access to a node without label -+ if (nodePartition == null -+ || nodePartition.equals(RMNodeLabelsManager.NO_LABEL)) { -+ return true; ++ ++ public SearchManager getSearchManager() { ++ return searchManager; + } -+ // a queue can access to a node only if it contains any label of the node -+ if (accessibleLabels != null && accessibleLabels.contains(nodePartition)) { -+ return true; ++ ++ public double getTransportDownloadKBps() { ++ long totalDownloadSpeed = 0; ++ ++ LinkedList conns = new LinkedList(); ++ conns.addAll(connections.values()); ++ ++ for (FriendConnection fc : conns) { ++ final Map ot = fc.getOverlayTransports(); ++ ++ for (OverlayEndpoint o : ot.values()) { ++ totalDownloadSpeed += o.getDownloadRate(); ++ } ++ } ++ ++ return totalDownloadSpeed / 1024.0; + } -+ // sorry, you cannot access -+ return false; -+ } - } -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CSQueue.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CSQueue.java -index 1a9448acaa148..b06a646cec973 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CSQueue.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CSQueue.java -@@ -190,10 +190,13 @@ public void finishApplicationAttempt(FiCaSchedulerApp application, - * @param clusterResource the resource of the cluster. 
- * @param node node on which resources are available - * @param resourceLimits how much overall resource of this queue can use. -+ * @param schedulingMode Type of exclusive check when assign container on a -+ * NodeManager, see {@link SchedulingMode}. - * @return the assignment - */ - public CSAssignment assignContainers(Resource clusterResource, -- FiCaSchedulerNode node, ResourceLimits resourceLimits); -+ FiCaSchedulerNode node, ResourceLimits resourceLimits, -+ SchedulingMode schedulingMode); - - /** - * A container assigned to the queue has completed. -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacityScheduler.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacityScheduler.java -index e93c5291f2905..cfeee37d1e6ac 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacityScheduler.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacityScheduler.java -@@ -35,6 +35,7 @@ - import java.util.concurrent.atomic.AtomicBoolean; - import java.util.concurrent.atomic.AtomicInteger; - -+import org.apache.commons.lang.StringUtils; - import org.apache.commons.logging.Log; - import org.apache.commons.logging.LogFactory; - import org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate; -@@ -1114,28 +1115,30 @@ private synchronized void allocateContainersToNode(FiCaSchedulerNode node) { - if (reservedContainer != null) { - FiCaSchedulerApp reservedApplication = - getCurrentAttemptForContainer(reservedContainer.getContainerId()); -- + - // Try to fulfill the reservation -- LOG.info(""Trying to fulfill reservation for application "" + -- reservedApplication.getApplicationId() + "" on node: "" + -- node.getNodeID()); -- -- LeafQueue queue = ((LeafQueue)reservedApplication.getQueue()); -- assignment = queue.assignContainers( -+ LOG.info(""Trying to fulfill reservation for application "" -+ + reservedApplication.getApplicationId() + "" on node: "" -+ + node.getNodeID()); ++ public long getTransportSendRate(boolean includeLan) { ++ long totalUploadSpeed = 0; + -+ LeafQueue queue = ((LeafQueue) reservedApplication.getQueue()); -+ assignment = -+ queue.assignContainers( - clusterResource, - node, - // TODO, now we only consider limits for parent for non-labeled - // resources, should consider labeled resources as well. 
- new ResourceLimits(labelManager.getResourceByLabel( -- RMNodeLabelsManager.NO_LABEL, clusterResource))); -+ RMNodeLabelsManager.NO_LABEL, clusterResource)), -+ SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - if (assignment.isFulfilledReservation()) { - CSAssignment tmp = - new CSAssignment(reservedContainer.getReservedResource(), -- assignment.getType()); -+ assignment.getType()); - Resources.addTo(assignment.getAssignmentInformation().getAllocated(), -- reservedContainer.getReservedResource()); -+ reservedContainer.getReservedResource()); - tmp.getAssignmentInformation().addAllocationDetails( -- reservedContainer.getContainerId(), queue.getQueuePath()); -+ reservedContainer.getContainerId(), queue.getQueuePath()); - tmp.getAssignmentInformation().incrAllocations(); - updateSchedulerHealth(lastNodeUpdateTime, node, tmp); - schedulerHealth.updateSchedulerFulfilledReservationCounts(1); -@@ -1143,16 +1146,13 @@ private synchronized void allocateContainersToNode(FiCaSchedulerNode node) { - - RMContainer excessReservation = assignment.getExcessReservation(); - if (excessReservation != null) { -- Container container = excessReservation.getContainer(); -- queue.completedContainer( -- clusterResource, assignment.getApplication(), node, -- excessReservation, -- SchedulerUtils.createAbnormalContainerStatus( -- container.getId(), -- SchedulerUtils.UNRESERVED_CONTAINER), -- RMContainerEventType.RELEASED, null, true); -+ Container container = excessReservation.getContainer(); -+ queue.completedContainer(clusterResource, assignment.getApplication(), -+ node, excessReservation, SchedulerUtils -+ .createAbnormalContainerStatus(container.getId(), -+ SchedulerUtils.UNRESERVED_CONTAINER), -+ RMContainerEventType.RELEASED, null, true); - } -- - } - - // Try to schedule more if there are no reservations to fulfill -@@ -1163,22 +1163,61 @@ private synchronized void allocateContainersToNode(FiCaSchedulerNode node) { - LOG.debug(""Trying to schedule on node: "" + node.getNodeName() + - "", available: "" + node.getAvailableResource()); - } ++ LinkedList conns = new LinkedList(); ++ conns.addAll(connections.values()); + - assignment = root.assignContainers( - clusterResource, - node, - // TODO, now we only consider limits for parent for non-labeled - // resources, should consider labeled resources as well. - new ResourceLimits(labelManager.getResourceByLabel( -- RMNodeLabelsManager.NO_LABEL, clusterResource))); -+ RMNodeLabelsManager.NO_LABEL, clusterResource)), -+ SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); -+ if (Resources.greaterThan(calculator, clusterResource, -+ assignment.getResource(), Resources.none())) { -+ updateSchedulerHealth(lastNodeUpdateTime, node, assignment); -+ return; ++ for (FriendConnection fc : conns) { ++ final Map ot = fc.getOverlayTransports(); ++ ++ for (OverlayEndpoint o : ot.values()) { ++ if (!includeLan && o.isLANLocal()) { ++ // not including lan local peers ++ } else { ++ totalUploadSpeed += o.getUploadRate(); ++ } ++ } + } -+ -+ // Only do non-exclusive allocation when node has node-labels. 
-+ if (StringUtils.equals(node.getPartition(), -+ RMNodeLabelsManager.NO_LABEL)) { -+ return; ++ ++ return totalUploadSpeed; ++ } ++ ++ public boolean isConnectionAllowed(InetAddress remoteIP, byte[] remotePubKey) { ++ Friend friend = friendManager.getFriend(remotePubKey); ++ if (stopped) { ++ Log.log(""connection denied: (f2f transfers disabled)"", logToStdOut); ++ return false; + } -+ -+ // Only do non-exclusive allocation when the node-label supports that ++ // check if we should allow this public key to connect ++ if (Arrays.equals(remotePubKey, ownPublicKey.getEncoded()) ++ && remoteIP.equals(myInstance.getExternalAddress())) { ++ Log.log(LogEvent.LT_INFORMATION, ""connection from self not allowed (if same ip)"", ++ logToStdOut); ++ return false; ++ } else if (friend == null) { ++ Log.log(LogEvent.LT_WARNING, "" access denied (not friend): "" + remoteIP, logToStdOut); ++ return false; ++ } else if (friend.isBlocked()) { ++ Log.log(LogEvent.LT_WARNING, "" access denied (friend blocked): "" + remoteIP, ++ logToStdOut); ++ return false; ++ } else if (friend.getFriendBannedUntil() > System.currentTimeMillis()) { ++ double minutesLeft = friend.getFriendBannedUntil() - System.currentTimeMillis() ++ / (60 * 1000.0); ++ friend.updateConnectionLog(true, ""incoming connection denied, friend blocked for "" ++ + minutesLeft + "" more minutes because of: "" + friend.getBannedReason()); ++ Log.log(LogEvent.LT_WARNING, "" access denied (friend blocked for "" + minutesLeft ++ + "" more minutes): "" + remoteIP, logToStdOut); ++ return false; ++ } ++ ++ for (FriendConnection c : connections.values()) { ++ if (c.getRemoteFriend().equals(friend)) { ++ Log.log(LogEvent.LT_WARNING, "" access denied (friend already connected): "" ++ + remoteIP, logToStdOut); ++ return false; ++ } ++ } ++ Log.log(LogEvent.LT_INFORMATION, ""friend connection ok: "" + remoteIP + "" :: "" + friend, ++ logToStdOut); ++ return true; ++ } ++ ++ /** ++ * make sure to synchronize before calling this function ++ */ ++ private void notifyConnectionListeners(Friend f, boolean connected) { ++ lock.lock(); + try { -+ if (rmContext.getNodeLabelManager().isExclusiveNodeLabel( -+ node.getPartition())) { -+ return; -+ } -+ } catch (IOException e) { -+ LOG.warn(""Exception when trying to get exclusivity of node label="" -+ + node.getPartition(), e); -+ return; ++ for (FriendConnectListener cb : friendConnectListeners) { ++ if (connected) { ++ cb.friendConnected(f); ++ } else { ++ cb.friendDisconnected(f); ++ } ++ } ++ } finally { ++ lock.unlock(); + } -+ -+ // Try to use NON_EXCLUSIVE -+ assignment = root.assignContainers( -+ clusterResource, -+ node, -+ // TODO, now we only consider limits for parent for non-labeled -+ // resources, should consider labeled resources as well. 
-+ new ResourceLimits(labelManager.getResourceByLabel( -+ RMNodeLabelsManager.NO_LABEL, clusterResource)), -+ SchedulingMode.IGNORE_PARTITION_EXCLUSIVITY); - updateSchedulerHealth(lastNodeUpdateTime, node, assignment); -+ if (Resources.greaterThan(calculator, clusterResource, -+ assignment.getResource(), Resources.none())) { -+ return; ++ } ++ ++ private boolean registerConnection(FriendConnection connection) { ++ lock.lock(); ++ try { ++ if (isConnectionAllowed(connection.getRemoteIp(), connection.getRemotePublicKey())) { ++ connections.put(connection.hashCode(), connection); ++ /* ++ * don't mark remote friend as connected until after the ++ * oneswarm handshake message is received ++ */ ++ // connection.getRemoteFriend().connected(connection.hashCode()); ++ Log.log(""registered connection: "" + connection, logToStdOut); ++ return true; ++ } else { ++ return false; ++ } ++ } finally { ++ lock.unlock(); + } - } - } else { -- LOG.info(""Skipping scheduling since node "" + node.getNodeID() + -- "" is reserved by application "" + -- node.getReservedContainer().getContainerId().getApplicationAttemptId() -- ); -+ LOG.info(""Skipping scheduling since node "" -+ + node.getNodeID() -+ + "" is reserved by application "" -+ + node.getReservedContainer().getContainerId() -+ .getApplicationAttemptId()); - } -- - } - - @Override -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java -index 102e5539f162a..4e8d61769ecdf 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java -@@ -319,6 +319,11 @@ public float getMaximumApplicationMasterResourcePerQueuePercent(String queue) { - getMaximumApplicationMasterResourcePercent()); - } - -+ public void setMaximumApplicationMasterResourcePerQueuePercent(String queue, -+ float percent) { -+ setFloat(getQueuePrefix(queue) + MAXIMUM_AM_RESOURCE_SUFFIX, percent); -+ } -+ - public float getNonLabeledQueueCapacity(String queue) { - float capacity = queue.equals(""root"") ? 
100.0f : getFloat( - getQueuePrefix(queue) + CAPACITY, UNDEFINED); -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/LeafQueue.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/LeafQueue.java -index 59a016f98140d..8a6a601f202f7 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/LeafQueue.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/LeafQueue.java -@@ -24,7 +24,6 @@ - import java.util.Collections; - import java.util.Comparator; - import java.util.HashMap; --import java.util.HashSet; - import java.util.Iterator; - import java.util.List; - import java.util.Map; -@@ -58,6 +57,7 @@ - import org.apache.hadoop.yarn.nodelabels.CommonNodeLabelsManager; - import org.apache.hadoop.yarn.security.AccessType; - import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager; -+import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt; - import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer; - import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerEventType; - import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerState; -@@ -718,39 +718,11 @@ private synchronized FiCaSchedulerApp getApplication( - ApplicationAttemptId applicationAttemptId) { - return applicationAttemptMap.get(applicationAttemptId); - } -- -- private static final CSAssignment NULL_ASSIGNMENT = -- new CSAssignment(Resources.createResource(0, 0), NodeType.NODE_LOCAL); -- -- private static final CSAssignment SKIP_ASSIGNMENT = new CSAssignment(true); -- -- private static Set getRequestLabelSetByExpression( -- String labelExpression) { -- Set labels = new HashSet(); -- if (null == labelExpression) { -- return labels; -- } -- for (String l : labelExpression.split(""&&"")) { -- if (l.trim().isEmpty()) { -- continue; -- } -- labels.add(l.trim()); -- } -- return labels; -- } -- -- private boolean checkResourceRequestMatchingNodeLabel(ResourceRequest offswitchResourceRequest, -- FiCaSchedulerNode node) { -- String askedNodeLabel = offswitchResourceRequest.getNodeLabelExpression(); -- if (null == askedNodeLabel) { -- askedNodeLabel = RMNodeLabelsManager.NO_LABEL; -- } -- return askedNodeLabel.equals(node.getPartition()); -- } - - @Override - public synchronized CSAssignment assignContainers(Resource clusterResource, -- FiCaSchedulerNode node, ResourceLimits currentResourceLimits) { -+ FiCaSchedulerNode node, ResourceLimits currentResourceLimits, -+ SchedulingMode schedulingMode) { - updateCurrentResourceLimits(currentResourceLimits, clusterResource); - - if(LOG.isDebugEnabled()) { -@@ -758,12 +730,6 @@ public synchronized CSAssignment assignContainers(Resource clusterResource, - + "" #applications="" + activeApplications.size()); - } - -- // if our queue cannot access this node, just return -- if (!SchedulerUtils.checkQueueAccessToNode(accessibleLabels, -- node.getLabels())) { -- return NULL_ASSIGNMENT; -- } -- - // Check for reserved resources - RMContainer reservedContainer = node.getReservedContainer(); - if (reservedContainer != null) { 
-@@ -771,8 +737,26 @@ public synchronized CSAssignment assignContainers(Resource clusterResource, - getApplication(reservedContainer.getApplicationAttemptId()); - synchronized (application) { - return assignReservedContainer(application, node, reservedContainer, -- clusterResource); -+ clusterResource, schedulingMode); -+ } + } -+ -+ // if our queue cannot access this node, just return -+ if (schedulingMode == SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY -+ && !accessibleToPartition(node.getPartition())) { -+ return NULL_ASSIGNMENT; ++ ++ public void registerForConnectNotifications(FriendConnectListener callback) { ++ lock.lock(); ++ try { ++ friendConnectListeners.add(callback); ++ } finally { ++ lock.unlock(); ++ } + } -+ -+ // Check if this queue need more resource, simply skip allocation if this -+ // queue doesn't need more resources. -+ if (!hasPendingResourceRequest(node.getPartition(), -+ clusterResource, schedulingMode)) { -+ if (LOG.isDebugEnabled()) { -+ LOG.debug(""Skip this queue="" + getQueuePath() -+ + "", because it doesn't need more resource, schedulingMode="" -+ + schedulingMode.name() + "" node-partition="" + node.getPartition()); - } -+ return NULL_ASSIGNMENT; - } - - // Try to assign containers to applications in order -@@ -783,6 +767,17 @@ public synchronized CSAssignment assignContainers(Resource clusterResource, - + application.getApplicationId()); - application.showRequests(); - } -+ -+ // Check if application needs more resource, skip if it doesn't need more. -+ if (!application.hasPendingResourceRequest(resourceCalculator, -+ node.getPartition(), clusterResource, schedulingMode)) { -+ if (LOG.isDebugEnabled()) { -+ LOG.debug(""Skip app_attempt="" + application.getApplicationAttemptId() -+ + "", because it doesn't need more resource, schedulingMode="" -+ + schedulingMode.name() + "" node-label="" + node.getPartition()); ++ ++ public void restartAllConnections() { ++ stopped = false; ++ } ++ ++ public void sendChatMessage(int connectionId, String inPlaintextMessage) { ++ FriendConnection conn = connections.get(connectionId); ++ conn.sendChat(inPlaintextMessage); ++ } ++ ++ void sendDirectedSearch(FriendConnection target, OSF2FHashSearch search) { ++ Log.log(""sending search to "" + target, logToStdOut); ++ target.sendSearch(search, true); ++ } ++ ++ public boolean sendFileListRequest(int connectionId, long maxCacheAge, ++ PluginCallback callback) { ++ ++ // just check if the request is for the local list ++ if (connectionId == OWN_CONNECTION_ID_MAGIC_NUMBER) { ++ callback.requestCompleted(filelistManager.getOwnFileList()); + } -+ continue; -+ } - - synchronized (application) { - // Check if this resource is on the blacklist -@@ -806,10 +801,27 @@ public synchronized CSAssignment assignContainers(Resource clusterResource, - continue; - } - -+ // AM container allocation doesn't support non-exclusive allocation to -+ // avoid painful of preempt an AM container -+ if (schedulingMode == SchedulingMode.IGNORE_PARTITION_EXCLUSIVITY) { -+ RMAppAttempt rmAppAttempt = -+ csContext.getRMContext().getRMApps() -+ .get(application.getApplicationId()).getCurrentAppAttempt(); -+ if (null == rmAppAttempt.getMasterContainer()) { -+ if (LOG.isDebugEnabled()) { -+ LOG.debug(""Skip allocating AM container to app_attempt="" -+ + application.getApplicationAttemptId() -+ + "", don't allow to allocate AM container in non-exclusive mode""); -+ } -+ break; ++ ++ FriendConnection conn = connections.get(connectionId); ++ ++ if (conn != null) { ++ ++ FileList oldList = 
filelistManager.getFriendsList(conn.getRemoteFriend()); ++ if (oldList != null ++ && (System.currentTimeMillis() - oldList.getCreated()) < maxCacheAge) { ++ callback.requestCompleted(oldList); + } -+ } -+ - // Is the node-label-expression of this offswitch resource request - // matches the node's label? - // If not match, jump to next priority. -- if (!checkResourceRequestMatchingNodeLabel(anyRequest, node)) { -+ if (!SchedulerUtils.checkResourceRequestMatchingNodePartition( -+ anyRequest, node.getPartition(), schedulingMode)) { - continue; - } - -@@ -822,10 +834,6 @@ public synchronized CSAssignment assignContainers(Resource clusterResource, - } - } - -- Set requestedNodeLabels = -- getRequestLabelSetByExpression(anyRequest -- .getNodeLabelExpression()); -- - // Compute user-limit & set headroom - // Note: We compute both user-limit & headroom with the highest - // priority request as the target. -@@ -833,27 +841,61 @@ public synchronized CSAssignment assignContainers(Resource clusterResource, - // before all higher priority ones are serviced. - Resource userLimit = - computeUserLimitAndSetHeadroom(application, clusterResource, -- required, requestedNodeLabels); -+ required, node.getPartition(), schedulingMode); - - // Check queue max-capacity limit -- if (!super.canAssignToThisQueue(clusterResource, node.getLabels(), -- this.currentResourceLimits, required, application.getCurrentReservation())) { -+ if (!super.canAssignToThisQueue(clusterResource, node.getPartition(), -+ this.currentResourceLimits, required, -+ application.getCurrentReservation(), schedulingMode)) { - return NULL_ASSIGNMENT; - } - - // Check user limit - if (!canAssignToUser(clusterResource, application.getUser(), userLimit, -- application, true, requestedNodeLabels)) { -+ application, true, node.getPartition())) { - break; - } - - // Inform the application it is about to get a scheduling opportunity - application.addSchedulingOpportunity(priority); - -+ // Increase missed-non-partitioned-resource-request-opportunity. -+ // This is to make sure non-partitioned-resource-request will prefer -+ // to be allocated to non-partitioned nodes -+ int missedNonPartitionedRequestSchedulingOpportunity = 0; -+ if (anyRequest.getNodeLabelExpression().equals( -+ RMNodeLabelsManager.NO_LABEL)) { -+ missedNonPartitionedRequestSchedulingOpportunity = -+ application -+ .addMissedNonPartitionedRequestSchedulingOpportunity(priority); -+ } -+ -+ if (schedulingMode == SchedulingMode.IGNORE_PARTITION_EXCLUSIVITY) { -+ // Before doing allocation, we need to check scheduling opportunity to -+ // make sure : non-partitioned resource request should be scheduled to -+ // non-partitioned partition first. 
-+ if (missedNonPartitionedRequestSchedulingOpportunity < scheduler -+ .getNumClusterNodes()) { -+ if (LOG.isDebugEnabled()) { -+ LOG.debug(""Skip app_attempt="" -+ + application.getApplicationAttemptId() -+ + "" priority="" -+ + priority -+ + "" because missed-non-partitioned-resource-request"" -+ + "" opportunity under requred:"" -+ + "" Now="" + missedNonPartitionedRequestSchedulingOpportunity -+ + "" required="" -+ + scheduler.getNumClusterNodes()); -+ } + -+ break; ++ Log.log(""sending filelist request to "" + conn); ++ conn.sendFileListRequest(OSF2FMessage.FILE_LIST_TYPE_COMPLETE, callback); ++ return true; ++ } else { ++ System.err ++ .println(""tried to get filelist for unknown connection id (friend just went offline?)!, stack trace is:""); ++ new RuntimeException().printStackTrace(); ++ } ++ return false; ++ } ++ ++ // public void startDownload(byte type, byte[] metainfo, ++ // boolean createIfNotExist) { ++ // if (type == OSF2FMessage.METAINFO_TYPE_BITTORRENT) { ++ // ++ ++ public boolean sendMetaInfoRequest(int connectionId, int channelId, byte[] infohash, ++ int lengthHint, PluginCallback callback) { ++ FriendConnection conn = connections.get(connectionId); ++ Log.log(""sending metainfo request to "" + conn); ++ if (conn != null) { ++ conn.sendMetaInfoRequest(OSF2FMessage.METAINFO_TYPE_BITTORRENT, channelId, infohash, ++ lengthHint, callback); ++ return true; ++ } ++ return false; ++ ++ } ++ ++ void sendSearchOrCancel(OSF2FSearch search, boolean skipQueue, boolean forceSend) { ++ Log.log(""sending search/cancel to "" + connections.size(), logToStdOut); ++ int numSent = 0; ++ for (FriendConnection conn : connections.values()) { ++ ++ boolean shouldSend = true; ++ if (!forceSend) { ++ shouldSend = shouldForwardSearch(search, conn); + } -+ } -+ - // Try to schedule - CSAssignment assignment = - assignContainersOnNode(clusterResource, node, application, priority, -- null); -+ null, schedulingMode); - - // Did the application skip this node? - if (assignment.getSkipped()) { -@@ -870,9 +912,9 @@ public synchronized CSAssignment assignContainers(Resource clusterResource, - // Book-keeping - // Note: Update headroom to account for current allocation too... - allocateResource(clusterResource, application, assigned, -- node.getLabels()); -+ node.getPartition()); - -- // Don't reset scheduling opportunities for non-local assignments -+ // Don't reset scheduling opportunities for offswitch assignments - // otherwise the app will be delayed for each non-local assignment. - // This helps apps with many off-cluster requests schedule faster. - if (assignment.getType() != NodeType.OFF_SWITCH) { -@@ -881,6 +923,10 @@ public synchronized CSAssignment assignContainers(Resource clusterResource, - } - application.resetSchedulingOpportunities(priority); - } -+ // Non-exclusive scheduling opportunity is different: we need reset -+ // it every time to make sure non-labeled resource request will be -+ // most likely allocated on non-labeled nodes first. -+ application.resetMissedNonPartitionedRequestSchedulingOpportunity(priority); - - // Done - return assignment; -@@ -904,7 +950,8 @@ public synchronized CSAssignment assignContainers(Resource clusterResource, - - private synchronized CSAssignment assignReservedContainer( - FiCaSchedulerApp application, FiCaSchedulerNode node, -- RMContainer rmContainer, Resource clusterResource) { -+ RMContainer rmContainer, Resource clusterResource, -+ SchedulingMode schedulingMode) { - // Do we still need this reservation? 
- Priority priority = rmContainer.getReservedPriority(); - if (application.getTotalRequiredResources(priority) == 0) { -@@ -915,7 +962,7 @@ private synchronized CSAssignment assignReservedContainer( - // Try to assign if we have sufficient resources - CSAssignment tmp = - assignContainersOnNode(clusterResource, node, application, priority, -- rmContainer); -+ rmContainer, schedulingMode); - - // Doesn't matter... since it's already charged for at time of reservation - // ""re-reservation"" is *free* -@@ -929,7 +976,8 @@ private synchronized CSAssignment assignReservedContainer( - protected Resource getHeadroom(User user, Resource queueCurrentLimit, - Resource clusterResource, FiCaSchedulerApp application, Resource required) { - return getHeadroom(user, queueCurrentLimit, clusterResource, -- computeUserLimit(application, clusterResource, required, user, null)); -+ computeUserLimit(application, clusterResource, required, user, -+ RMNodeLabelsManager.NO_LABEL, SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY)); - } - - private Resource getHeadroom(User user, Resource currentResourceLimit, -@@ -973,7 +1021,8 @@ private void setQueueResourceLimitsInfo( ++ if (shouldSend) { ++ conn.sendSearch(search.clone(), skipQueue); ++ numSent++; ++ } ++ if (search instanceof OSF2FHashSearch) { ++ OSF2FHashSearch hs = (OSF2FHashSearch) search; ++ searchTimingsLogger.log(System.currentTimeMillis() + "", send_search, "" ++ + conn.getRemoteFriend().getNick() + "", "" + hs.getSearchID() + "", "" ++ + hs.getInfohashhash()); ++ } ++ } ++ /* ++ * for searches sent by us, if we didn't send it to anyone try again but ++ * without the randomness linitng who we are sending to ++ */ ++ if (numSent == 0 && !forceSend) { ++ sendSearchOrCancel(search, skipQueue, true); ++ } ++ } ++ ++ /** ++ * to protect against colluding friends we are only forwarding searches with ++ * 95% probability ++ * ++ * if forcesend = true the search will be forwarded anyway even if the ++ * randomness says that friend shouldn't be forwarded ++ */ ++ private boolean shouldForwardSearch(OSF2FSearch search, FriendConnection conn) { ++ boolean shouldSend = true; ++ if (search instanceof OSF2FHashSearch) { ++ shouldSend = shouldForwardSearch(((OSF2FHashSearch) search).getInfohashhash(), conn); ++ } else if (search instanceof OSF2FSearchCancel) { ++ long infohash = searchManager.getInfoHashHashFromSearchId(search.getSearchID()); ++ if (infohash != -1) { ++ shouldSend = shouldForwardSearch(infohash, conn); ++ } else { ++ shouldSend = false; ++ } ++ } ++ return shouldSend; ++ } ++ ++ private boolean shouldForwardSearch(long infohashhash, FriendConnection conn) { ++ if (conn.getRemoteFriend().isCanSeeFileList()) { ++ return true; ++ } ++ byte[] infohashbytes = RandomnessManager.getBytes(infohashhash); ++ byte[] friendHash = RandomnessManager.getBytes(conn.getRemotePublicKeyHash()); ++ byte[] all = new byte[infohashbytes.length + friendHash.length]; ++ System.arraycopy(infohashbytes, 0, all, 0, infohashbytes.length); ++ System.arraycopy(friendHash, 0, all, infohashbytes.length, friendHash.length); ++ ++ int randomVal = randomnessManager.getDeterministicRandomInt(all); ++ if (randomVal < 0) { ++ randomVal = -randomVal; ++ } ++ if (randomVal < Integer.MAX_VALUE * mForwardSearchProbability) { ++ return true; ++ } else { ++ return false; ++ } ++ } ++ ++ public void triggerFileListUpdates() { ++ List conns = new LinkedList(); ++ lock.lock(); ++ try { ++ conns.addAll(connections.values()); ++ } finally { ++ lock.unlock(); ++ } ++ for (FriendConnection conn : 
connections.values()) { ++ conn.triggerFileListSend(); ++ } ++ } ++ ++ private class ConnectionChecker extends TimerTask { ++ ++ @Override ++ public void run() { ++ ++ try { ++ // first, check if we have any overlays that are timed out ++ for (FriendConnection connection : connections.values()) { ++ connection.clearTimedOutForwards(); ++ connection.clearTimedOutTransports(); ++ connection.clearTimedOutSearchRecords(); ++ connection.clearOldMetainfoRequests(); ++ } ++ } catch (Throwable t) { ++ Debug.out(""F2F Connection Checker: got error when clearing transports/forwards"", t); ++ } ++ try { ++ // then, check if we need to send any keepalives ++ for (FriendConnection connection : connections.values()) { ++ connection.doKeepAliveCheck(); ++ } ++ } catch (Throwable t) { ++ Debug.out(""F2F Connection Checker: got error when sending keep alives"", t); ++ } ++ ++ try { ++ // check if we have any timed out connections ++ List timedOut = new ArrayList(); ++ for (FriendConnection connection : connections.values()) { ++ if (connection.isTimedOut()) { ++ timedOut.add(connection); ++ } ++ } ++ // and close them ++ for (FriendConnection friendConnection : timedOut) { ++ friendConnection.close(); ++ } ++ } catch (Throwable t) { ++ Debug.out(""F2F Connection Checker: got error when clearing friend connections"", t); ++ } ++ ++ try { ++ // then, recycle the search IDs ++ searchManager.clearTimedOutSearches(); ++ } catch (Throwable t) { ++ Debug.out(""F2F Connection Checker: got error when clearing timed out searches"", t); ++ } ++ lastConnectionCheckRun = System.currentTimeMillis(); ++ } ++ } ++ ++ class FriendConnectionListener { ++ public boolean connectSuccess(FriendConnection friendConnection) { ++ if (!registerConnection(friendConnection)) { ++ Log.log(""Unable to register connection, "" ++ + ""connect count to high, closing connection"", logToStdOut); ++ friendConnection.close(); ++ return false; ++ } ++ return true; ++ } ++ ++ public void disconnected(FriendConnection friendConnection) { ++ if (friendConnection.isFileListReceived()) { ++ notifyConnectionListeners(friendConnection.getRemoteFriend(), false); ++ } ++ deregisterConnection(friendConnection); ++ } ++ ++ public void gotSearchCancel(FriendConnection friendConnection, OSF2FSearchCancel msg) { ++ searchManager.handleIncomingSearchCancel(friendConnection, msg); ++ } ++ ++ public void gotSearchMessage(FriendConnection friendConnection, OSF2FSearch msg) { ++ if (msg instanceof OSF2FHashSearch) { ++ OSF2FHashSearch hs = (OSF2FHashSearch) msg; ++ searchTimingsLogger.log(System.currentTimeMillis() + "", search, "" ++ + friendConnection.getRemoteFriend().getNick() + "", "" + hs.getSearchID() ++ + "", "" + hs.getInfohashhash() + "", "" ++ + friendConnection.getRemoteIp().getHostAddress()); ++ } ++ searchManager.handleIncomingSearch(friendConnection, msg); ++ } ++ ++ public void gotSearchResponse(FriendConnection friendConnection, OSF2FSearchResp msg) { ++ if (msg instanceof OSF2FHashSearchResp) { ++ OSF2FHashSearchResp hsr = (OSF2FHashSearchResp) msg; ++ searchTimingsLogger.log(System.currentTimeMillis() + "", response, "" ++ + friendConnection.getRemoteFriend().getNick() + "", "" + hsr.getSearchID() ++ + "", "" + hsr.getChannelID() + "", "" ++ + friendConnection.getRemoteIp().getHostAddress() + "", "" + hsr.getPathID()); ++ } ++ searchManager.handleIncomingSearchResponse(friendConnection, msg); ++ } ++ ++ @SuppressWarnings(""unchecked"") ++ public void handshakeCompletedFully(final FriendConnection friendConnection) { ++ ++ 
notifyConnectionListeners(friendConnection.getRemoteFriend(), true); ++ ++ /* ++ * check if we have any running downloads that this friend has ++ */ ++ Log.log(""New friend connected, checking if friend has anything we want"", logToStdOut); ++ List runningDownloadHashes = new LinkedList(); ++ List dms = AzureusCoreImpl.getSingleton().getGlobalManager() ++ .getDownloadManagers(); ++ for (DownloadManager dm : dms) { ++ if (dm.getState() == DownloadManager.STATE_DOWNLOADING) { ++ try { ++ TOTorrent torrent = dm.getTorrent(); ++ if (torrent != null) { ++ runningDownloadHashes.add(torrent.getHash()); ++ } ++ } catch (TOTorrentException e) { ++ e.printStackTrace(); ++ } ++ } ++ } ++ Log.log(""found "" + runningDownloadHashes.size() + "" running downloads"", logToStdOut); ++ FileList remoteFileList = filelistManager.getFriendsList(friendConnection ++ .getRemoteFriend()); ++ for (byte[] hash : runningDownloadHashes) { ++ if (remoteFileList.contains(hash)) { ++ Log.log(""sending search for '"" + Base32.encode(hash) + ""' to "" ++ + friendConnection.getRemoteFriend(), logToStdOut); ++ searchManager.sendDirectedHashSearch(friendConnection, hash); ++ } ++ } ++ ++ /** ++ * Check if we need to send any pending chat messages to this user. ++ * (5/sec) ++ */ ++ Thread queryThread = new Thread(""Queued chat SQL query for: "" ++ + friendConnection.getRemoteFriend().getNick()) { ++ @Override ++ public void run() { ++ ++ try { ++ Thread.sleep(3 * 1000); ++ } catch (InterruptedException e) { ++ } ++ ++ String base64Key = new String(Base64.encode(friendConnection ++ .getRemotePublicKey())); ++ ChatDAO dao = ChatDAO.get(); ++ List out = dao.getQueuedMessagesForUser(base64Key); ++ for (Chat c : out) { ++ if (friendConnection.isTimedOut() || friendConnection.isClosing()) { ++ return; ++ } ++ ++ friendConnection.sendChat(c.getMessage() + "" (sent "" ++ + StringTools.formatDateAppleLike(new Date(c.getTimestamp()), true) ++ + "")""); ++ dao.markSent(c.getUID()); ++ ++ try { ++ Thread.sleep(200); ++ } catch (InterruptedException e) { ++ } ++ } ++ } ++ }; ++ queryThread.setDaemon(true); ++ queryThread.start(); ++ } ++ } + } - @Lock({LeafQueue.class, FiCaSchedulerApp.class}) - Resource computeUserLimitAndSetHeadroom(FiCaSchedulerApp application, -- Resource clusterResource, Resource required, Set requestedLabels) { -+ Resource clusterResource, Resource required, String nodePartition, -+ SchedulingMode schedulingMode) { - String user = application.getUser(); - User queueUser = getUser(user); + class RandomnessManager { +- private byte[] secretBytes = new byte[20]; +- +- public RandomnessManager() { +- SecureRandom random; +- try { +- random = SecureRandom.getInstance(""SHA1PRNG""); +- random.nextBytes(secretBytes); +- } catch (NoSuchAlgorithmException e) { +- // TODO Auto-generated catch block +- e.printStackTrace(); +- secretBytes = null; +- } +- } +- +- public RandomnessManager(byte[] secretBytes) { +- this.secretBytes = secretBytes; +- } +- +- /** +- * returns a random int between 0 (inclusive) and n (exclusive) seeded by +- * seedBytes +- */ +- +- public int getDeterministicNextInt(byte[] seedBytes, int minValue, int maxValue) { +- int randomInt = getDeterministicRandomInt(seedBytes); +- if (randomInt < 0) { +- randomInt = -randomInt; +- } +- return minValue + (randomInt % (maxValue - minValue)); +- } +- +- public int getDeterministicNextInt(int seed, int minValue, int maxValue) { +- byte[] seedBytes = getBytes(seed); +- return getDeterministicNextInt(seedBytes, minValue, maxValue); +- } +- +- public int 
getDeterministicNextInt(long seed, int minValue, int maxValue) { +- byte[] seedBytes = getBytes(seed); +- return getDeterministicNextInt(seedBytes, minValue, maxValue); +- } +- +- public int getDeterministicRandomInt(byte[] seedBytes) { +- if (secretBytes != null) { +- byte[] sha1input = new byte[secretBytes.length + seedBytes.length]; +- System.arraycopy(secretBytes, 0, sha1input, 0, secretBytes.length); +- System.arraycopy(seedBytes, 0, sha1input, secretBytes.length, seedBytes.length); +- MessageDigest md; +- try { +- md = MessageDigest.getInstance(""SHA-1""); +- md.update(sha1input); +- byte[] sha1 = md.digest(); +- ByteArrayInputStream bis = new ByteArrayInputStream(sha1); +- DataInputStream in = new DataInputStream(bis); +- return in.readInt(); +- } catch (NoSuchAlgorithmException e) { +- // TODO Auto-generated catch block +- e.printStackTrace(); +- } catch (IOException e) { +- // TODO Auto-generated catch block +- e.printStackTrace(); +- } +- } +- +- throw new RuntimeException(""unable to generate deterministic random int""); +- } +- +- /** +- * Returns a random int seeded with the secret appended to the seed. For a +- * given seed the returned value will always be the same for a given +- * instance of the RandomnessManager +- * +- * @param seed +- * @return +- * @throws NoSuchAlgorithmException +- * @throws IOException +- */ +- public int getDeterministicRandomInt(int seed) { +- byte[] seedBytes = getBytes(seed); +- return getDeterministicRandomInt(seedBytes); +- } +- +- public int getDeterministicRandomInt(long seed) { +- byte[] seedBytes = getBytes(seed); +- return getDeterministicRandomInt(seedBytes); +- } +- +- public byte[] getSecretBytes() { +- return secretBytes; +- } +- +- static byte[] getBytes(int val) { +- byte[] b = new byte[4]; +- b[3] = (byte) (val >>> 0); +- b[2] = (byte) (val >>> 8); +- b[1] = (byte) (val >>> 16); +- b[0] = (byte) (val >>> 24); +- return b; +- } +- +- static byte[] getBytes(long val) { +- byte[] b = new byte[8]; +- b[7] = (byte) (val >>> 0); +- b[6] = (byte) (val >>> 8); +- b[5] = (byte) (val >>> 16); +- b[4] = (byte) (val >>> 24); +- b[3] = (byte) (val >>> 32); +- b[2] = (byte) (val >>> 40); +- b[1] = (byte) (val >>> 48); +- b[0] = (byte) (val >>> 56); +- return b; +- } ++ private byte[] secretBytes = new byte[20]; ++ ++ public RandomnessManager() { ++ SecureRandom random; ++ try { ++ random = SecureRandom.getInstance(""SHA1PRNG""); ++ random.nextBytes(secretBytes); ++ } catch (NoSuchAlgorithmException e) { ++ // TODO Auto-generated catch block ++ e.printStackTrace(); ++ secretBytes = null; ++ } ++ } ++ ++ public RandomnessManager(byte[] secretBytes) { ++ this.secretBytes = secretBytes; ++ } ++ ++ /** ++ * returns a random int between 0 (inclusive) and n (exclusive) seeded by ++ * seedBytes ++ */ ++ ++ public int getDeterministicNextInt(byte[] seedBytes, int minValue, int maxValue) { ++ int randomInt = getDeterministicRandomInt(seedBytes); ++ if (randomInt < 0) { ++ randomInt = -randomInt; ++ } ++ return minValue + (randomInt % (maxValue - minValue)); ++ } ++ ++ public int getDeterministicNextInt(int seed, int minValue, int maxValue) { ++ byte[] seedBytes = getBytes(seed); ++ return getDeterministicNextInt(seedBytes, minValue, maxValue); ++ } ++ ++ public int getDeterministicNextInt(long seed, int minValue, int maxValue) { ++ byte[] seedBytes = getBytes(seed); ++ return getDeterministicNextInt(seedBytes, minValue, maxValue); ++ } ++ ++ public int getDeterministicRandomInt(byte[] seedBytes) { ++ if (secretBytes != null) { ++ byte[] sha1input = new 
byte[secretBytes.length + seedBytes.length]; ++ System.arraycopy(secretBytes, 0, sha1input, 0, secretBytes.length); ++ System.arraycopy(seedBytes, 0, sha1input, secretBytes.length, seedBytes.length); ++ MessageDigest md; ++ try { ++ md = MessageDigest.getInstance(""SHA-1""); ++ md.update(sha1input); ++ byte[] sha1 = md.digest(); ++ ByteArrayInputStream bis = new ByteArrayInputStream(sha1); ++ DataInputStream in = new DataInputStream(bis); ++ return in.readInt(); ++ } catch (NoSuchAlgorithmException e) { ++ // TODO Auto-generated catch block ++ e.printStackTrace(); ++ } catch (IOException e) { ++ // TODO Auto-generated catch block ++ e.printStackTrace(); ++ } ++ } ++ ++ throw new RuntimeException(""unable to generate deterministic random int""); ++ } ++ ++ /** ++ * Returns a random int seeded with the secret appended to the seed. For a ++ * given seed the returned value will always be the same for a given ++ * instance of the RandomnessManager ++ * ++ * @param seed ++ * @return ++ * @throws NoSuchAlgorithmException ++ * @throws IOException ++ */ ++ public int getDeterministicRandomInt(int seed) { ++ byte[] seedBytes = getBytes(seed); ++ return getDeterministicRandomInt(seedBytes); ++ } ++ ++ public int getDeterministicRandomInt(long seed) { ++ byte[] seedBytes = getBytes(seed); ++ return getDeterministicRandomInt(seedBytes); ++ } ++ ++ public byte[] getSecretBytes() { ++ return secretBytes; ++ } ++ ++ static byte[] getBytes(int val) { ++ byte[] b = new byte[4]; ++ b[3] = (byte) (val >>> 0); ++ b[2] = (byte) (val >>> 8); ++ b[1] = (byte) (val >>> 16); ++ b[0] = (byte) (val >>> 24); ++ return b; ++ } ++ ++ static byte[] getBytes(long val) { ++ byte[] b = new byte[8]; ++ b[7] = (byte) (val >>> 0); ++ b[6] = (byte) (val >>> 8); ++ b[5] = (byte) (val >>> 16); ++ b[4] = (byte) (val >>> 24); ++ b[3] = (byte) (val >>> 32); ++ b[2] = (byte) (val >>> 40); ++ b[1] = (byte) (val >>> 48); ++ b[0] = (byte) (val >>> 56); ++ return b; ++ } -@@ -981,7 +1030,7 @@ Resource computeUserLimitAndSetHeadroom(FiCaSchedulerApp application, - // TODO, need consider headroom respect labels also - Resource userLimit = - computeUserLimit(application, clusterResource, required, -- queueUser, requestedLabels); -+ queueUser, nodePartition, schedulingMode); + } +diff --git a/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/network/OverlayTransport.java b/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/network/OverlayTransport.java +index 9ad34c81..8cdb2c01 100644 +--- a/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/network/OverlayTransport.java ++++ b/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/network/OverlayTransport.java +@@ -9,7 +9,6 @@ + import java.util.LinkedList; + import java.util.List; + import java.util.Random; +-import java.util.TimerTask; + import java.util.logging.Logger; - setQueueResourceLimitsInfo(clusterResource); - -@@ -1010,34 +1059,18 @@ Resource computeUserLimitAndSetHeadroom(FiCaSchedulerApp application, - @Lock(NoLock.class) - private Resource computeUserLimit(FiCaSchedulerApp application, - Resource clusterResource, Resource required, User user, -- Set requestedLabels) { -+ String nodePartition, SchedulingMode schedulingMode) { - // What is our current capacity? - // * It is equal to the max(required, queue-capacity) if - // we're running below capacity. 
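The re-added RandomnessManager derives a repeatable pseudo-random int by hashing a per-instance secret concatenated with the caller's seed (SHA-1) and reading the first four bytes of the digest as an int, optionally folded into [minValue, maxValue). A compact JDK-only sketch of the same idea; it mirrors the methods in the diff but is illustrative, not the project's code:

import java.nio.ByteBuffer;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;

public class DeterministicRandom {
    private final byte[] secret = new byte[20];

    public DeterministicRandom() {
        new SecureRandom().nextBytes(secret); // per-instance secret, as in the diff
    }

    /** SHA-1(secret || seed), first 4 bytes of the digest read as a big-endian int. */
    public int deterministicInt(byte[] seed) {
        try {
            MessageDigest md = MessageDigest.getInstance("SHA-1");
            md.update(secret);
            md.update(seed);
            return ByteBuffer.wrap(md.digest()).getInt();
        } catch (NoSuchAlgorithmException e) {
            throw new RuntimeException("unable to generate deterministic random int", e);
        }
    }

    /** Folds the hash into [minValue, maxValue), like getDeterministicNextInt. */
    public int deterministicInt(byte[] seed, int minValue, int maxValue) {
        int r = deterministicInt(seed);
        if (r < 0) {
            r = -r; // same caveat as the original: Integer.MIN_VALUE stays negative
        }
        return minValue + (r % (maxValue - minValue));
    }

    /** Big-endian encoding of a long seed, equivalent to the getBytes(long) helper. */
    public int deterministicInt(long seed, int minValue, int maxValue) {
        return deterministicInt(ByteBuffer.allocate(8).putLong(seed).array(), minValue, maxValue);
    }
}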
The 'max' ensures that jobs in queues - // with miniscule capacity (< 1 slot) make progress - // * If we're running over capacity, then its - // (usedResources + required) (which extra resources we are allocating) -- Resource queueCapacity = Resource.newInstance(0, 0); -- if (requestedLabels != null && !requestedLabels.isEmpty()) { -- // if we have multiple labels to request, we will choose to use the first -- // label -- String firstLabel = requestedLabels.iterator().next(); -- queueCapacity = -- Resources -- .max(resourceCalculator, clusterResource, queueCapacity, -- Resources.multiplyAndNormalizeUp(resourceCalculator, -- labelManager.getResourceByLabel(firstLabel, -- clusterResource), -- queueCapacities.getAbsoluteCapacity(firstLabel), -- minimumAllocation)); -- } else { -- // else there's no label on request, just to use absolute capacity as -- // capacity for nodes without label -- queueCapacity = -- Resources.multiplyAndNormalizeUp(resourceCalculator, labelManager -- .getResourceByLabel(CommonNodeLabelsManager.NO_LABEL, clusterResource), -- queueCapacities.getAbsoluteCapacity(), minimumAllocation); + import org.bouncycastle.util.encoders.Base64; +@@ -20,7 +19,6 @@ + import org.gudy.azureus2.core3.peer.impl.PEPeerTransport; + import org.gudy.azureus2.core3.peer.impl.PEPeerTransportFactory; + import org.gudy.azureus2.core3.util.AENetworkClassifier; +-import org.gudy.azureus2.core3.util.Average; + import org.gudy.azureus2.core3.util.Debug; + import org.gudy.azureus2.core3.util.DirectByteBuffer; + import org.gudy.azureus2.core3.util.DirectByteBufferPool; +@@ -30,7 +28,6 @@ + import com.aelitis.azureus.core.networkmanager.ConnectionEndpoint; + import com.aelitis.azureus.core.networkmanager.EventWaiter; + import com.aelitis.azureus.core.networkmanager.NetworkConnection; +-import com.aelitis.azureus.core.networkmanager.NetworkManager; + import com.aelitis.azureus.core.networkmanager.ProtocolEndpoint; + import com.aelitis.azureus.core.networkmanager.Transport; + import com.aelitis.azureus.core.networkmanager.TransportEndpoint; +@@ -38,211 +35,63 @@ + import com.aelitis.azureus.core.peermanager.messaging.bittorrent.BTMessageDecoder; + import com.aelitis.azureus.core.peermanager.messaging.bittorrent.BTMessageEncoder; + +-import edu.washington.cs.oneswarm.f2f.Friend; + import edu.washington.cs.oneswarm.f2f.OSF2FAzSwtUi; + import edu.washington.cs.oneswarm.f2f.messaging.OSF2FChannelDataMsg; +-import edu.washington.cs.oneswarm.f2f.messaging.OSF2FChannelReset; + import edu.washington.cs.oneswarm.f2f.messaging.OSF2FMessage; +-import edu.washington.cs.oneswarm.f2f.network.DelayedExecutorService.DelayedExecutor; + import edu.washington.cs.oneswarm.f2f.share.DownloadManagerStarter; + import edu.washington.cs.oneswarm.f2f.share.DownloadManagerStarter.DownloadManagerStartListener; + +-public class OverlayTransport implements Transport { ++public class OverlayTransport extends OverlayEndpoint implements Transport { + +- private final static Logger logger = Logger.getLogger(OverlayTransport.class.getName()); +- +- class OverlayProtocolEndpoint implements ProtocolEndpoint { +- private ConnectionEndpoint connectionEndpoint; +- +- public OverlayProtocolEndpoint() { +- connectionEndpoint = new ConnectionEndpoint(getRandomAddr()); +- } +- +- public Transport connectOutbound(boolean connect_with_crypto, boolean allow_fallback, +- byte[][] shared_secrets, ByteBuffer initial_data, boolean high_priority, +- ConnectListener listener) { +- Debug.out(""tried to create outgoing OverlayTransport, this should never 
happen!!!""); +- throw new RuntimeException(""not implemented""); +- } +- +- public Transport connectOutbound(boolean connect_with_crypto, boolean allow_fallback, +- byte[][] shared_secrets, ByteBuffer initial_data, ConnectListener listener) { +- Debug.out(""tried to create outgoing OverlayTransport, this should never happen!!!""); +- throw new RuntimeException(""not implemented""); +- } +- +- public ConnectionEndpoint getConnectionEndpoint() { +- return connectionEndpoint; +- } +- +- public String getDescription() { +- return ""PROTOCOL_TCP""; +- } +- +- public int getType() { +- return PROTOCOL_TCP; +- } +- +- public void setConnectionEndpoint(ConnectionEndpoint ce) { +- this.connectionEndpoint = ce; +- } - } -+ Resource queueCapacity = -+ Resources.multiplyAndNormalizeUp(resourceCalculator, -+ labelManager.getResourceByLabel(nodePartition, clusterResource), -+ queueCapacities.getAbsoluteCapacity(nodePartition), -+ minimumAllocation); - - // Allow progress for queues with miniscule capacity - queueCapacity = -@@ -1047,33 +1080,56 @@ private Resource computeUserLimit(FiCaSchedulerApp application, - required); - - Resource currentCapacity = -- Resources.lessThan(resourceCalculator, clusterResource, -- queueUsage.getUsed(), queueCapacity) ? -- queueCapacity : Resources.add(queueUsage.getUsed(), required); -+ Resources.lessThan(resourceCalculator, clusterResource, -+ queueUsage.getUsed(nodePartition), queueCapacity) ? queueCapacity -+ : Resources.add(queueUsage.getUsed(nodePartition), required); - - // Never allow a single user to take more than the - // queue's configured capacity * user-limit-factor. - // Also, the queue's configured capacity should be higher than - // queue-hard-limit * ulMin - -- final int activeUsers = activeUsersManager.getNumActiveUsers(); -- -- Resource limit = -+ final int activeUsers = activeUsersManager.getNumActiveUsers(); -+ -+ // User limit resource is determined by: -+ // max{currentCapacity / #activeUsers, currentCapacity * user-limit-percentage%) -+ Resource userLimitResource = Resources.max( -+ resourceCalculator, clusterResource, -+ Resources.divideAndCeil( -+ resourceCalculator, currentCapacity, activeUsers), -+ Resources.divideAndCeil( -+ resourceCalculator, -+ Resources.multiplyAndRoundDown( -+ currentCapacity, userLimit), -+ 100) -+ ); -+ -+ // User limit is capped by maxUserLimit -+ // - maxUserLimit = queueCapacity * user-limit-factor (RESPECT_PARTITION_EXCLUSIVITY) -+ // - maxUserLimit = total-partition-resource (IGNORE_PARTITION_EXCLUSIVITY) -+ // -+ // In IGNORE_PARTITION_EXCLUSIVITY mode, if a queue cannot access a -+ // partition, its guaranteed resource on that partition is 0. And -+ // user-limit-factor computation is based on queue's guaranteed capacity. So -+ // we will not cap user-limit as well as used resource when doing -+ // IGNORE_PARTITION_EXCLUSIVITY allocation. 
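Taken together, the comments above give the limit as: the larger of an even share (currentCapacity / #activeUsers) and the configured minimum share (currentCapacity * user-limit-percent / 100), capped by maxUserLimit and rounded up to the minimum allocation. A small arithmetic sketch over plain megabyte values, ignoring the Resource/ResourceCalculator machinery, just to make the formula concrete:

public class UserLimitFormula {
    /**
     * Illustrative only: computes a user limit in MB from the quantities named
     * in the comments above. The real code operates on Resource objects.
     */
    static long userLimitMb(long currentCapacityMb, int activeUsers,
                            int userLimitPercent, long maxUserLimitMb, long minAllocationMb) {
        long evenShare = (currentCapacityMb + activeUsers - 1) / activeUsers;  // divideAndCeil
        long minShare = (currentCapacityMb * userLimitPercent) / 100;          // user-limit-percentage
        long limit = Math.min(Math.max(evenShare, minShare), maxUserLimitMb);  // cap by maxUserLimit
        // round up to a multiple of the minimum allocation, like Resources.roundUp
        return ((limit + minAllocationMb - 1) / minAllocationMb) * minAllocationMb;
    }

    public static void main(String[] args) {
        // e.g. 100 GB current capacity, 4 active users, user-limit 25%,
        // 200 GB max user limit, 1 GB minimum allocation -> 25600 MB (25 GB) per user
        System.out.println(userLimitMb(100 * 1024, 4, 25, 200 * 1024, 1024));
    }
}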
-+ Resource maxUserLimit = Resources.none(); -+ if (schedulingMode == SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY) { -+ maxUserLimit = -+ Resources.multiplyAndRoundDown(queueCapacity, userLimitFactor); -+ } else if (schedulingMode == SchedulingMode.IGNORE_PARTITION_EXCLUSIVITY) { -+ maxUserLimit = -+ labelManager.getResourceByLabel(nodePartition, clusterResource); -+ } -+ -+ // Cap final user limit with maxUserLimit -+ userLimitResource = - Resources.roundUp( - resourceCalculator, - Resources.min( - resourceCalculator, clusterResource, -- Resources.max( -- resourceCalculator, clusterResource, -- Resources.divideAndCeil( -- resourceCalculator, currentCapacity, activeUsers), -- Resources.divideAndCeil( -- resourceCalculator, -- Resources.multiplyAndRoundDown( -- currentCapacity, userLimit), -- 100) -- ), -- Resources.multiplyAndRoundDown(queueCapacity, userLimitFactor) -+ userLimitResource, -+ maxUserLimit - ), - minimumAllocation); - -@@ -1081,11 +1137,11 @@ private Resource computeUserLimit(FiCaSchedulerApp application, - String userName = application.getUser(); - LOG.debug(""User limit computation for "" + userName + - "" in queue "" + getQueueName() + -- "" userLimit="" + userLimit + -+ "" userLimitPercent="" + userLimit + - "" userLimitFactor="" + userLimitFactor + - "" required: "" + required + - "" consumed: "" + user.getUsed() + -- "" limit: "" + limit + -+ "" user-limit-resource: "" + userLimitResource + - "" queueCapacity: "" + queueCapacity + - "" qconsumed: "" + queueUsage.getUsed() + - "" currentCapacity: "" + currentCapacity + -@@ -1093,31 +1149,26 @@ private Resource computeUserLimit(FiCaSchedulerApp application, - "" clusterCapacity: "" + clusterResource - ); - } -- user.setUserResourceLimit(limit); -- return limit; -+ user.setUserResourceLimit(userLimitResource); -+ return userLimitResource; - } - - @Private - protected synchronized boolean canAssignToUser(Resource clusterResource, - String userName, Resource limit, FiCaSchedulerApp application, -- boolean checkReservations, Set requestLabels) { -+ boolean checkReservations, String nodePartition) { - User user = getUser(userName); -- -- String label = CommonNodeLabelsManager.NO_LABEL; -- if (requestLabels != null && !requestLabels.isEmpty()) { -- label = requestLabels.iterator().next(); +- +- interface WriteQueueWaiter { +- public void readyForWrite(); +- } +- +- /* +- * max number of ms that a message can be delivered earlier than +- * overlayDelayMs if that avoids a call to Thread.sleep() +- */ +- private final static int INCOMING_MESSAGE_DELAY_SLACK = 10; ++ static final String chars = ""abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789""; + +- public final static byte[] ID_BYTES = new String(""-OS-F2F-"").getBytes(); +- private final static int HANDSHAKE_RESERVED_BITS_START_POS = 20; +- private final static int HANDSHAKE_RESERVED_BITS_END_POS = 28; +- private final static int HANDSHAKE_INFO_HASH_START_POS = 28; + private final static int HANDSHAKE_INFO_HASH_END_POS = 48; ++ private final static int HANDSHAKE_INFO_HASH_START_POS = 28; + private final static int HANDSHAKE_PEER_ID_POS = 48; +- private static final int HANDSHAKE_END_POS = HANDSHAKE_PEER_ID_POS + 20; ++ private final static int HANDSHAKE_RESERVED_BITS_END_POS = 28; ++ private final static int HANDSHAKE_RESERVED_BITS_START_POS = 20; ++ public final static byte[] ID_BYTES = new String(""-OS-F2F-"").getBytes(); + private final static int HANDSHAKE_PEER_ID_KEEP = ID_BYTES.length; ++ private static final int HANDSHAKE_END_POS = HANDSHAKE_PEER_ID_POS 
+ 20; + private static final int HANDSHAKE_PEER_ID_START_MOD_POS = HANDSHAKE_PEER_ID_POS + + HANDSHAKE_PEER_ID_KEEP; + +- public static InetSocketAddress getRandomAddr() { +- byte[] randomAddr = new byte[16]; +- randomAddr[0] = (byte) 0xfc; +- randomAddr[1] = 0; +- Random r = new Random(); +- byte[] rand = new byte[randomAddr.length - 2]; +- r.nextBytes(rand); +- System.arraycopy(rand, 0, randomAddr, 2, rand.length); +- InetAddress addr; +- try { +- addr = InetAddress.getByAddress(randomAddr); +- InetSocketAddress remoteFakeAddr = new InetSocketAddress(addr, 1); +- return remoteFakeAddr; +- } catch (UnknownHostException e) { +- // TODO Auto-generated catch block +- e.printStackTrace(); +- } +- return null; - } +- +- private ByteBuffer data_already_read = null; +- +- protected boolean closed = false; +- private String closeReason = """"; +- private boolean sentReset = false; +- private final int TIMEOUT = 2 * 60 * 1000; +- private long lastMsgTime; +- +- private final byte[] infoHash; +- private final long startTime; +- private boolean started = false; +- private final int channelId; ++ private final static Logger logger = Logger.getLogger(OverlayTransport.class.getName()); - // Note: We aren't considering the current request since there is a fixed - // overhead of the AM, but it's a > check, not a >= check, so... - if (Resources - .greaterThan(resourceCalculator, clusterResource, -- user.getUsed(label), -+ user.getUsed(nodePartition), - limit)) { - // if enabled, check to see if could we potentially use this node instead - // of a reserved node if the application has reserved containers - if (this.reservationsContinueLooking && checkReservations -- && label.equals(CommonNodeLabelsManager.NO_LABEL)) { -+ && nodePartition.equals(CommonNodeLabelsManager.NO_LABEL)) { - if (Resources.lessThanOrEqual( - resourceCalculator, - clusterResource, -@@ -1136,7 +1187,7 @@ protected synchronized boolean canAssignToUser(Resource clusterResource, - if (LOG.isDebugEnabled()) { - LOG.debug(""User "" + userName + "" in queue "" + getQueueName() - + "" will exceed limit - "" + "" consumed: "" -- + user.getUsed() + "" limit: "" + limit); -+ + user.getUsed(nodePartition) + "" limit: "" + limit); - } - return false; - } -@@ -1176,7 +1227,7 @@ resourceCalculator, required, getMaximumAllocation() +- private final int pathID; + // all operations on this object must be in a synchronized block + private final LinkedList bufferedMessages; - private CSAssignment assignContainersOnNode(Resource clusterResource, - FiCaSchedulerNode node, FiCaSchedulerApp application, Priority priority, -- RMContainer reservedContainer) { -+ RMContainer reservedContainer, SchedulingMode schedulingMode) { +- protected final FriendConnection connection; +- +- private List readWaiter = new LinkedList(); +- private List writeWaiter = new LinkedList(); ++ private final byte[] channelPeerId; ++ private ByteBuffer data_already_read = null; - CSAssignment assigned; +- private int transport_mode; ++ private final byte[] infoHash; -@@ -1190,7 +1241,7 @@ private CSAssignment assignContainersOnNode(Resource clusterResource, - assigned = - assignNodeLocalContainers(clusterResource, nodeLocalResourceRequest, - node, application, priority, reservedContainer, -- allocatedContainer); -+ allocatedContainer, schedulingMode); - if (Resources.greaterThan(resourceCalculator, clusterResource, - assigned.getResource(), Resources.none())) { ++ private volatile boolean outgoing; + private int posInHandshake = 0; -@@ -1219,7 +1270,7 @@ private CSAssignment 
assignContainersOnNode(Resource clusterResource, - assigned = - assignRackLocalContainers(clusterResource, rackLocalResourceRequest, - node, application, priority, reservedContainer, -- allocatedContainer); -+ allocatedContainer, schedulingMode); - if (Resources.greaterThan(resourceCalculator, clusterResource, - assigned.getResource(), Resources.none())) { +- private byte[] remoteHandshakeInfoHashBytes = new byte[20]; +- private volatile boolean remoteHandshakeRecieved; +- private volatile boolean outgoing; ++ private List readWaiter = new LinkedList(); -@@ -1248,7 +1299,7 @@ private CSAssignment assignContainersOnNode(Resource clusterResource, - assigned = - assignOffSwitchContainers(clusterResource, offSwitchResourceRequest, - node, application, priority, reservedContainer, -- allocatedContainer); -+ allocatedContainer, schedulingMode); +- private final byte[] channelPeerId; ++ private byte[] remoteHandshakeInfoHashBytes = new byte[20]; - // update locality statistics - if (allocatedContainer.getValue() != null) { -@@ -1314,16 +1365,17 @@ protected boolean findNodeToUnreserve(Resource clusterResource, +- private long bytesIn = 0; +- private long bytesOut = 0; +- private Average uploadRateAverage = Average.getInstance(1000, 10); +- private Average downloadRateAverage = Average.getInstance(1000, 10); ++ private volatile boolean remoteHandshakeRecieved; - @Private - protected boolean checkLimitsToReserve(Resource clusterResource, -- FiCaSchedulerApp application, Resource capability) { -+ FiCaSchedulerApp application, Resource capability, String nodePartition, -+ SchedulingMode schedulingMode) { - // we can't reserve if we got here based on the limit - // checks assuming we could unreserve!!! - Resource userLimit = computeUserLimitAndSetHeadroom(application, -- clusterResource, capability, null); -+ clusterResource, capability, nodePartition, schedulingMode); +- private final long overlayDelayMs; ++ private int transport_mode; - // Check queue max-capacity limit, - // TODO: Consider reservation on labels -- if (!canAssignToThisQueue(clusterResource, null, -- this.currentResourceLimits, capability, Resources.none())) { -+ if (!canAssignToThisQueue(clusterResource, RMNodeLabelsManager.NO_LABEL, -+ this.currentResourceLimits, capability, Resources.none(), schedulingMode)) { - if (LOG.isDebugEnabled()) { - LOG.debug(""was going to reserve but hit queue limit""); - } -@@ -1332,7 +1384,7 @@ protected boolean checkLimitsToReserve(Resource clusterResource, +- private final DelayedExecutor delayedOverlayMessageTimer; ++ private List writeWaiter = new LinkedList(); - // Check user limit - if (!canAssignToUser(clusterResource, application.getUser(), userLimit, -- application, false, null)) { -+ application, false, nodePartition)) { - if (LOG.isDebugEnabled()) { - LOG.debug(""was going to reserve but hit user limit""); - } -@@ -1345,12 +1397,13 @@ protected boolean checkLimitsToReserve(Resource clusterResource, - private CSAssignment assignNodeLocalContainers(Resource clusterResource, - ResourceRequest nodeLocalResourceRequest, FiCaSchedulerNode node, - FiCaSchedulerApp application, Priority priority, -- RMContainer reservedContainer, MutableObject allocatedContainer) { -+ RMContainer reservedContainer, MutableObject allocatedContainer, -+ SchedulingMode schedulingMode) { - if (canAssign(application, priority, node, NodeType.NODE_LOCAL, - reservedContainer)) { - return assignContainer(clusterResource, node, application, priority, - nodeLocalResourceRequest, NodeType.NODE_LOCAL, reservedContainer, -- 
allocatedContainer); -+ allocatedContainer, schedulingMode); + public OverlayTransport(FriendConnection connection, int channelId, byte[] infohash, + int pathID, boolean outgoing, long overlayDelayMs) { +- this.lastMsgTime = System.currentTimeMillis(); +- this.overlayDelayMs = overlayDelayMs; ++ super(connection, channelId, pathID, overlayDelayMs); + this.infoHash = infohash; + this.bufferedMessages = new LinkedList(); +- this.connection = connection; +- this.channelId = channelId; + logger.fine(getDescription() + "": Creating overlay transport""); + this.channelPeerId = generatePeerId(); +- this.pathID = pathID; + this.outgoing = outgoing; +- this.startTime = System.currentTimeMillis(); +- delayedOverlayMessageTimer = DelayedExecutorService.getInstance().getFixedDelayExecutor( +- overlayDelayMs); } - return new CSAssignment(Resources.none(), NodeType.NODE_LOCAL); -@@ -1359,12 +1412,13 @@ private CSAssignment assignNodeLocalContainers(Resource clusterResource, - private CSAssignment assignRackLocalContainers(Resource clusterResource, - ResourceRequest rackLocalResourceRequest, FiCaSchedulerNode node, - FiCaSchedulerApp application, Priority priority, -- RMContainer reservedContainer, MutableObject allocatedContainer) { -+ RMContainer reservedContainer, MutableObject allocatedContainer, -+ SchedulingMode schedulingMode) { - if (canAssign(application, priority, node, NodeType.RACK_LOCAL, - reservedContainer)) { - return assignContainer(clusterResource, node, application, priority, - rackLocalResourceRequest, NodeType.RACK_LOCAL, reservedContainer, -- allocatedContainer); -+ allocatedContainer, schedulingMode); +- /** +- * This method is called ""from above"", when the peer connection is +- * terminated, send a reset to other side +- */ +- public void close(String reason) { +- if (!closed) { +- closeReason = ""peer - "" + reason; +- logger.fine(getDescription() + "": OverlayTransport closed, reason:"" + closeReason); +- +- closed = true; +- this.sendReset(); +- } +- // we don't expect anyone to read whatever we have left in the buffer +- synchronized (bufferedMessages) { +- while (bufferedMessages.size() > 0) { +- bufferedMessages.removeFirst().destroy(); +- } +- } +- // and remove it from the friend connection +- connection.deregisterOverlayTransport(this); +- +- } +- +- /** +- * this method is called from below when a reset is received +- * +- * @param reason +- */ +- public void closeChannelReset() { +- +- if (sentReset) { +- // ok, this is the response to our previous close +- connection.deregisterOverlayTransport(this); +- } else { +- if (!closed) { +- closeReason = ""remote host closed overlay channel""; +- logger.fine(getDescription() + "": OverlayTransport closed, reason:"" + closeReason); +- // this is the remote side saying that the connection is closed +- // send a reset back to confirm +- sendReset(); +- closed = true; +- } +- } +- } +- +- /** +- * this method is called from below if the friend connection dies +- * +- * @param reason +- */ +- public void closeConnectionClosed(String reason) { +- closeReason = reason; +- logger.fine(getDescription() + "": OverlayTransport closed, reason:"" + closeReason); +- +- closed = true; +- connection.deregisterOverlayTransport(this); ++ @Override ++ protected void cleanup() { ++ // not used. 
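The assignment path above tries a request at increasing locality cost: node-local first, rack-local only after the application has missed min(#clusterNodes, nodeLocalityDelay) scheduling opportunities, and off-switch last. A schematic sketch of that cascade with toy types in place of the scheduler's request and node classes:

public class LocalityCascade {
    enum NodeType { NODE_LOCAL, RACK_LOCAL, OFF_SWITCH }

    // Toy stand-in; the real classes are ResourceRequest / FiCaSchedulerNode etc.
    static class Request {
        int nodeLocal, rackLocal, offSwitch; // pending container counts per locality
    }

    static int actualNodeLocalityDelay(int numClusterNodes, int nodeLocalityDelay) {
        // never wait for more missed opportunities than there are nodes in the cluster
        return Math.min(numClusterNodes, nodeLocalityDelay);
    }

    /** Returns the locality level to allocate at, or null if nothing can be assigned. */
    static NodeType pick(Request r, long missedOpportunities,
                         int numClusterNodes, int nodeLocalityDelay) {
        if (r.nodeLocal > 0) {
            return NodeType.NODE_LOCAL;
        }
        // 'delay' rack-local a little: only after enough missed opportunities
        if (r.rackLocal > 0
                && actualNodeLocalityDelay(numClusterNodes, nodeLocalityDelay) < missedOpportunities) {
            return NodeType.RACK_LOCAL;
        }
        if (r.offSwitch > 0) {
            return NodeType.OFF_SWITCH;
        }
        return null;
    }
}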
} - return new CSAssignment(Resources.none(), NodeType.RACK_LOCAL); -@@ -1373,16 +1427,21 @@ private CSAssignment assignRackLocalContainers(Resource clusterResource, - private CSAssignment assignOffSwitchContainers(Resource clusterResource, - ResourceRequest offSwitchResourceRequest, FiCaSchedulerNode node, - FiCaSchedulerApp application, Priority priority, -- RMContainer reservedContainer, MutableObject allocatedContainer) { -+ RMContainer reservedContainer, MutableObject allocatedContainer, -+ SchedulingMode schedulingMode) { - if (canAssign(application, priority, node, NodeType.OFF_SWITCH, - reservedContainer)) { - return assignContainer(clusterResource, node, application, priority, - offSwitchResourceRequest, NodeType.OFF_SWITCH, reservedContainer, -- allocatedContainer); -+ allocatedContainer, schedulingMode); + public void connectedInbound() { +@@ -258,7 +107,43 @@ public void connectOutbound(ByteBuffer initial_data, ConnectListener listener, + throw new RuntimeException(""not implemented""); } - - return new CSAssignment(Resources.none(), NodeType.OFF_SWITCH); - } -+ -+ private int getActualNodeLocalityDelay() { -+ return Math.min(scheduler.getNumClusterNodes(), getNodeLocalityDelay()); -+ } - boolean canAssign(FiCaSchedulerApp application, Priority priority, - FiCaSchedulerNode node, NodeType type, RMContainer reservedContainer) { -@@ -1417,10 +1476,7 @@ boolean canAssign(FiCaSchedulerApp application, Priority priority, - if (type == NodeType.RACK_LOCAL) { - // 'Delay' rack-local just a little bit... - long missedOpportunities = application.getSchedulingOpportunities(priority); -- return ( -- Math.min(scheduler.getNumClusterNodes(), getNodeLocalityDelay()) < -- missedOpportunities -- ); -+ return getActualNodeLocalityDelay() < missedOpportunities; +- static final String chars = ""abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789""; ++ private void createPeerTransport(DownloadManager downloadManager) { ++ // final check, we only allow this if the osf2f network is enabled, and ++ // osf2f friend search is a valid peer source ++ boolean allowed = checkOSF2FAllowed(downloadManager.getDownloadState().getPeerSources(), ++ downloadManager.getDownloadState().getNetworks()); ++ ++ if (!allowed) { ++ Debug.out(""denied request to create a peer""); ++ this.closeConnectionClosed(""access denied when creating overlay""); ++ return; ++ } ++ ++ PEPeerManager manager = downloadManager.getPeerManager(); ++ ++ PEPeerControl control = (PEPeerControl) manager; ++ // set it up the same way as an incoming connection ++ final NetworkConnection overlayConn = new NetworkConnectionImpl(this, ++ new BTMessageEncoder(), new BTMessageDecoder()); ++ PEPeerTransport pt = PEPeerTransportFactory.createTransport(control, PEPeerSource.PS_OSF2F, ++ overlayConn, null); ++ ++ // start it ++ pt.start(); ++ // and add it to the control ++ control.addPeerTransport(pt); ++ ++ // add the friend ++ pt.setData(OSF2FAzSwtUi.KEY_OVERLAY_TRANSPORT, this); ++ } ++ ++ protected void destroyBufferedMessages() { ++ synchronized (bufferedMessages) { ++ while (bufferedMessages.size() > 0) { ++ bufferedMessages.removeFirst().destroy(); ++ } ++ } ++ } + + public byte[] generatePeerId() { + byte[] peerId = new byte[20]; +@@ -272,20 +157,6 @@ public byte[] generatePeerId() { + return peerId; } - // Check if we need containers on this host -@@ -1460,7 +1516,7 @@ Container createContainer(FiCaSchedulerApp application, FiCaSchedulerNode node, - private CSAssignment assignContainer(Resource clusterResource, FiCaSchedulerNode 
node, - FiCaSchedulerApp application, Priority priority, - ResourceRequest request, NodeType type, RMContainer rmContainer, -- MutableObject createdContainer) { -+ MutableObject createdContainer, SchedulingMode schedulingMode) { - if (LOG.isDebugEnabled()) { - LOG.debug(""assignContainers: node="" + node.getNodeName() - + "" application="" + application.getApplicationId() -@@ -1469,9 +1525,8 @@ private CSAssignment assignContainer(Resource clusterResource, FiCaSchedulerNode +- public int getChannelId() { +- return channelId; +- } +- +- private String desc = null; +- +- public String getDescription() { +- if (desc == null) { +- desc = NetworkManager.OSF2F_TRANSPORT_PREFIX + "": "" +- + connection.getRemoteFriend().getNick() + "":"" + Integer.toHexString(channelId); +- } +- return desc; +- } +- + public String getEncryption() { + return (""FriendToFriend over SSL""); + } +@@ -294,10 +165,6 @@ public int getMssSize() { + return OSF2FMessage.MAX_MESSAGE_SIZE; } - - // check if the resource request can access the label -- if (!SchedulerUtils.checkNodeLabelExpression( -- node.getLabels(), -- request.getNodeLabelExpression())) { -+ if (!SchedulerUtils.checkResourceRequestMatchingNodePartition(request, -+ node.getPartition(), schedulingMode)) { - // this is a reserved container, but we cannot allocate it now according - // to label not match. This can be caused by node label changed - // We should un-reserve this container. -@@ -1576,8 +1631,8 @@ private CSAssignment assignContainer(Resource clusterResource, FiCaSchedulerNode - // If we're trying to reserve a container here, not container will be - // unreserved for reserving the new one. Check limits again before - // reserve the new container -- if (!checkLimitsToReserve(clusterResource, -- application, capability)) { -+ if (!checkLimitsToReserve(clusterResource, -+ application, capability, node.getPartition(), schedulingMode)) { - return new CSAssignment(Resources.none(), type); - } - } -@@ -1666,7 +1721,7 @@ public void completedContainer(Resource clusterResource, - // Book-keeping - if (removed) { - releaseResource(clusterResource, application, -- container.getResource(), node.getLabels()); -+ container.getResource(), node.getPartition()); - LOG.info(""completedContainer"" + - "" container="" + container + - "" queue="" + this + -@@ -1684,13 +1739,13 @@ public void completedContainer(Resource clusterResource, - synchronized void allocateResource(Resource clusterResource, - SchedulerApplicationAttempt application, Resource resource, -- Set nodeLabels) { -- super.allocateResource(clusterResource, resource, nodeLabels); -+ String nodePartition) { -+ super.allocateResource(clusterResource, resource, nodePartition); - - // Update user metrics - String userName = application.getUser(); - User user = getUser(userName); -- user.assignContainer(resource, nodeLabels); -+ user.assignContainer(resource, nodePartition); - // Note this is a bit unconventional since it gets the object and modifies - // it here, rather then using set routine - Resources.subtractFrom(application.getHeadroom(), resource); // headroom -@@ -1707,13 +1762,13 @@ synchronized void allocateResource(Resource clusterResource, - } +- public int getPathID() { +- return pathID; +- } +- + public TransportEndpoint getTransportEndpoint() { - synchronized void releaseResource(Resource clusterResource, -- FiCaSchedulerApp application, Resource resource, Set nodeLabels) { -- super.releaseResource(clusterResource, resource, nodeLabels); -+ FiCaSchedulerApp application, Resource resource, String 
nodePartition) { -+ super.releaseResource(clusterResource, resource, nodePartition); - - // Update user metrics - String userName = application.getUser(); - User user = getUser(userName); -- user.releaseContainer(resource, nodeLabels); -+ user.releaseContainer(resource, nodePartition); - metrics.setAvailableResourcesToUser(userName, application.getHeadroom()); - - LOG.info(getQueueName() + -@@ -1723,7 +1778,8 @@ synchronized void releaseResource(Resource clusterResource, - - private void updateAbsoluteCapacityResource(Resource clusterResource) { - absoluteCapacityResource = -- Resources.multiplyAndNormalizeUp(resourceCalculator, clusterResource, -+ Resources.multiplyAndNormalizeUp(resourceCalculator, labelManager -+ .getResourceByLabel(RMNodeLabelsManager.NO_LABEL, clusterResource), - queueCapacities.getAbsoluteCapacity(), minimumAllocation); - } - -@@ -1769,8 +1825,9 @@ resourceCalculator, this, getParent(), clusterResource, - // Update application properties - for (FiCaSchedulerApp application : activeApplications) { - synchronized (application) { -- computeUserLimitAndSetHeadroom(application, clusterResource, -- Resources.none(), null); -+ computeUserLimitAndSetHeadroom(application, clusterResource, -+ Resources.none(), RMNodeLabelsManager.NO_LABEL, -+ SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - } - } - } -@@ -1828,25 +1885,12 @@ public synchronized void finishApplication(boolean wasActive) { - } + return new TransportEndpoint() { +@@ -315,21 +182,7 @@ public int getTransportMode() { + return transport_mode; } -- public void assignContainer(Resource resource, -- Set nodeLabels) { -- if (nodeLabels == null || nodeLabels.isEmpty()) { -- userResourceUsage.incUsed(resource); -- } else { -- for (String label : nodeLabels) { -- userResourceUsage.incUsed(label, resource); +- public void incomingOverlayMsg(final OSF2FChannelDataMsg msg) { +- lastMsgTime = System.currentTimeMillis(); +- if (closed) { +- return; - } -- } -+ public void assignContainer(Resource resource, String nodePartition) { -+ userResourceUsage.incUsed(nodePartition, resource); +- delayedOverlayMessageTimer.queue(overlayDelayMs, INCOMING_MESSAGE_DELAY_SLACK, +- new TimerTask() { +- @Override +- public void run() { +- handleDelayedOverlayMessage(msg); +- } +- }); +- } +- +- private void handleDelayedOverlayMessage(final OSF2FChannelDataMsg msg) { ++ protected void handleDelayedOverlayMessage(final OSF2FChannelDataMsg msg) { + synchronized (bufferedMessages) { + bufferedMessages.add(msg); + +@@ -381,7 +234,7 @@ public boolean isReadyForWrite(final EventWaiter waiter) { + if (closed) { + return false; + } +- if (!connection.isReadyForWrite(new WriteQueueWaiter() { ++ if (!friendConnection.isReadyForWrite(new WriteQueueWaiter() { + public void readyForWrite() { + if (waiter != null) { + logger.finest(getDescription() + "": connection ready, notifying waiter""); +@@ -396,18 +249,10 @@ public void readyForWrite() { + return true; } -- public void releaseContainer(Resource resource, Set nodeLabels) { -- if (nodeLabels == null || nodeLabels.isEmpty()) { -- userResourceUsage.decUsed(resource); -- } else { -- for (String label : nodeLabels) { -- userResourceUsage.decUsed(label, resource); -- } -- } -+ public void releaseContainer(Resource resource, String nodePartition) { -+ userResourceUsage.decUsed(nodePartition, resource); +- public boolean isStarted() { +- return started; +- } +- + public boolean isTCP() { + return true; } - public Resource getUserResourceLimit() { -@@ -1869,7 +1913,7 @@ public void recoverContainer(Resource 
clusterResource, - FiCaSchedulerNode node = - scheduler.getNode(rmContainer.getContainer().getNodeId()); - allocateResource(clusterResource, attempt, rmContainer.getContainer() -- .getResource(), node.getLabels()); -+ .getResource(), node.getPartition()); +- public boolean isTimedOut() { +- return System.currentTimeMillis() - lastMsgTime > TIMEOUT; +- } +- + private byte modifyIncomingHandShake(byte b) { + if (posInHandshake == -1) { + return b; +@@ -428,7 +273,7 @@ private byte modifyIncomingHandShake(byte b) { + if (!Arrays.equals(infoHash, remoteHandshakeInfoHashBytes)) { + logger.warning(getDescription() + + "": WARNING in "" +- + connection ++ + friendConnection + + "" :: remote host different infohash "" + + ""than what we expected ,expected:\n "" + + new String(Base64.encode(infoHash) + "" got\n"" +@@ -584,12 +429,6 @@ public long read(ByteBuffer[] buffers, int array_offset, int length) throws IOEx + return totalRead; } - getParent().recoverContainer(clusterResource, attempt, rmContainer); - } -@@ -1909,7 +1953,7 @@ public void attachContainer(Resource clusterResource, - FiCaSchedulerNode node = - scheduler.getNode(rmContainer.getContainer().getNodeId()); - allocateResource(clusterResource, application, rmContainer.getContainer() -- .getResource(), node.getLabels()); -+ .getResource(), node.getPartition()); - LOG.info(""movedContainer"" + "" container="" + rmContainer.getContainer() - + "" resource="" + rmContainer.getContainer().getResource() - + "" queueMoveIn="" + this + "" usedCapacity="" + getUsedCapacity() -@@ -1927,7 +1971,7 @@ public void detachContainer(Resource clusterResource, - FiCaSchedulerNode node = - scheduler.getNode(rmContainer.getContainer().getNodeId()); - releaseResource(clusterResource, application, rmContainer.getContainer() -- .getResource(), node.getLabels()); -+ .getResource(), node.getPartition()); - LOG.info(""movedContainer"" + "" container="" + rmContainer.getContainer() - + "" resource="" + rmContainer.getContainer().getResource() - + "" queueMoveOut="" + this + "" usedCapacity="" + getUsedCapacity() -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/ParentQueue.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/ParentQueue.java -index 882498a6808f2..eb64d4384f0a7 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/ParentQueue.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/ParentQueue.java -@@ -56,8 +56,6 @@ - import org.apache.hadoop.yarn.server.resourcemanager.scheduler.NodeType; - import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceLimits; - import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerApplicationAttempt; --import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerUtils; --import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.AssignmentInformation; - import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerApp; - import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerNode; - import 
org.apache.hadoop.yarn.util.resource.Resources; -@@ -377,16 +375,29 @@ private synchronized void removeApplication(ApplicationId applicationId, - @Override - public synchronized CSAssignment assignContainers(Resource clusterResource, -- FiCaSchedulerNode node, ResourceLimits resourceLimits) { -- CSAssignment assignment = -- new CSAssignment(Resources.createResource(0, 0), NodeType.NODE_LOCAL); -- Set nodeLabels = node.getLabels(); -- -+ FiCaSchedulerNode node, ResourceLimits resourceLimits, -+ SchedulingMode schedulingMode) { - // if our queue cannot access this node, just return -- if (!SchedulerUtils.checkQueueAccessToNode(accessibleLabels, nodeLabels)) { -- return assignment; -+ if (schedulingMode == SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY -+ && !accessibleToPartition(node.getPartition())) { -+ return NULL_ASSIGNMENT; -+ } -+ -+ // Check if this queue need more resource, simply skip allocation if this -+ // queue doesn't need more resources. -+ if (!super.hasPendingResourceRequest(node.getPartition(), -+ clusterResource, schedulingMode)) { -+ if (LOG.isDebugEnabled()) { -+ LOG.debug(""Skip this queue="" + getQueuePath() -+ + "", because it doesn't need more resource, schedulingMode="" -+ + schedulingMode.name() + "" node-partition="" + node.getPartition()); -+ } -+ return NULL_ASSIGNMENT; +- public void sendReset() { +- sentReset = true; +- connection.sendChannelRst(new OSF2FChannelReset(OSF2FChannelReset.CURRENT_VERSION, +- channelId)); +- } +- + public void setAlreadyRead(ByteBuffer bytes_already_read) { + if (data_already_read != null) { + Debug.out(""push back already performed""); +@@ -631,38 +470,31 @@ public void downloadStarted() { + }); } - -+ CSAssignment assignment = -+ new CSAssignment(Resources.createResource(0, 0), NodeType.NODE_LOCAL); -+ - while (canAssign(clusterResource, node)) { - if (LOG.isDebugEnabled()) { - LOG.debug(""Trying to assign containers to child-queue of "" -@@ -396,15 +407,17 @@ public synchronized CSAssignment assignContainers(Resource clusterResource, - // Are we over maximum-capacity for this queue? 
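The ParentQueue changes above add two cheap early-outs before walking child queues: return immediately if the queue cannot access the node's partition (in RESPECT_PARTITION_EXCLUSIVITY mode) or has no pending request for it, and only then apply the per-iteration max-capacity check before delegating to children. A condensed sketch of that control flow with placeholder predicates; none of these hooks are the real CSQueue API:

public class ParentQueueSketch {
    // Placeholder hooks; in the real scheduler these consult queue capacities,
    // node partitions and pending ResourceRequests.
    interface Hooks {
        boolean accessibleToPartition(String nodePartition);
        boolean hasPendingResourceRequest(String nodePartition);
        boolean underMaxCapacity(String nodePartition);
        long assignToChildQueues(String nodePartition); // returns MB assigned this pass
        boolean canAssignMore(long assignedSoFarMb);
    }

    static long assignContainers(Hooks q, String nodePartition, boolean respectExclusivity) {
        // Early-out 1: the queue cannot use this partition at all.
        if (respectExclusivity && !q.accessibleToPartition(nodePartition)) {
            return 0;
        }
        // Early-out 2: nothing pending for this partition, skip the whole subtree.
        if (!q.hasPendingResourceRequest(nodePartition)) {
            return 0;
        }
        long assigned = 0;
        while (q.canAssignMore(assigned)) {
            // Per-iteration guard: stay under the queue's maximum capacity.
            if (!q.underMaxCapacity(nodePartition)) {
                break;
            }
            long got = q.assignToChildQueues(nodePartition);
            if (got == 0) {
                break; // no child queue could place anything on this node
            }
            assigned += got;
        }
        return assigned;
    }
}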
- // This will also consider parent's limits and also continuous reservation - // looking -- if (!super.canAssignToThisQueue(clusterResource, nodeLabels, resourceLimits, -- minimumAllocation, Resources.createResource(getMetrics() -- .getReservedMB(), getMetrics().getReservedVirtualCores()))) { -+ if (!super.canAssignToThisQueue(clusterResource, node.getPartition(), -+ resourceLimits, minimumAllocation, Resources.createResource( -+ getMetrics().getReservedMB(), getMetrics() -+ .getReservedVirtualCores()), schedulingMode)) { - break; - } - - // Schedule -- CSAssignment assignedToChild = -- assignContainersToChildQueues(clusterResource, node, resourceLimits); -+ CSAssignment assignedToChild = -+ assignContainersToChildQueues(clusterResource, node, resourceLimits, -+ schedulingMode); - assignment.setType(assignedToChild.getType()); - - // Done if no child-queue assigned anything -@@ -413,7 +426,7 @@ public synchronized CSAssignment assignContainers(Resource clusterResource, - assignedToChild.getResource(), Resources.none())) { - // Track resource utilization for the parent-queue - super.allocateResource(clusterResource, assignedToChild.getResource(), -- nodeLabels); -+ node.getPartition()); - - // Track resource utilization in this pass of the scheduler - Resources -@@ -510,7 +523,8 @@ private ResourceLimits getResourceLimitsOfChild(CSQueue child, - } - - private synchronized CSAssignment assignContainersToChildQueues( -- Resource cluster, FiCaSchedulerNode node, ResourceLimits limits) { -+ Resource cluster, FiCaSchedulerNode node, ResourceLimits limits, -+ SchedulingMode schedulingMode) { - CSAssignment assignment = - new CSAssignment(Resources.createResource(0, 0), NodeType.NODE_LOCAL); - -@@ -523,12 +537,13 @@ private synchronized CSAssignment assignContainersToChildQueues( - LOG.debug(""Trying to assign to queue: "" + childQueue.getQueuePath() - + "" stats: "" + childQueue); - } -- -+ - // Get ResourceLimits of child queue before assign containers - ResourceLimits childLimits = - getResourceLimitsOfChild(childQueue, cluster, limits); - -- assignment = childQueue.assignContainers(cluster, node, childLimits); -+ assignment = childQueue.assignContainers(cluster, node, -+ childLimits, schedulingMode); - if(LOG.isDebugEnabled()) { - LOG.debug(""Assigned to queue: "" + childQueue.getQueuePath() + - "" stats: "" + childQueue + "" --> "" + -@@ -584,7 +599,7 @@ public void completedContainer(Resource clusterResource, - // Book keeping - synchronized (this) { - super.releaseResource(clusterResource, rmContainer.getContainer() -- .getResource(), node.getLabels()); -+ .getResource(), node.getPartition()); - LOG.info(""completedContainer"" + - "" queue="" + getQueueName() + -@@ -653,7 +668,7 @@ public void recoverContainer(Resource clusterResource, - FiCaSchedulerNode node = - scheduler.getNode(rmContainer.getContainer().getNodeId()); - super.allocateResource(clusterResource, rmContainer.getContainer() -- .getResource(), node.getLabels()); -+ .getResource(), node.getPartition()); +- public long getArtificialDelay() { +- return overlayDelayMs; +- } +- +- private void createPeerTransport(DownloadManager downloadManager) { +- // final check, we only allow this if the osf2f network is enabled, and +- // osf2f friend search is a valid peer source +- boolean allowed = checkOSF2FAllowed(downloadManager.getDownloadState().getPeerSources(), +- downloadManager.getDownloadState().getNetworks()); +- +- if (!allowed) { +- Debug.out(""denied request to create a peer""); +- this.closeConnectionClosed(""access denied 
when creating overlay""); +- return; ++ public long write(ByteBuffer[] buffers, int array_offset, int length) throws IOException { ++ if (closed) { ++ // when closed just ignore the write requests ++ // hopefully the peertransport will read everything in the buffer ++ // and get the exception there when done ++ return 0; + } +- +- PEPeerManager manager = downloadManager.getPeerManager(); +- +- PEPeerControl control = (PEPeerControl) manager; +- // set it up the same way as an incoming connection +- final NetworkConnection overlayConn = new NetworkConnectionImpl(this, +- new BTMessageEncoder(), new BTMessageDecoder()); +- PEPeerTransport pt = PEPeerTransportFactory.createTransport(control, PEPeerSource.PS_OSF2F, +- overlayConn, null); +- +- // start it +- pt.start(); +- // and add it to the control +- control.addPeerTransport(pt); +- +- // add the friend +- pt.setData(OSF2FAzSwtUi.KEY_OVERLAY_TRANSPORT, this); ++ int totalToWrite = 0; ++ int totalWritten = 0; ++ for (int i = array_offset; i < array_offset + length; i++) { ++ totalToWrite += buffers[i].remaining(); ++ } ++ logger.finest(getDescription() + ""got write request for: "" + totalToWrite); ++ // only write one packet at the time ++ if (isReadyForWrite(null)) { ++ DirectByteBuffer msgBuffer = DirectByteBufferPool.getBuffer(DirectByteBuffer.AL_MSG, ++ Math.min(totalToWrite, OSF2FMessage.MAX_PAYLOAD_SIZE)); ++ this.putInBuffer(buffers, array_offset, length, msgBuffer); ++ msgBuffer.flip(DirectByteBuffer.SS_MSG); ++ totalWritten += writeMessageToFriendConnection(msgBuffer); ++ } ++ logger.finest(""wrote "" + totalWritten + "" to overlay channel "" + channelId); ++ bytesOut += totalWritten; ++ uploadRateAverage.addValue(totalWritten); ++ return totalWritten; } - if (parent != null) { - parent.recoverContainer(clusterResource, attempt, rmContainer); -@@ -681,7 +696,7 @@ public void attachContainer(Resource clusterResource, - FiCaSchedulerNode node = - scheduler.getNode(rmContainer.getContainer().getNodeId()); - super.allocateResource(clusterResource, rmContainer.getContainer() -- .getResource(), node.getLabels()); -+ .getResource(), node.getPartition()); - LOG.info(""movedContainer"" + "" queueMoveIn="" + getQueueName() - + "" usedCapacity="" + getUsedCapacity() + "" absoluteUsedCapacity="" - + getAbsoluteUsedCapacity() + "" used="" + queueUsage.getUsed() + "" cluster="" -@@ -701,7 +716,7 @@ public void detachContainer(Resource clusterResource, - scheduler.getNode(rmContainer.getContainer().getNodeId()); - super.releaseResource(clusterResource, - rmContainer.getContainer().getResource(), -- node.getLabels()); -+ node.getPartition()); - LOG.info(""movedContainer"" + "" queueMoveOut="" + getQueueName() - + "" usedCapacity="" + getUsedCapacity() + "" absoluteUsedCapacity="" - + getAbsoluteUsedCapacity() + "" used="" + queueUsage.getUsed() + "" cluster="" -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/SchedulingMode.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/SchedulingMode.java -new file mode 100644 -index 0000000000000..7e7dc37c9bea1 ---- /dev/null -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/SchedulingMode.java -@@ -0,0 +1,44 @@ -+/** -+ * Licensed to the 
Apache Software Foundation (ASF) under one -+ * or more contributor license agreements. See the NOTICE file -+ * distributed with this work for additional information -+ * regarding copyright ownership. The ASF licenses this file -+ * to you under the Apache License, Version 2.0 (the -+ * ""License""); you may not use this file except in compliance -+ * with the License. You may obtain a copy of the License at -+ * -+ * http://www.apache.org/licenses/LICENSE-2.0 -+ * -+ * Unless required by applicable law or agreed to in writing, software -+ * distributed under the License is distributed on an ""AS IS"" BASIS, -+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -+ * See the License for the specific language governing permissions and -+ * limitations under the License. -+ */ -+ -+package org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity; -+ -+/** -+ * Scheduling modes, see below for detailed explanations -+ */ -+public enum SchedulingMode { -+ /** -+ *

-+ * When a node has partition (say partition=x), only application in the queue -+ * can access to partition=x AND requires for partition=x resource can get -+ * chance to allocate on the node.
-+ *
-+ * When a node has no partition, only application requires non-partitioned -+ * resource can get chance to allocate on the node.
-+ */ -+ RESPECT_PARTITION_EXCLUSIVITY, -+ -+ /** -+ * Only used when a node has partition AND the partition isn't an exclusive -+ * partition AND application requires non-partitioned resource. -+ */ -+ IGNORE_PARTITION_EXCLUSIVITY -+} -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/Application.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/Application.java -index 76ede3940f825..9b7eb840dbecc 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/Application.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/Application.java -@@ -54,6 +54,7 @@ - import org.apache.hadoop.yarn.factories.RecordFactory; - import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; - import org.apache.hadoop.yarn.server.resourcemanager.Task.State; -+import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager; - import org.apache.hadoop.yarn.server.resourcemanager.scheduler.Allocation; - import org.apache.hadoop.yarn.server.resourcemanager.scheduler.NodeType; - import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler; -@@ -277,6 +278,9 @@ private synchronized void addResourceRequest( - } else { - request.setNumContainers(request.getNumContainers() + 1); + + /** +@@ -697,76 +529,64 @@ public static boolean checkOSF2FAllowed(String[] peerSources, String[] netSource + return allowed; } -+ if (request.getNodeLabelExpression() == null) { -+ request.setNodeLabelExpression(RMNodeLabelsManager.NO_LABEL); -+ } - - // Note this down for next interaction with ResourceManager - ask.remove(request); -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockAM.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockAM.java -index f62fdb3dcee22..5c107aa38bfb1 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockAM.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockAM.java -@@ -150,8 +150,14 @@ public AllocateResponse allocate( - public AllocateResponse allocate( - String host, int memory, int numContainers, - List releases, String labelExpression) throws Exception { -+ return allocate(host, memory, numContainers, 1, releases, labelExpression); -+ } -+ -+ public AllocateResponse allocate( -+ String host, int memory, int numContainers, int priority, -+ List releases, String labelExpression) throws Exception { - List reqs = -- createReq(new String[] { host }, memory, 1, numContainers, -+ createReq(new String[] { host }, memory, priority, numContainers, - labelExpression); - return allocate(reqs, releases); - } -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java 
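Read together, the two values encode a simple gate: under RESPECT_PARTITION_EXCLUSIVITY a request is only considered on a node whose partition matches its label (unlabeled requests match unlabeled nodes), while IGNORE_PARTITION_EXCLUSIVITY lets an unlabeled request borrow a node of a non-exclusive partition. A hedged sketch of that predicate, derived from the javadoc above rather than from the scheduler's actual matching code:

public class PartitionGate {
    static final String NO_LABEL = ""; // unlabeled requests / unlabeled nodes

    enum SchedulingMode { RESPECT_PARTITION_EXCLUSIVITY, IGNORE_PARTITION_EXCLUSIVITY }

    /**
     * Illustrative matching rule: requestedPartition is the label on the request,
     * nodePartition is the partition the node belongs to.
     */
    static boolean canAllocate(String requestedPartition, String nodePartition,
                               SchedulingMode mode) {
        if (mode == SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY) {
            // exact match only: labeled requests on matching nodes,
            // unlabeled requests on unlabeled nodes
            return requestedPartition.equals(nodePartition);
        }
        // IGNORE_PARTITION_EXCLUSIVITY: an unlabeled request may also use a
        // labeled node; whether that partition is non-exclusive is checked elsewhere
        return requestedPartition.equals(NO_LABEL);
    }
}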
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java -index 06c6b3275e334..f2b1d8646de51 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java -@@ -21,6 +21,8 @@ - import java.io.IOException; - import java.nio.ByteBuffer; - import java.security.PrivilegedAction; -+import java.util.Arrays; -+import java.util.Collection; - import java.util.List; - import java.util.Map; -@@ -200,10 +202,18 @@ public boolean waitForState(MockNM nm, ContainerId containerId, - - public boolean waitForState(MockNM nm, ContainerId containerId, - RMContainerState containerState, int timeoutMillisecs) throws Exception { -+ return waitForState(Arrays.asList(nm), containerId, containerState, -+ timeoutMillisecs); -+ } -+ -+ public boolean waitForState(Collection nms, ContainerId containerId, -+ RMContainerState containerState, int timeoutMillisecs) throws Exception { - RMContainer container = getResourceScheduler().getRMContainer(containerId); - int timeoutSecs = 0; - while(container == null && timeoutSecs++ < timeoutMillisecs / 100) { -- nm.nodeHeartbeat(true); -+ for (MockNM nm : nms) { -+ nm.nodeHeartbeat(true); -+ } - container = getResourceScheduler().getRMContainer(containerId); - System.out.println(""Waiting for container "" + containerId + "" to be allocated.""); - Thread.sleep(100); -@@ -217,9 +227,11 @@ public boolean waitForState(MockNM nm, ContainerId containerId, - && timeoutSecs++ < timeoutMillisecs / 100) { - System.out.println(""Container : "" + containerId + "" State is : "" - + container.getState() + "" Waiting for state : "" + containerState); -- nm.nodeHeartbeat(true); -+ for (MockNM nm : nms) { -+ nm.nodeHeartbeat(true); -+ } - Thread.sleep(100); -- -+ - if (timeoutMillisecs <= timeoutSecs * 100) { - return false; - } -@@ -650,11 +662,28 @@ public static void finishAMAndVerifyAppState(RMApp rmApp, MockRM rm, MockNM nm, - am.waitForState(RMAppAttemptState.FINISHED); - rm.waitForState(rmApp.getApplicationId(), RMAppState.FINISHED); - } -+ -+ @SuppressWarnings(""rawtypes"") -+ private static void waitForSchedulerAppAttemptAdded( -+ ApplicationAttemptId attemptId, MockRM rm) throws InterruptedException { -+ int tick = 0; -+ // Wait for at most 5 sec -+ while (null == ((AbstractYarnScheduler) rm.getResourceScheduler()) -+ .getApplicationAttempt(attemptId) && tick < 50) { -+ Thread.sleep(100); -+ if (tick % 10 == 0) { -+ System.out.println(""waiting for SchedulerApplicationAttempt="" -+ + attemptId + "" added.""); -+ } -+ tick++; -+ } -+ } +- public long write(ByteBuffer[] buffers, int array_offset, int length) throws IOException { +- if (closed) { +- // when closed just ignore the write requests +- // hopefully the peertransport will read everything in the buffer +- // and get the exception there when done +- return 0; +- } +- int totalToWrite = 0; +- int totalWritten = 0; +- for (int i = array_offset; i < array_offset + length; i++) { +- totalToWrite += buffers[i].remaining(); +- } +- logger.finest(getDescription() + ""got write request for: "" + totalToWrite); +- // only write one packet at the time +- if (isReadyForWrite(null)) { +- DirectByteBuffer msgBuffer = DirectByteBufferPool.getBuffer(DirectByteBuffer.AL_MSG, 
+- Math.min(totalToWrite, OSF2FMessage.MAX_PAYLOAD_SIZE)); +- this.putInBuffer(buffers, array_offset, length, msgBuffer); +- msgBuffer.flip(DirectByteBuffer.SS_MSG); +- totalWritten += writeMessage(msgBuffer); ++ public static InetSocketAddress getRandomAddr() { ++ byte[] randomAddr = new byte[16]; ++ randomAddr[0] = (byte) 0xfc; ++ randomAddr[1] = 0; ++ Random r = new Random(); ++ byte[] rand = new byte[randomAddr.length - 2]; ++ r.nextBytes(rand); ++ System.arraycopy(rand, 0, randomAddr, 2, rand.length); ++ InetAddress addr; ++ try { ++ addr = InetAddress.getByAddress(randomAddr); ++ InetSocketAddress remoteFakeAddr = new InetSocketAddress(addr, 1); ++ return remoteFakeAddr; ++ } catch (UnknownHostException e) { ++ // TODO Auto-generated catch block ++ e.printStackTrace(); + } +- logger.finest(""wrote "" + totalWritten + "" to overlay channel "" + channelId); +- bytesOut += totalWritten; +- uploadRateAverage.addValue(totalWritten); +- return totalWritten; +- } +- +- protected long writeMessage(DirectByteBuffer msgBuffer) { +- OSF2FChannelDataMsg msg = new OSF2FChannelDataMsg(OSF2FMessage.CURRENT_VERSION, channelId, +- msgBuffer); +- long totalWritten = msgBuffer.remaining(DirectByteBuffer.SS_MSG); +- msg.setForward(false); +- connection.sendChannelMsg(msg, true); +- return totalWritten; ++ return null; + } - public static MockAM launchAM(RMApp app, MockRM rm, MockNM nm) - throws Exception { - rm.waitForState(app.getApplicationId(), RMAppState.ACCEPTED); - RMAppAttempt attempt = app.getCurrentAppAttempt(); -+ waitForSchedulerAppAttemptAdded(attempt.getAppAttemptId(), rm); - System.out.println(""Launch AM "" + attempt.getAppAttemptId()); - nm.nodeHeartbeat(true); - MockAM am = rm.sendAMLaunched(attempt.getAppAttemptId()); -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestApplicationLimits.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestApplicationLimits.java -index 1ca5c97a411a5..46167ca68596b 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestApplicationLimits.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestApplicationLimits.java -@@ -612,7 +612,7 @@ public void testHeadroom() throws Exception { +- public String getRemoteIP() { +- return connection.getRemoteIp().getHostAddress(); +- } ++ private static class OverlayProtocolEndpoint implements ProtocolEndpoint { ++ private ConnectionEndpoint connectionEndpoint; - // Schedule to compute - queue.assignContainers(clusterResource, node_0, new ResourceLimits( -- clusterResource)); -+ clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - Resource expectedHeadroom = Resources.createResource(10*16*GB, 1); - assertEquals(expectedHeadroom, app_0_0.getHeadroom()); +- public Friend getRemoteFriend() { +- return connection.getRemoteFriend(); +- } ++ public OverlayProtocolEndpoint() { ++ connectionEndpoint = new ConnectionEndpoint(getRandomAddr()); ++ } -@@ -632,7 +632,7 @@ public void testHeadroom() throws Exception { +- public long getAge() { +- return System.currentTimeMillis() - startTime; +- } ++ public Transport 
connectOutbound(boolean connect_with_crypto, boolean allow_fallback, ++ byte[][] shared_secrets, ByteBuffer initial_data, boolean high_priority, ++ ConnectListener listener) { ++ Debug.out(""tried to create outgoing OverlayTransport, this should never happen!!!""); ++ throw new RuntimeException(""not implemented""); ++ } - // Schedule to compute - queue.assignContainers(clusterResource, node_0, new ResourceLimits( -- clusterResource)); // Schedule to compute -+ clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); // Schedule to compute - assertEquals(expectedHeadroom, app_0_0.getHeadroom()); - assertEquals(expectedHeadroom, app_0_1.getHeadroom());// no change - -@@ -652,7 +652,7 @@ public void testHeadroom() throws Exception { - - // Schedule to compute - queue.assignContainers(clusterResource, node_0, new ResourceLimits( -- clusterResource)); // Schedule to compute -+ clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); // Schedule to compute - expectedHeadroom = Resources.createResource(10*16*GB / 2, 1); // changes - assertEquals(expectedHeadroom, app_0_0.getHeadroom()); - assertEquals(expectedHeadroom, app_0_1.getHeadroom()); -@@ -661,7 +661,7 @@ public void testHeadroom() throws Exception { - // Now reduce cluster size and check for the smaller headroom - clusterResource = Resources.createResource(90*16*GB); - queue.assignContainers(clusterResource, node_0, new ResourceLimits( -- clusterResource)); // Schedule to compute -+ clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); // Schedule to compute - expectedHeadroom = Resources.createResource(9*16*GB / 2, 1); // changes - assertEquals(expectedHeadroom, app_0_0.getHeadroom()); - assertEquals(expectedHeadroom, app_0_1.getHeadroom()); -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestChildQueueOrder.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestChildQueueOrder.java -index 23b31faeb8f7d..970a98ad576f6 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestChildQueueOrder.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestChildQueueOrder.java -@@ -44,6 +44,7 @@ - import org.apache.hadoop.yarn.server.resourcemanager.RMContext; - import org.apache.hadoop.yarn.server.resourcemanager.ahs.RMApplicationHistoryWriter; - import org.apache.hadoop.yarn.server.resourcemanager.metrics.SystemMetricsPublisher; -+import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager; - import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.ContainerAllocationExpirer; - import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer; - import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerEventType; -@@ -133,7 +134,7 @@ public CSAssignment answer(InvocationOnMock invocation) throws Throwable { - final Resource allocatedResource = Resources.createResource(allocation); - if (queue instanceof ParentQueue) { - ((ParentQueue)queue).allocateResource(clusterResource, -- allocatedResource, null); -+ allocatedResource, RMNodeLabelsManager.NO_LABEL); - } else { 
- FiCaSchedulerApp app1 = getMockApplication(0, """"); - ((LeafQueue)queue).allocateResource(clusterResource, app1, -@@ -145,7 +146,7 @@ public CSAssignment answer(InvocationOnMock invocation) throws Throwable { - doReturn(new CSAssignment(Resources.none(), type)). - when(queue) - .assignContainers(eq(clusterResource), eq(node), -- any(ResourceLimits.class)); -+ any(ResourceLimits.class), any(SchedulingMode.class)); +- public long getLastMsgTime() { +- return System.currentTimeMillis() - lastMsgTime; +- } ++ public Transport connectOutbound(boolean connect_with_crypto, boolean allow_fallback, ++ byte[][] shared_secrets, ByteBuffer initial_data, ConnectListener listener) { ++ Debug.out(""tried to create outgoing OverlayTransport, this should never happen!!!""); ++ throw new RuntimeException(""not implemented""); ++ } - // Mock the node's resource availability - Resource available = node.getAvailableResource(); -@@ -157,7 +158,7 @@ public CSAssignment answer(InvocationOnMock invocation) throws Throwable { - } - }). - when(queue).assignContainers(eq(clusterResource), eq(node), -- any(ResourceLimits.class)); -+ any(ResourceLimits.class), any(SchedulingMode.class)); - doNothing().when(node).releaseContainer(any(Container.class)); - } +- public long getBytesIn() { +- return bytesIn; +- } ++ public ConnectionEndpoint getConnectionEndpoint() { ++ return connectionEndpoint; ++ } -@@ -241,6 +242,14 @@ public void testSortedQueues() throws Exception { - CSQueue b = queues.get(B); - CSQueue c = queues.get(C); - CSQueue d = queues.get(D); -+ -+ // Make a/b/c/d has >0 pending resource, so that allocation will continue. -+ queues.get(CapacitySchedulerConfiguration.ROOT).getQueueResourceUsage() -+ .incPending(Resources.createResource(1 * GB)); -+ a.getQueueResourceUsage().incPending(Resources.createResource(1 * GB)); -+ b.getQueueResourceUsage().incPending(Resources.createResource(1 * GB)); -+ c.getQueueResourceUsage().incPending(Resources.createResource(1 * GB)); -+ d.getQueueResourceUsage().incPending(Resources.createResource(1 * GB)); +- public long getBytesOut() { +- return bytesOut; +- } ++ public String getDescription() { ++ return ""PROTOCOL_TCP""; ++ } - final String user_0 = ""user_0""; +- public int getUploadRate() { +- return (int) uploadRateAverage.getAverage(); +- } ++ public int getType() { ++ return PROTOCOL_TCP; ++ } -@@ -275,7 +284,7 @@ public void testSortedQueues() throws Exception { - stubQueueAllocation(c, clusterResource, node_0, 0*GB); - stubQueueAllocation(d, clusterResource, node_0, 0*GB); - root.assignContainers(clusterResource, node_0, new ResourceLimits( -- clusterResource)); -+ clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - for(int i=0; i < 2; i++) - { - stubQueueAllocation(a, clusterResource, node_0, 0*GB); -@@ -283,7 +292,7 @@ public void testSortedQueues() throws Exception { - stubQueueAllocation(c, clusterResource, node_0, 0*GB); - stubQueueAllocation(d, clusterResource, node_0, 0*GB); - root.assignContainers(clusterResource, node_0, new ResourceLimits( -- clusterResource)); -+ clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - } - for(int i=0; i < 3; i++) - { -@@ -292,7 +301,7 @@ public void testSortedQueues() throws Exception { - stubQueueAllocation(c, clusterResource, node_0, 1*GB); - stubQueueAllocation(d, clusterResource, node_0, 0*GB); - root.assignContainers(clusterResource, node_0, new ResourceLimits( -- clusterResource)); -+ clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - } - for(int i=0; i < 4; i++) - { -@@ 
-301,7 +310,7 @@ public void testSortedQueues() throws Exception { - stubQueueAllocation(c, clusterResource, node_0, 0*GB); - stubQueueAllocation(d, clusterResource, node_0, 1*GB); - root.assignContainers(clusterResource, node_0, new ResourceLimits( -- clusterResource)); -+ clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - } - verifyQueueMetrics(a, 1*GB, clusterResource); - verifyQueueMetrics(b, 2*GB, clusterResource); -@@ -335,7 +344,7 @@ public void testSortedQueues() throws Exception { - stubQueueAllocation(c, clusterResource, node_0, 0*GB); - stubQueueAllocation(d, clusterResource, node_0, 0*GB); - root.assignContainers(clusterResource, node_0, new ResourceLimits( -- clusterResource)); -+ clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); +- public int getDownloadRate() { +- return (int) downloadRateAverage.getAverage(); ++ public void setConnectionEndpoint(ConnectionEndpoint ce) { ++ this.connectionEndpoint = ce; ++ } } - verifyQueueMetrics(a, 3*GB, clusterResource); - verifyQueueMetrics(b, 2*GB, clusterResource); -@@ -363,7 +372,7 @@ public void testSortedQueues() throws Exception { - stubQueueAllocation(c, clusterResource, node_0, 0*GB); - stubQueueAllocation(d, clusterResource, node_0, 0*GB); - root.assignContainers(clusterResource, node_0, new ResourceLimits( -- clusterResource)); -+ clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verifyQueueMetrics(a, 2*GB, clusterResource); - verifyQueueMetrics(b, 3*GB, clusterResource); - verifyQueueMetrics(c, 3*GB, clusterResource); -@@ -390,7 +399,7 @@ public void testSortedQueues() throws Exception { - stubQueueAllocation(c, clusterResource, node_0, 0*GB); - stubQueueAllocation(d, clusterResource, node_0, 0*GB); - root.assignContainers(clusterResource, node_0, new ResourceLimits( -- clusterResource)); -+ clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verifyQueueMetrics(a, 3*GB, clusterResource); - verifyQueueMetrics(b, 2*GB, clusterResource); - verifyQueueMetrics(c, 3*GB, clusterResource); -@@ -405,12 +414,14 @@ public void testSortedQueues() throws Exception { - stubQueueAllocation(c, clusterResource, node_0, 0*GB); - stubQueueAllocation(d, clusterResource, node_0, 1*GB); - root.assignContainers(clusterResource, node_0, new ResourceLimits( -- clusterResource)); -+ clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - InOrder allocationOrder = inOrder(d,b); -- allocationOrder.verify(d).assignContainers(eq(clusterResource), -- any(FiCaSchedulerNode.class), any(ResourceLimits.class)); -- allocationOrder.verify(b).assignContainers(eq(clusterResource), -- any(FiCaSchedulerNode.class), any(ResourceLimits.class)); -+ allocationOrder.verify(d).assignContainers(eq(clusterResource), -+ any(FiCaSchedulerNode.class), any(ResourceLimits.class), -+ any(SchedulingMode.class)); -+ allocationOrder.verify(b).assignContainers(eq(clusterResource), -+ any(FiCaSchedulerNode.class), any(ResourceLimits.class), -+ any(SchedulingMode.class)); - verifyQueueMetrics(a, 3*GB, clusterResource); - verifyQueueMetrics(b, 2*GB, clusterResource); - verifyQueueMetrics(c, 3*GB, clusterResource); -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestContainerAllocation.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestContainerAllocation.java 
-index 03b8f5c1fe195..54ba61724f95e 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestContainerAllocation.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestContainerAllocation.java -@@ -19,6 +19,8 @@ - package org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity; - import java.util.ArrayList; -+import java.util.Arrays; -+import java.util.HashSet; - import java.util.List; - import java.util.Set; +- public boolean isLANLocal() { +- return connection.getNetworkConnection().isLANLocal(); ++ interface WriteQueueWaiter { ++ public void readyForWrite(); + } +- + } +diff --git a/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/network/OverlayTransportEncrypted.java b/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/network/OverlayTransportEncrypted.java +index 0a210a17..ae86c793 100644 +--- a/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/network/OverlayTransportEncrypted.java ++++ b/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/network/OverlayTransportEncrypted.java +@@ -17,97 +17,103 @@ + import edu.washington.cs.oneswarm.f2f.messaging.OSF2FMessage; + + public class OverlayTransportEncrypted extends OverlayTransport { +- private final static Logger logger = Logger.getLogger(OverlayTransportEncrypted.class.getName()); ++ private final static Logger logger = Logger ++ .getLogger(OverlayTransportEncrypted.class.getName()); + +- private static final String ENCRYPTION_ALGORITHM = ""AES/CFB/NoPadding""; +- public final static int KEY_LENGTH_BITS = 256; +- final Cipher readCipher; +- final Cipher writeCipher; ++ private static final String ENCRYPTION_ALGORITHM = ""AES/CFB/NoPadding""; ++ public final static int KEY_LENGTH_BITS = 256; ++ final Cipher readCipher; ++ final Cipher writeCipher; + +- public OverlayTransportEncrypted(FriendConnection connection, int channelId, byte[] infohash, int pathID, boolean outgoing, long overlayDelayMs, byte[] key) throws NoSuchAlgorithmException, NoSuchPaddingException, InvalidKeyException { +- super(connection, channelId, infohash, pathID, outgoing, overlayDelayMs); +- if (key.length / 8 != KEY_LENGTH_BITS) { +- throw new InvalidKeyException(""invalid key length""); +- } ++ public OverlayTransportEncrypted(FriendConnection connection, int channelId, byte[] infohash, ++ int pathID, boolean outgoing, long overlayDelayMs, byte[] key) ++ throws NoSuchAlgorithmException, NoSuchPaddingException, InvalidKeyException { ++ super(connection, channelId, infohash, pathID, outgoing, overlayDelayMs); ++ if (key.length / 8 != KEY_LENGTH_BITS) { ++ throw new InvalidKeyException(""invalid key length""); ++ } + +- SecretKeySpec keySpec = new SecretKeySpec(key, 0, key.length, ENCRYPTION_ALGORITHM); +- readCipher = Cipher.getInstance(ENCRYPTION_ALGORITHM); +- readCipher.init(Cipher.DECRYPT_MODE, keySpec); +- writeCipher = Cipher.getInstance(ENCRYPTION_ALGORITHM); +- writeCipher.init(Cipher.ENCRYPT_MODE, keySpec); +- } ++ SecretKeySpec keySpec = new SecretKeySpec(key, 0, key.length, ENCRYPTION_ALGORITHM); ++ readCipher = Cipher.getInstance(ENCRYPTION_ALGORITHM); ++ readCipher.init(Cipher.DECRYPT_MODE, keySpec); ++ writeCipher = Cipher.getInstance(ENCRYPTION_ALGORITHM); ++ writeCipher.init(Cipher.ENCRYPT_MODE, keySpec); ++ } + +- public long write(ByteBuffer[] buffers, int array_offset, int length) throws IOException 
{ ++ public long write(ByteBuffer[] buffers, int array_offset, int length) throws IOException { + +- if (closed) { +- // when closed just ignore the write requests +- // hopefully the peertransport will read everything in the +- // buffer +- // and get the exception there when done +- return 0; +- } +- int totalToWrite = 0; +- int totalWritten = 0; +- try { +- for (int i = array_offset; i < array_offset + length; i++) { +- totalToWrite += buffers[i].remaining(); +- } +- logger.finest(getDescription() + ""got write request for: "" + totalToWrite); +- // only write one packet at the time +- if (isReadyForWrite(null)) { +- DirectByteBuffer msgBuffer = DirectByteBufferPool.getBuffer(DirectByteBuffer.AL_MSG, Math.min(totalToWrite, OSF2FMessage.MAX_PAYLOAD_SIZE)); ++ if (closed) { ++ // when closed just ignore the write requests ++ // hopefully the peertransport will read everything in the ++ // buffer ++ // and get the exception there when done ++ return 0; ++ } ++ int totalToWrite = 0; ++ int totalWritten = 0; ++ try { ++ for (int i = array_offset; i < array_offset + length; i++) { ++ totalToWrite += buffers[i].remaining(); ++ } ++ logger.finest(getDescription() + ""got write request for: "" + totalToWrite); ++ // only write one packet at the time ++ if (isReadyForWrite(null)) { ++ DirectByteBuffer msgBuffer = DirectByteBufferPool.getBuffer( ++ DirectByteBuffer.AL_MSG, ++ Math.min(totalToWrite, OSF2FMessage.MAX_PAYLOAD_SIZE)); + +- ByteBuffer dstBuffer = msgBuffer.getBuffer(DirectByteBuffer.SS_MSG); +- for (int i = 0; i < buffers.length; i++) { +- ByteBuffer currBuffer = buffers[i]; ++ ByteBuffer dstBuffer = msgBuffer.getBuffer(DirectByteBuffer.SS_MSG); ++ for (int i = 0; i < buffers.length; i++) { ++ ByteBuffer currBuffer = buffers[i]; + +- if (currBuffer.remaining() > dstBuffer.remaining()) { +- // we have more to write than what we can fit +- // set the limit of the source to reflect this +- int oldLimit = currBuffer.limit(); +- int newLimit = currBuffer.position() + dstBuffer.remaining(); +- currBuffer.limit(newLimit); +- writeCipher.update(currBuffer, dstBuffer); +- // and restore the limit when done +- currBuffer.limit(oldLimit); +- break; +- } else { +- writeCipher.update(currBuffer, dstBuffer); +- } +- } +- msgBuffer.flip(DirectByteBuffer.SS_MSG); +- totalWritten += writeMessage(msgBuffer); +- } +- } catch (ShortBufferException e) { +- logger.warning(""not enough room in the destination buffer, this should NEVER happen!""); +- e.printStackTrace(); +- super.close(""short buffer exception""); +- } ++ if (currBuffer.remaining() > dstBuffer.remaining()) { ++ // we have more to write than what we can fit ++ // set the limit of the source to reflect this ++ int oldLimit = currBuffer.limit(); ++ int newLimit = currBuffer.position() + dstBuffer.remaining(); ++ currBuffer.limit(newLimit); ++ writeCipher.update(currBuffer, dstBuffer); ++ // and restore the limit when done ++ currBuffer.limit(oldLimit); ++ break; ++ } else { ++ writeCipher.update(currBuffer, dstBuffer); ++ } ++ } ++ msgBuffer.flip(DirectByteBuffer.SS_MSG); ++ totalWritten += writeMessageToFriendConnection(msgBuffer); ++ } ++ } catch (ShortBufferException e) { ++ logger.warning(""not enough room in the destination buffer, this should NEVER happen!""); ++ e.printStackTrace(); ++ super.close(""short buffer exception""); ++ } -@@ -32,6 +34,7 @@ - import org.apache.hadoop.yarn.api.records.ContainerId; - import org.apache.hadoop.yarn.api.records.LogAggregationContext; - import org.apache.hadoop.yarn.api.records.NodeId; -+import 
org.apache.hadoop.yarn.api.records.NodeLabel; - import org.apache.hadoop.yarn.api.records.Priority; - import org.apache.hadoop.yarn.api.records.Resource; - import org.apache.hadoop.yarn.api.records.ResourceRequest; -@@ -51,9 +54,13 @@ - import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState; - import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer; - import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerState; -+import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode; - import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler; - import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerAppReport; -+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerNode; - import org.apache.hadoop.yarn.server.resourcemanager.scheduler.YarnScheduler; -+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerApp; -+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeUpdateSchedulerEvent; - import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager; - import org.apache.hadoop.yarn.server.utils.BuilderUtils; - import org.junit.Assert; -@@ -327,387 +334,4 @@ protected RMSecretManagerService createRMSecretManagerService() { - rm1.waitForState(attempt.getAppAttemptId(), RMAppAttemptState.ALLOCATED); - MockRM.launchAndRegisterAM(app1, rm1, nm1); - } -- -- private Configuration getConfigurationWithQueueLabels(Configuration config) { -- CapacitySchedulerConfiguration conf = -- new CapacitySchedulerConfiguration(config); -- -- // Define top-level queues -- conf.setQueues(CapacitySchedulerConfiguration.ROOT, new String[] {""a"", ""b"", ""c""}); -- conf.setCapacityByLabel(CapacitySchedulerConfiguration.ROOT, ""x"", 100); -- conf.setCapacityByLabel(CapacitySchedulerConfiguration.ROOT, ""y"", 100); +- return totalWritten; +- } ++ return totalWritten; ++ } + +- public long read(ByteBuffer[] buffers, int array_offset, int length) throws IOException { +- DirectByteBuffer[] tempBufferPool = new DirectByteBuffer[buffers.length]; +- ByteBuffer[] tempBuffers = new ByteBuffer[buffers.length]; +- for (int i = 0; i < buffers.length; i++) { +- tempBufferPool[i] = DirectByteBufferPool.getBuffer(DirectByteBuffer.AL_MSG, buffers[i].remaining()); +- tempBuffers[i] = tempBufferPool[i].getBuffer(DirectByteBuffer.SS_MSG); +- } +- long len = super.read(tempBuffers, array_offset, length); +- try { +- for (int i = 0; i < tempBuffers.length; i++) { +- readCipher.update(tempBuffers[i], buffers[i]); +- tempBufferPool[i].returnToPool(); +- } +- } catch (ShortBufferException e) { +- // TODO Auto-generated catch block +- e.printStackTrace(); +- } +- return len; +- } ++ public long read(ByteBuffer[] buffers, int array_offset, int length) throws IOException { ++ DirectByteBuffer[] tempBufferPool = new DirectByteBuffer[buffers.length]; ++ ByteBuffer[] tempBuffers = new ByteBuffer[buffers.length]; ++ for (int i = 0; i < buffers.length; i++) { ++ tempBufferPool[i] = DirectByteBufferPool.getBuffer(DirectByteBuffer.AL_MSG, ++ buffers[i].remaining()); ++ tempBuffers[i] = tempBufferPool[i].getBuffer(DirectByteBuffer.SS_MSG); ++ } ++ long len = super.read(tempBuffers, array_offset, length); ++ try { ++ for (int i = 0; i < tempBuffers.length; i++) { ++ readCipher.update(tempBuffers[i], buffers[i]); ++ tempBufferPool[i].returnToPool(); ++ } ++ } catch (ShortBufferException e) { ++ // TODO Auto-generated catch block ++ 
e.printStackTrace(); ++ } ++ return len; ++ } + +- public String getDescription() { +- return ""ENCR: "" + super.getDescription(); +- } ++ public String getDescription() { ++ return ""ENCR: "" + super.getDescription(); ++ } + } +diff --git a/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/network/SearchManager.java b/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/network/SearchManager.java +index 85eeb392..6095107d 100644 +--- a/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/network/SearchManager.java ++++ b/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/network/SearchManager.java +@@ -57,1603 +57,1711 @@ + import edu.washington.cs.oneswarm.f2f.network.DelayedExecutorService.DelayedExecutionEntry; + import edu.washington.cs.oneswarm.f2f.network.DelayedExecutorService.DelayedExecutor; + import edu.washington.cs.oneswarm.f2f.network.FriendConnection.OverlayRegistrationError; ++import edu.washington.cs.oneswarm.f2f.servicesharing.ServiceSharingManager; ++import edu.washington.cs.oneswarm.f2f.servicesharing.ServiceSharingManager.SharedService; + import edu.washington.cs.oneswarm.f2f.share.ShareManagerTools; + import edu.washington.cs.oneswarm.ui.gwt.BackendErrorLog; + + public class SearchManager { + +- public static final String SEARCH_QUEUE_THREAD_NAME = ""DelayedSearchQueue""; +- +- private final static BigFatLock lock = OverlayManager.lock; +- private static Logger logger = Logger.getLogger(SearchManager.class.getName()); +- // search sources are remembered for 1 minute, any replies after this will +- // be dropped +- public static final long MAX_SEARCH_AGE = 60 * 1000; +- public static final int MAX_SEARCH_QUEUE_LENGTH = 100; +-// private static final int MAX_SEARCH_RESP_BEFORE_CANCEL = COConfigurationManager.getIntParameter(""f2f_search_max_paths""); +- +- protected int mMaxSearchResponsesBeforeCancel = COConfigurationManager.getIntParameter(""f2f_search_max_paths""); +- +- // don't respond if average torrent upload rate is less than 10K/s +- private static final double NO_RESPONSE_TORRENT_AVERAGE_RATE = 10000; +- +- private static final double NO_RESPONSE_TOTAL_FRAC_OF_MAX_UPLOAD = 0.9; +- +- private static final double NO_RESPONSE_TRANSPORT_FRAC_OF_MAX_UPLOAD = 0.75; +- /* +- * this is to avoid searches living forever, search uid are remembered for +- * 45min-1h, there are 4 bloom filter buckets that are rotating, each one +- * containing 15minutes worth of searches +- */ +- private static final int RECENT_SEARCH_BUCKETS = 4; - -- final String A = CapacitySchedulerConfiguration.ROOT + "".a""; -- conf.setCapacity(A, 10); -- conf.setMaximumCapacity(A, 15); -- conf.setAccessibleNodeLabels(A, toSet(""x"")); -- conf.setCapacityByLabel(A, ""x"", 100); -- -- final String B = CapacitySchedulerConfiguration.ROOT + "".b""; -- conf.setCapacity(B, 20); -- conf.setAccessibleNodeLabels(B, toSet(""y"")); -- conf.setCapacityByLabel(B, ""y"", 100); -- -- final String C = CapacitySchedulerConfiguration.ROOT + "".c""; -- conf.setCapacity(C, 70); -- conf.setMaximumCapacity(C, 70); -- conf.setAccessibleNodeLabels(C, RMNodeLabelsManager.EMPTY_STRING_SET); -- -- // Define 2nd-level queues -- final String A1 = A + "".a1""; -- conf.setQueues(A, new String[] {""a1""}); -- conf.setCapacity(A1, 100); -- conf.setMaximumCapacity(A1, 100); -- conf.setCapacityByLabel(A1, ""x"", 100); -- -- final String B1 = B + "".b1""; -- conf.setQueues(B, new String[] {""b1""}); -- conf.setCapacity(B1, 100); -- conf.setMaximumCapacity(B1, 100); -- conf.setCapacityByLabel(B1, ""y"", 100); +- private static final long 
RECENT_SEARCH_MEMORY = 20 * 60 * 1000; +-// static final int SEARCH_DELAY = COConfigurationManager.getIntParameter(""f2f_search_forward_delay""); +- protected int mSearchDelay = COConfigurationManager.getIntParameter(""f2f_search_forward_delay""); - -- final String C1 = C + "".c1""; -- conf.setQueues(C, new String[] {""c1""}); -- conf.setCapacity(C1, 100); -- conf.setMaximumCapacity(C1, 100); -- -- return conf; -- } -- -- private void checkTaskContainersHost(ApplicationAttemptId attemptId, -- ContainerId containerId, ResourceManager rm, String host) { -- YarnScheduler scheduler = rm.getRMContext().getScheduler(); -- SchedulerAppReport appReport = scheduler.getSchedulerAppInfo(attemptId); +- /** +- * This Map is protected by the BigFatLock: lock. We use this to drop searches from friends +- * that are crowding the outgoing search queue early, thus allowing friends that send searches +- * more rarely to get through. +- * +- * This map is emptied once every 60 seconds to deal with accounting errors that may accumulate. +- */ +- class MutableInteger { public int v=0; } +- long lastSearchAccountingFlush = System.currentTimeMillis(); +- private final Map searchesPerFriend = new HashMap(); +- +- private int bloomSearchesBlockedCurr = 0; +- +- private int bloomSearchesBlockedPrev = 0; +- private int bloomSearchesSentCurr = 0; +- private int bloomSearchesSentPrev = 0; +- +- private final HashMap canceledSearches; +- private final DebugChannelSetupErrorStats debugChannelIdErrorSetupErrorStats = new DebugChannelSetupErrorStats(); +- +- private final DelayedSearchQueue delayedSearchQueue; +- +- // private final DeterministicDelayResponseQueue delayedResponseQueue; +- +- private final FileListManager filelistManager; +- +- private final HashMap forwardedSearches; +- private int forwardedSearchNum = 0; +- private List hashSearchStats = new LinkedList(); +- +- private boolean includeLanUploads; +- private final double NO_FORWARD_FRAC_OF_MAX_UPLOAD = 0.9; +- private final OverlayManager overlayManager; +- +- private final Random random = new Random(); +- private final RandomnessManager randomnessManager; +- +- private int rateLimitInKBps; +- +- private final RotatingBloomFilter recentSearches; +- private final HashMap sentSearches; +- private final GlobalManagerStats stats; +- private final TextSearchManager textSearchManager; +- +- private List textSearchStats = new LinkedList(); +- +- private final DelayedExecutor delayedExecutor; +- +- private String[] filteredKeywords = new String[0]; +- +- public SearchManager(OverlayManager overlayManager, FileListManager filelistManager, RandomnessManager randomnessManager, GlobalManagerStats stats) { +- this.stats = stats; +- this.delayedExecutor = DelayedExecutorService.getInstance().getVariableDelayExecutor(); +- // this.delayedResponseQueue = new DeterministicDelayResponseQueue(); +- this.overlayManager = overlayManager; +- this.sentSearches = new HashMap(); +- this.forwardedSearches = new HashMap(); +- this.canceledSearches = new HashMap(); +- this.filelistManager = filelistManager; +- this.randomnessManager = randomnessManager; +- this.textSearchManager = new TextSearchManager(); +- this.recentSearches = new RotatingBloomFilter(RECENT_SEARCH_MEMORY, RECENT_SEARCH_BUCKETS); +- this.delayedSearchQueue = new DelayedSearchQueue(mSearchDelay); +- COConfigurationManager.addAndFireParameterListeners(new String[] { ""LAN Speed Enabled"", ""Max Upload Speed KBs"", ""oneswarm.search.filter.keywords"", +- ""f2f_search_max_paths"", ""f2f_search_forward_delay"" }, new 
ParameterListener() { +- public void parameterChanged(String parameterName) { +- includeLanUploads = !COConfigurationManager.getBooleanParameter(""LAN Speed Enabled""); +- rateLimitInKBps = COConfigurationManager.getIntParameter(""Max Upload Speed KBs""); +- +- StringList keywords = COConfigurationManager.getStringListParameter(""oneswarm.search.filter.keywords""); +- if (keywords != null) { +- String[] neu = new String[keywords.size()]; +- for (int i = 0; i < keywords.size(); i++) { +- String firstTok = (new StringTokenizer(keywords.get(i))).nextToken(); +- neu[i] = firstTok; +- } +- filteredKeywords = neu; +- logger.fine(""Updated filtered keywords "" + keywords.size()); +- } - -- Assert.assertTrue(appReport.getLiveContainers().size() > 0); -- for (RMContainer c : appReport.getLiveContainers()) { -- if (c.getContainerId().equals(containerId)) { -- Assert.assertEquals(host, c.getAllocatedNode().getHost()); -- } -- } -- } -- -- @SuppressWarnings(""unchecked"") -- private Set toSet(E... elements) { -- Set set = Sets.newHashSet(elements); -- return set; -- } -- -- @Test (timeout = 300000) -- public void testContainerAllocationWithSingleUserLimits() throws Exception { -- final RMNodeLabelsManager mgr = new NullRMNodeLabelsManager(); -- mgr.init(conf); +- mMaxSearchResponsesBeforeCancel = COConfigurationManager.getIntParameter(""f2f_search_max_paths""); - -- // set node -> label -- mgr.addToCluserNodeLabels(ImmutableSet.of(""x"", ""y"")); -- mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance(""h1"", 0), toSet(""x""), -- NodeId.newInstance(""h2"", 0), toSet(""y""))); +- mSearchDelay = COConfigurationManager.getIntParameter(""f2f_search_forward_delay""); +- delayedSearchQueue.setDelay(mSearchDelay); +- } +- }); +- } - -- // inject node label manager -- MockRM rm1 = new MockRM(TestUtils.getConfigurationWithDefaultQueueLabels(conf)) { -- @Override -- public RMNodeLabelsManager createNodeLabelManager() { -- return mgr; -- } -- }; +- private boolean canForwardSearch() { +- double util = fracUpload(); +- if (util == -1 || util < NO_FORWARD_FRAC_OF_MAX_UPLOAD) { +- return true; +- } else { +- logger.finest(""not forwarding search (overloaded, util="" + util + "")""); +- return false; +- } +- } - -- rm1.getRMContext().setNodeLabelManager(mgr); -- rm1.start(); -- MockNM nm1 = rm1.registerNode(""h1:1234"", 8000); // label = x -- rm1.registerNode(""h2:1234"", 8000); // label = y -- MockNM nm3 = rm1.registerNode(""h3:1234"", 8000); // label = +- private boolean canRespondToSearch() { +- double totalUtil = fracUpload(); +- if (totalUtil == -1) { +- return true; +- } +- // ok, check if we are using more than 90% of total +- if (totalUtil < NO_RESPONSE_TOTAL_FRAC_OF_MAX_UPLOAD) { +- return true; +- } +- double transUtil = fracTransportUpload(); +- // check if we are using more than 75% for transports +- if (transUtil < NO_RESPONSE_TRANSPORT_FRAC_OF_MAX_UPLOAD) { +- return true; +- } - -- // launch an app to queue a1 (label = x), and check all container will -- // be allocated in h1 -- RMApp app1 = rm1.submitApp(200, ""app"", ""user"", null, ""a1""); -- MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1); -- -- // A has only 10% of x, so it can only allocate one container in label=empty -- ContainerId containerId = -- ContainerId.newContainerId(am1.getApplicationAttemptId(), 2); -- am1.allocate(""*"", 1024, 1, new ArrayList(), """"); -- Assert.assertTrue(rm1.waitForState(nm3, containerId, -- RMContainerState.ALLOCATED, 10 * 1000)); -- // Cannot allocate 2nd label=empty container -- containerId = -- 
ContainerId.newContainerId(am1.getApplicationAttemptId(), 3); -- am1.allocate(""*"", 1024, 1, new ArrayList(), """"); -- Assert.assertFalse(rm1.waitForState(nm3, containerId, -- RMContainerState.ALLOCATED, 10 * 1000)); +- double torrentAvgSpeed = getAverageUploadPerRunningTorrent(); +- if (torrentAvgSpeed == -1) { +- return true; +- } +- if (torrentAvgSpeed > NO_RESPONSE_TORRENT_AVERAGE_RATE) { +- return true; +- } +- if (logger.isLoggable(Level.FINER)) { +- logger.finer(""not responding to search (overloaded, util="" + transUtil + "")""); +- } +- return false; +- } - -- // A has default user limit = 100, so it can use all resource in label = x -- // We can allocate floor(8000 / 1024) = 7 containers -- for (int id = 3; id <= 8; id++) { -- containerId = -- ContainerId.newContainerId(am1.getApplicationAttemptId(), id); -- am1.allocate(""*"", 1024, 1, new ArrayList(), ""x""); -- Assert.assertTrue(rm1.waitForState(nm1, containerId, -- RMContainerState.ALLOCATED, 10 * 1000)); -- } -- rm1.close(); -- } -- -- @Test(timeout = 300000) -- public void testContainerAllocateWithComplexLabels() throws Exception { -- /* -- * Queue structure: -- * root (*) -- * ________________ -- * / \ -- * a x(100%), y(50%) b y(50%), z(100%) -- * ________________ ______________ -- * / / \ -- * a1 (x,y) b1(no) b2(y,z) -- * 100% y = 100%, z = 100% -- * -- * Node structure: -- * h1 : x -- * h2 : y -- * h3 : y -- * h4 : z -- * h5 : NO -- * -- * Total resource: -- * x: 4G -- * y: 6G -- * z: 2G -- * *: 2G -- * -- * Resource of -- * a1: x=4G, y=3G, NO=0.2G -- * b1: NO=0.9G (max=1G) -- * b2: y=3, z=2G, NO=0.9G (max=1G) -- * -- * Each node can only allocate two containers -- */ +- public void clearTimedOutSearches() { +- lock.lock(); +- try { +- /* +- * check if we need to rotate the bloom filter of recent searches +- */ +- boolean rotated = recentSearches.rotateIfNeeded(); +- if (rotated) { +- bloomSearchesBlockedPrev = bloomSearchesBlockedCurr; +- bloomSearchesBlockedCurr = 0; +- bloomSearchesSentPrev = bloomSearchesSentCurr; +- bloomSearchesSentCurr = 0; +- } - -- // set node -> label -- mgr.addToCluserNodeLabels(ImmutableSet.of(""x"", ""y"", ""z"")); -- mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance(""h1"", 0), -- toSet(""x""), NodeId.newInstance(""h2"", 0), toSet(""y""), -- NodeId.newInstance(""h3"", 0), toSet(""y""), NodeId.newInstance(""h4"", 0), -- toSet(""z""), NodeId.newInstance(""h5"", 0), -- RMNodeLabelsManager.EMPTY_STRING_SET)); +- for (Iterator iterator = forwardedSearches.values().iterator(); iterator.hasNext();) { +- ForwardedSearch fs = iterator.next(); +- if (fs.isTimedOut()) { +- iterator.remove(); +- } +- } - -- // inject node label manager -- MockRM rm1 = new MockRM(TestUtils.getComplexConfigurationWithQueueLabels(conf)) { -- @Override -- public RMNodeLabelsManager createNodeLabelManager() { -- return mgr; -- } -- }; +- for (Iterator iterator = sentSearches.values().iterator(); iterator.hasNext();) { +- SentSearch sentSearch = iterator.next(); +- if (sentSearch.isTimedOut()) { +- iterator.remove(); +- if (sentSearch.getSearch() instanceof OSF2FHashSearch) { +- hashSearchStats.add(sentSearch.getResponseNum()); +- } else if (sentSearch.getSearch() instanceof OSF2FTextSearch) { +- textSearchStats.add(sentSearch.getResponseNum()); +- } +- } +- } - -- rm1.getRMContext().setNodeLabelManager(mgr); -- rm1.start(); -- MockNM nm1 = rm1.registerNode(""h1:1234"", 2048); -- MockNM nm2 = rm1.registerNode(""h2:1234"", 2048); -- MockNM nm3 = rm1.registerNode(""h3:1234"", 2048); -- MockNM nm4 = 
rm1.registerNode(""h4:1234"", 2048); -- MockNM nm5 = rm1.registerNode(""h5:1234"", 2048); -- -- ContainerId containerId; +- /* +- * Delete any expired canceled searches +- */ +- LinkedList toDelete = new LinkedList(); +- for (Integer key : canceledSearches.keySet()) { +- long age = System.currentTimeMillis() - canceledSearches.get(key); +- if (age > MAX_SEARCH_AGE) { +- toDelete.add(key); +- } +- } - -- // launch an app to queue a1 (label = x), and check all container will -- // be allocated in h1 -- RMApp app1 = rm1.submitApp(1024, ""app"", ""user"", null, ""a1""); -- MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1); +- for (Integer key : toDelete) { +- canceledSearches.remove(key); +- } - -- // request a container (label = y). can be allocated on nm2 -- am1.allocate(""*"", 1024, 1, new ArrayList(), ""y""); -- containerId = -- ContainerId.newContainerId(am1.getApplicationAttemptId(), 2L); -- Assert.assertTrue(rm1.waitForState(nm2, containerId, -- RMContainerState.ALLOCATED, 10 * 1000)); -- checkTaskContainersHost(am1.getApplicationAttemptId(), containerId, rm1, -- ""h2""); +- textSearchManager.clearOldResponses(); +- } finally { +- lock.unlock(); +- } +- } - -- // launch an app to queue b1 (label = y), and check all container will -- // be allocated in h5 -- RMApp app2 = rm1.submitApp(1024, ""app"", ""user"", null, ""b1""); -- MockAM am2 = MockRM.launchAndRegisterAM(app2, rm1, nm5); +- public List debugCanceledSearches() { +- List l = new LinkedList(); +- lock.lock(); +- try { +- for (Integer s : canceledSearches.keySet()) { +- l.add(""search="" + Integer.toHexString(s) + "" age="" + ((System.currentTimeMillis() - canceledSearches.get(s)) / 1000) + ""s""); +- } +- } finally { +- lock.unlock(); +- } +- return l; +- } - -- // request a container for AM, will succeed -- // and now b1's queue capacity will be used, cannot allocate more containers -- // (Maximum capacity reached) -- am2.allocate(""*"", 1024, 1, new ArrayList()); -- containerId = ContainerId.newContainerId(am2.getApplicationAttemptId(), 2); -- Assert.assertFalse(rm1.waitForState(nm4, containerId, -- RMContainerState.ALLOCATED, 10 * 1000)); -- Assert.assertFalse(rm1.waitForState(nm5, containerId, -- RMContainerState.ALLOCATED, 10 * 1000)); -- -- // launch an app to queue b2 -- RMApp app3 = rm1.submitApp(1024, ""app"", ""user"", null, ""b2""); -- MockAM am3 = MockRM.launchAndRegisterAM(app3, rm1, nm5); +- public List debugForwardedSearches() { +- List l = new LinkedList(); +- lock.lock(); +- try { +- for (ForwardedSearch f : forwardedSearches.values()) { +- l.add(""search="" + Integer.toHexString(f.getSearchId()) + "" responses="" + f.getResponseNum() + "" age="" + (f.getAge() / 1000) + ""s""); +- } +- } finally { +- lock.unlock(); +- } +- return l; +- } - -- // request a container. try to allocate on nm1 (label = x) and nm3 (label = -- // y,z). Will successfully allocate on nm3 -- am3.allocate(""*"", 1024, 1, new ArrayList(), ""y""); -- containerId = ContainerId.newContainerId(am3.getApplicationAttemptId(), 2); -- Assert.assertFalse(rm1.waitForState(nm1, containerId, -- RMContainerState.ALLOCATED, 10 * 1000)); -- Assert.assertTrue(rm1.waitForState(nm3, containerId, -- RMContainerState.ALLOCATED, 10 * 1000)); -- checkTaskContainersHost(am3.getApplicationAttemptId(), containerId, rm1, -- ""h3""); -- -- // try to allocate container (request label = z) on nm4 (label = y,z). -- // Will successfully allocate on nm4 only. 
-- am3.allocate(""*"", 1024, 1, new ArrayList(), ""z""); -- containerId = ContainerId.newContainerId(am3.getApplicationAttemptId(), 3L); -- Assert.assertTrue(rm1.waitForState(nm4, containerId, -- RMContainerState.ALLOCATED, 10 * 1000)); -- checkTaskContainersHost(am3.getApplicationAttemptId(), containerId, rm1, -- ""h4""); +- public List debugSentSearches() { +- List l = new LinkedList(); +- lock.lock(); +- try { +- for (SentSearch s : sentSearches.values()) { +- l.add(""search="" + Integer.toHexString(s.getSearch().getSearchID()) + "" responses="" + s.getResponseNum() + "" age="" + (s.getAge() / 1000) + ""s""); +- } +- } finally { +- lock.unlock(); +- } +- return l; +- } - -- rm1.close(); -- } +- private void forwardSearch(FriendConnection source, OSF2FSearch search) { +- lock.lock(); +- try { - -- @Test (timeout = 120000) -- public void testContainerAllocateWithLabels() throws Exception { -- // set node -> label -- mgr.addToCluserNodeLabels(ImmutableSet.of(""x"", ""y"")); -- mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance(""h1"", 0), toSet(""x""), -- NodeId.newInstance(""h2"", 0), toSet(""y""))); +- // check if search is canceled or forwarded first +- int searchID = search.getSearchID(); +- if (forwardedSearches.containsKey(searchID)) { +- logger.finest(""not forwarding search, already forwarded. id: "" + searchID); +- return; +- } - -- // inject node label manager -- MockRM rm1 = new MockRM(getConfigurationWithQueueLabels(conf)) { -- @Override -- public RMNodeLabelsManager createNodeLabelManager() { -- return mgr; -- } -- }; +- if (canceledSearches.containsKey(searchID)) { +- logger.finest(""not forwarding search, cancel received. id: "" + searchID); +- return; +- } - -- rm1.getRMContext().setNodeLabelManager(mgr); -- rm1.start(); -- MockNM nm1 = rm1.registerNode(""h1:1234"", 8000); // label = x -- MockNM nm2 = rm1.registerNode(""h2:1234"", 8000); // label = y -- MockNM nm3 = rm1.registerNode(""h3:1234"", 8000); // label = -- -- ContainerId containerId; +- int valueID = search.getValueID(); +- if (recentSearches.contains(searchID, valueID)) { +- bloomSearchesBlockedCurr++; +- logger.finest(""not forwarding search, in recent filter. id: "" + searchID); +- return; +- } +- bloomSearchesSentCurr++; +- forwardedSearchNum++; +- if (logger.isLoggable(Level.FINEST)) { +- logger.finest(""forwarding search "" + search.getDescription() + "" id: "" + searchID); +- } +- forwardedSearches.put(searchID, new ForwardedSearch(source, search)); +- recentSearches.insert(searchID, valueID); +- } finally { +- lock.unlock(); +- } - -- // launch an app to queue a1 (label = x), and check all container will -- // be allocated in h1 -- RMApp app1 = rm1.submitApp(200, ""app"", ""user"", null, ""a1""); -- MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm3); +- overlayManager.forwardSearchOrCancel(source, search.clone()); +- } - -- // request a container. 
-- am1.allocate(""*"", 1024, 1, new ArrayList(), ""x""); -- containerId = -- ContainerId.newContainerId(am1.getApplicationAttemptId(), 2); -- Assert.assertFalse(rm1.waitForState(nm2, containerId, -- RMContainerState.ALLOCATED, 10 * 1000)); -- Assert.assertTrue(rm1.waitForState(nm1, containerId, -- RMContainerState.ALLOCATED, 10 * 1000)); -- checkTaskContainersHost(am1.getApplicationAttemptId(), containerId, rm1, -- ""h1""); +- private double fracTransportUpload() { - -- // launch an app to queue b1 (label = y), and check all container will -- // be allocated in h2 -- RMApp app2 = rm1.submitApp(200, ""app"", ""user"", null, ""b1""); -- MockAM am2 = MockRM.launchAndRegisterAM(app2, rm1, nm3); +- if (rateLimitInKBps < 1) { +- return -1; +- } +- long uploadRate = overlayManager.getTransportSendRate(includeLanUploads); - -- // request a container. -- am2.allocate(""*"", 1024, 1, new ArrayList(), ""y""); -- containerId = ContainerId.newContainerId(am2.getApplicationAttemptId(), 2); -- Assert.assertFalse(rm1.waitForState(nm1, containerId, -- RMContainerState.ALLOCATED, 10 * 1000)); -- Assert.assertTrue(rm1.waitForState(nm2, containerId, -- RMContainerState.ALLOCATED, 10 * 1000)); -- checkTaskContainersHost(am2.getApplicationAttemptId(), containerId, rm1, -- ""h2""); -- -- // launch an app to queue c1 (label = """"), and check all container will -- // be allocated in h3 -- RMApp app3 = rm1.submitApp(200, ""app"", ""user"", null, ""c1""); -- MockAM am3 = MockRM.launchAndRegisterAM(app3, rm1, nm3); +- double util = uploadRate / (rateLimitInKBps * 1024.0); +- return util; +- } - -- // request a container. -- am3.allocate(""*"", 1024, 1, new ArrayList()); -- containerId = ContainerId.newContainerId(am3.getApplicationAttemptId(), 2); -- Assert.assertFalse(rm1.waitForState(nm2, containerId, -- RMContainerState.ALLOCATED, 10 * 1000)); -- Assert.assertTrue(rm1.waitForState(nm3, containerId, -- RMContainerState.ALLOCATED, 10 * 1000)); -- checkTaskContainersHost(am3.getApplicationAttemptId(), containerId, rm1, -- ""h3""); +- private double fracUpload() { - -- rm1.close(); -- } -- -- @Test (timeout = 120000) -- public void testContainerAllocateWithDefaultQueueLabels() throws Exception { -- // This test is pretty much similar to testContainerAllocateWithLabel. 
-- // Difference is, this test doesn't specify label expression in ResourceRequest, -- // instead, it uses default queue label expression +- if (rateLimitInKBps < 1) { +- return -1; +- } +- long uploadRate; +- if (!includeLanUploads) { +- uploadRate = stats.getProtocolSendRateNoLAN() + stats.getDataSendRateNoLAN(); +- } else { +- uploadRate = stats.getProtocolSendRate() + stats.getDataSendRate(); +- } - -- // set node -> label -- mgr.addToCluserNodeLabels(ImmutableSet.of(""x"", ""y"")); -- mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance(""h1"", 0), toSet(""x""), -- NodeId.newInstance(""h2"", 0), toSet(""y""))); +- double util = uploadRate / (rateLimitInKBps * 1024.0); +- return util; +- } - -- // inject node label manager -- MockRM rm1 = new MockRM(TestUtils.getConfigurationWithDefaultQueueLabels(conf)) { -- @Override -- public RMNodeLabelsManager createNodeLabelManager() { -- return mgr; -- } -- }; +- public int getAndClearForwardedSearchNum() { +- lock.lock(); +- try { +- int ret = forwardedSearchNum; +- forwardedSearchNum = 0; +- return ret; +- } finally { +- lock.unlock(); +- } +- } - -- rm1.getRMContext().setNodeLabelManager(mgr); -- rm1.start(); -- MockNM nm1 = rm1.registerNode(""h1:1234"", 8000); // label = x -- MockNM nm2 = rm1.registerNode(""h2:1234"", 8000); // label = y -- MockNM nm3 = rm1.registerNode(""h3:1234"", 8000); // label = -- -- ContainerId containerId; +- public List getAndClearHashSearchStats() { +- lock.lock(); +- try { +- List ret = hashSearchStats; +- hashSearchStats = new LinkedList(); +- return ret; +- } finally { +- lock.unlock(); +- } +- } - -- // launch an app to queue a1 (label = x), and check all container will -- // be allocated in h1 -- RMApp app1 = rm1.submitApp(200, ""app"", ""user"", null, ""a1""); -- MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1); +- public List getAndClearTextSearchStats() { +- lock.lock(); +- try { +- List ret = textSearchStats; +- textSearchStats = new LinkedList(); +- return ret; +- } finally { +- lock.unlock(); +- } +- } - -- // request a container. -- am1.allocate(""*"", 1024, 1, new ArrayList()); -- containerId = -- ContainerId.newContainerId(am1.getApplicationAttemptId(), 2); -- Assert.assertFalse(rm1.waitForState(nm3, containerId, -- RMContainerState.ALLOCATED, 10 * 1000)); -- Assert.assertTrue(rm1.waitForState(nm1, containerId, -- RMContainerState.ALLOCATED, 10 * 1000)); -- checkTaskContainersHost(am1.getApplicationAttemptId(), containerId, rm1, -- ""h1""); +- @SuppressWarnings(""unchecked"") +- private double getAverageUploadPerRunningTorrent() { +- LinkedList dms = new LinkedList(); +- final List downloadManagers = AzureusCoreImpl.getSingleton().getGlobalManager().getDownloadManagers(); +- dms.addAll(downloadManagers); +- +- long total = 0; +- int num = 0; +- +- for (DownloadManager dm : dms) { +- final DownloadManagerStats s = dm.getStats(); +- if (s == null) { +- continue; +- } +- final PEPeerManager p = dm.getPeerManager(); +- if (p == null) { +- continue; +- } - -- // launch an app to queue b1 (label = y), and check all container will -- // be allocated in h2 -- RMApp app2 = rm1.submitApp(200, ""app"", ""user"", null, ""b1""); -- MockAM am2 = MockRM.launchAndRegisterAM(app2, rm1, nm2); +- if (p.getNbPeers() == 0 && p.getNbSeeds() == 0) { +- continue; +- } - -- // request a container. 
-- am2.allocate(""*"", 1024, 1, new ArrayList()); -- containerId = ContainerId.newContainerId(am2.getApplicationAttemptId(), 2); -- Assert.assertFalse(rm1.waitForState(nm3, containerId, -- RMContainerState.ALLOCATED, 10 * 1000)); -- Assert.assertTrue(rm1.waitForState(nm2, containerId, -- RMContainerState.ALLOCATED, 10 * 1000)); -- checkTaskContainersHost(am2.getApplicationAttemptId(), containerId, rm1, -- ""h2""); -- -- // launch an app to queue c1 (label = """"), and check all container will -- // be allocated in h3 -- RMApp app3 = rm1.submitApp(200, ""app"", ""user"", null, ""c1""); -- MockAM am3 = MockRM.launchAndRegisterAM(app3, rm1, nm3); +- long uploadRate = s.getDataSendRate() + s.getProtocolSendRate(); +- total += uploadRate; +- num++; +- } +- if (num == 0) { +- return -1; +- } - -- // request a container. -- am3.allocate(""*"", 1024, 1, new ArrayList()); -- containerId = ContainerId.newContainerId(am3.getApplicationAttemptId(), 2); -- Assert.assertFalse(rm1.waitForState(nm2, containerId, -- RMContainerState.ALLOCATED, 10 * 1000)); -- Assert.assertTrue(rm1.waitForState(nm3, containerId, -- RMContainerState.ALLOCATED, 10 * 1000)); -- checkTaskContainersHost(am3.getApplicationAttemptId(), containerId, rm1, -- ""h3""); +- return ((double) total) / num; - -- rm1.close(); -- } - } -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestLeafQueue.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestLeafQueue.java -index 972cabbf2cc2c..0b5250b4fae87 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestLeafQueue.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestLeafQueue.java -@@ -351,7 +351,7 @@ public void testSingleQueueOneUserMetrics() throws Exception { - - // Only 1 container - a.assignContainers(clusterResource, node_0, new ResourceLimits( -- clusterResource)); -+ clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals( - (int)(node_0.getTotalResource().getMemory() * a.getCapacity()) - (1*GB), - a.getMetrics().getAvailableMB()); -@@ -487,7 +487,7 @@ public void testSingleQueueWithOneUser() throws Exception { - - // Only 1 container - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(1*GB, a.getUsedResources().getMemory()); - assertEquals(1*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); -@@ -498,7 +498,7 @@ public void testSingleQueueWithOneUser() throws Exception { - // Also 2nd -> minCapacity = 1024 since (.1 * 8G) < minAlloc, also - // you can get one container more than user-limit - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(2*GB, a.getUsedResources().getMemory()); - assertEquals(2*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); -@@ -507,7 +507,7 @@ 
public void testSingleQueueWithOneUser() throws Exception { - - // Can't allocate 3rd due to user-limit - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(2*GB, a.getUsedResources().getMemory()); - assertEquals(2*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); -@@ -517,7 +517,7 @@ public void testSingleQueueWithOneUser() throws Exception { - // Bump up user-limit-factor, now allocate should work - a.setUserLimitFactor(10); - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(3*GB, a.getUsedResources().getMemory()); - assertEquals(3*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); -@@ -526,7 +526,7 @@ public void testSingleQueueWithOneUser() throws Exception { - - // One more should work, for app_1, due to user-limit-factor - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(4*GB, a.getUsedResources().getMemory()); - assertEquals(3*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(1*GB, app_1.getCurrentConsumption().getMemory()); -@@ -537,7 +537,7 @@ public void testSingleQueueWithOneUser() throws Exception { - // Now - no more allocs since we are at max-cap - a.setMaxCapacity(0.5f); - a.assignContainers(clusterResource, node_0, new ResourceLimits( -- clusterResource)); -+ clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(4*GB, a.getUsedResources().getMemory()); - assertEquals(3*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(1*GB, app_1.getCurrentConsumption().getMemory()); -@@ -653,21 +653,21 @@ public void testUserLimits() throws Exception { - - // 1 container to user_0 - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(2*GB, a.getUsedResources().getMemory()); - assertEquals(2*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); - - // Again one to user_0 since he hasn't exceeded user limit yet - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(3*GB, a.getUsedResources().getMemory()); - assertEquals(2*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(1*GB, app_1.getCurrentConsumption().getMemory()); - - // One more to user_0 since he is the only active user - a.assignContainers(clusterResource, node_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(4*GB, a.getUsedResources().getMemory()); - assertEquals(2*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(2*GB, app_1.getCurrentConsumption().getMemory()); -@@ -719,10 +719,10 @@ public void testComputeUserLimitAndSetHeadroom(){ - 1, qb.getActiveUsersManager().getNumActiveUsers()); - //get headroom - qb.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new 
ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - qb.computeUserLimitAndSetHeadroom(app_0, clusterResource, app_0 - .getResourceRequest(u0Priority, ResourceRequest.ANY).getCapability(), -- null); -+ """", SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - - //maxqueue 16G, userlimit 13G, - 4G used = 9G - assertEquals(9*GB,app_0.getHeadroom().getMemory()); -@@ -739,10 +739,10 @@ public void testComputeUserLimitAndSetHeadroom(){ - u1Priority, recordFactory))); - qb.submitApplicationAttempt(app_2, user_1); - qb.assignContainers(clusterResource, node_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - qb.computeUserLimitAndSetHeadroom(app_0, clusterResource, app_0 - .getResourceRequest(u0Priority, ResourceRequest.ANY).getCapability(), -- null); -+ """", SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - - assertEquals(8*GB, qb.getUsedResources().getMemory()); - assertEquals(4*GB, app_0.getCurrentConsumption().getMemory()); -@@ -782,12 +782,12 @@ public void testComputeUserLimitAndSetHeadroom(){ - qb.submitApplicationAttempt(app_1, user_0); - qb.submitApplicationAttempt(app_3, user_1); - qb.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - qb.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - qb.computeUserLimitAndSetHeadroom(app_3, clusterResource, app_3 - .getResourceRequest(u1Priority, ResourceRequest.ANY).getCapability(), -- null); -+ """", SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(4*GB, qb.getUsedResources().getMemory()); - //maxqueue 16G, userlimit 7G, used (by each user) 2G, headroom 5G (both) - assertEquals(5*GB, app_3.getHeadroom().getMemory()); -@@ -803,13 +803,13 @@ public void testComputeUserLimitAndSetHeadroom(){ - TestUtils.createResourceRequest(ResourceRequest.ANY, 6*GB, 1, true, - u0Priority, recordFactory))); - qb.assignContainers(clusterResource, node_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - qb.computeUserLimitAndSetHeadroom(app_4, clusterResource, app_4 - .getResourceRequest(u0Priority, ResourceRequest.ANY).getCapability(), -- null); -+ """", SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - qb.computeUserLimitAndSetHeadroom(app_3, clusterResource, app_3 - .getResourceRequest(u1Priority, ResourceRequest.ANY).getCapability(), -- null); -+ """", SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - - - //app3 is user1, active from last test case -@@ -876,7 +876,7 @@ public void testUserHeadroomMultiApp() throws Exception { - priority, recordFactory))); - - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(1*GB, a.getUsedResources().getMemory()); - assertEquals(1*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); -@@ -893,7 +893,7 @@ public void testUserHeadroomMultiApp() throws Exception { - priority, recordFactory))); - - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(2*GB, 
a.getUsedResources().getMemory()); - assertEquals(1*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(1*GB, app_1.getCurrentConsumption().getMemory()); -@@ -982,7 +982,7 @@ public void testHeadroomWithMaxCap() throws Exception { - - // 1 container to user_0 - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(2*GB, a.getUsedResources().getMemory()); - assertEquals(2*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); -@@ -993,7 +993,7 @@ public void testHeadroomWithMaxCap() throws Exception { - - // Again one to user_0 since he hasn't exceeded user limit yet - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(3*GB, a.getUsedResources().getMemory()); - assertEquals(2*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(1*GB, app_1.getCurrentConsumption().getMemory()); -@@ -1010,7 +1010,7 @@ public void testHeadroomWithMaxCap() throws Exception { - // No more to user_0 since he is already over user-limit - // and no more containers to queue since it's already at max-cap - a.assignContainers(clusterResource, node_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(3*GB, a.getUsedResources().getMemory()); - assertEquals(2*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(1*GB, app_1.getCurrentConsumption().getMemory()); -@@ -1024,7 +1024,7 @@ public void testHeadroomWithMaxCap() throws Exception { - priority, recordFactory))); - assertEquals(1, a.getActiveUsersManager().getNumActiveUsers()); - a.assignContainers(clusterResource, node_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(0*GB, app_2.getHeadroom().getMemory()); // hit queue max-cap - } - -@@ -1095,7 +1095,7 @@ public void testSingleQueueWithMultipleUsers() throws Exception { - - // Only 1 container - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(1*GB, a.getUsedResources().getMemory()); - assertEquals(1*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); -@@ -1103,7 +1103,7 @@ public void testSingleQueueWithMultipleUsers() throws Exception { - // Also 2nd -> minCapacity = 1024 since (.1 * 8G) < minAlloc, also - // you can get one container more than user-limit - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(2*GB, a.getUsedResources().getMemory()); - assertEquals(2*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); -@@ -1111,7 +1111,7 @@ public void testSingleQueueWithMultipleUsers() throws Exception { - // Can't allocate 3rd due to user-limit - a.setUserLimit(25); - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(2*GB, 
a.getUsedResources().getMemory()); - assertEquals(2*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); -@@ -1130,7 +1130,7 @@ public void testSingleQueueWithMultipleUsers() throws Exception { - // user_0 is at limit inspite of high user-limit-factor - a.setUserLimitFactor(10); - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(5*GB, a.getUsedResources().getMemory()); - assertEquals(2*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); -@@ -1140,7 +1140,7 @@ public void testSingleQueueWithMultipleUsers() throws Exception { - // Now allocations should goto app_0 since - // user_0 is at user-limit not above it - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(6*GB, a.getUsedResources().getMemory()); - assertEquals(3*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); -@@ -1151,7 +1151,7 @@ public void testSingleQueueWithMultipleUsers() throws Exception { - // Now - no more allocs since we are at max-cap - a.setMaxCapacity(0.5f); - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(6*GB, a.getUsedResources().getMemory()); - assertEquals(3*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); -@@ -1163,7 +1163,7 @@ public void testSingleQueueWithMultipleUsers() throws Exception { - a.setMaxCapacity(1.0f); - a.setUserLimitFactor(1); - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(7*GB, a.getUsedResources().getMemory()); - assertEquals(3*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); -@@ -1172,7 +1172,7 @@ public void testSingleQueueWithMultipleUsers() throws Exception { - - // Now we should assign to app_3 again since user_2 is under user-limit - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(8*GB, a.getUsedResources().getMemory()); - assertEquals(3*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); -@@ -1272,7 +1272,7 @@ public void testReservation() throws Exception { - - // Only 1 container - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(1*GB, a.getUsedResources().getMemory()); - assertEquals(1*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); -@@ -1283,7 +1283,7 @@ public void testReservation() throws Exception { - // Also 2nd -> minCapacity = 1024 since (.1 * 8G) < minAlloc, also - // you can get one container more than user-limit - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new 
ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(2*GB, a.getUsedResources().getMemory()); - assertEquals(2*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); -@@ -1292,7 +1292,7 @@ public void testReservation() throws Exception { - - // Now, reservation should kick in for app_1 - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(6*GB, a.getUsedResources().getMemory()); - assertEquals(2*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); -@@ -1309,7 +1309,7 @@ public void testReservation() throws Exception { - ContainerExitStatus.KILLED_BY_RESOURCEMANAGER), - RMContainerEventType.KILL, null, true); - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(5*GB, a.getUsedResources().getMemory()); - assertEquals(1*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); -@@ -1326,7 +1326,7 @@ public void testReservation() throws Exception { - ContainerExitStatus.KILLED_BY_RESOURCEMANAGER), - RMContainerEventType.KILL, null, true); - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(4*GB, a.getUsedResources().getMemory()); - assertEquals(0*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(4*GB, app_1.getCurrentConsumption().getMemory()); -@@ -1394,7 +1394,7 @@ public void testStolenReservedContainer() throws Exception { - // Start testing... 
- - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(2*GB, a.getUsedResources().getMemory()); - assertEquals(2*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); -@@ -1404,7 +1404,7 @@ public void testStolenReservedContainer() throws Exception { - - // Now, reservation should kick in for app_1 - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(6*GB, a.getUsedResources().getMemory()); - assertEquals(2*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); -@@ -1418,7 +1418,7 @@ public void testStolenReservedContainer() throws Exception { - doReturn(-1).when(a).getNodeLocalityDelay(); - - a.assignContainers(clusterResource, node_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(10*GB, a.getUsedResources().getMemory()); - assertEquals(2*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(4*GB, app_1.getCurrentConsumption().getMemory()); -@@ -1435,7 +1435,7 @@ public void testStolenReservedContainer() throws Exception { - ContainerExitStatus.KILLED_BY_RESOURCEMANAGER), - RMContainerEventType.KILL, null, true); - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(8*GB, a.getUsedResources().getMemory()); - assertEquals(0*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(8*GB, app_1.getCurrentConsumption().getMemory()); -@@ -1504,7 +1504,7 @@ public void testReservationExchange() throws Exception { - - // Only 1 container - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(1*GB, a.getUsedResources().getMemory()); - assertEquals(1*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); -@@ -1512,14 +1512,14 @@ public void testReservationExchange() throws Exception { - // Also 2nd -> minCapacity = 1024 since (.1 * 8G) < minAlloc, also - // you can get one container more than user-limit - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(2*GB, a.getUsedResources().getMemory()); - assertEquals(2*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); - - // Now, reservation should kick in for app_1 - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(6*GB, a.getUsedResources().getMemory()); - assertEquals(2*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); -@@ -1534,7 +1534,7 @@ public void testReservationExchange() throws Exception { - ContainerExitStatus.KILLED_BY_RESOURCEMANAGER), - RMContainerEventType.KILL, null, true); - a.assignContainers(clusterResource, node_0, -- new 
ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(5*GB, a.getUsedResources().getMemory()); - assertEquals(1*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); -@@ -1544,7 +1544,7 @@ public void testReservationExchange() throws Exception { - - // Re-reserve - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(5*GB, a.getUsedResources().getMemory()); - assertEquals(1*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); -@@ -1554,7 +1554,7 @@ public void testReservationExchange() throws Exception { - - // Try to schedule on node_1 now, should *move* the reservation - a.assignContainers(clusterResource, node_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(9*GB, a.getUsedResources().getMemory()); - assertEquals(1*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(4*GB, app_1.getCurrentConsumption().getMemory()); -@@ -1572,7 +1572,7 @@ public void testReservationExchange() throws Exception { - ContainerExitStatus.KILLED_BY_RESOURCEMANAGER), - RMContainerEventType.KILL, null, true); - CSAssignment assignment = a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(8*GB, a.getUsedResources().getMemory()); - assertEquals(0*GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(4*GB, app_1.getCurrentConsumption().getMemory()); -@@ -1644,7 +1644,7 @@ public void testLocalityScheduling() throws Exception { - - // Start with off switch, shouldn't allocate due to delay scheduling - assignment = a.assignContainers(clusterResource, node_2, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verify(app_0, never()).allocate(any(NodeType.class), eq(node_2), - any(Priority.class), any(ResourceRequest.class), any(Container.class)); - assertEquals(1, app_0.getSchedulingOpportunities(priority)); -@@ -1653,7 +1653,7 @@ public void testLocalityScheduling() throws Exception { - - // Another off switch, shouldn't allocate due to delay scheduling - assignment = a.assignContainers(clusterResource, node_2, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verify(app_0, never()).allocate(any(NodeType.class), eq(node_2), - any(Priority.class), any(ResourceRequest.class), any(Container.class)); - assertEquals(2, app_0.getSchedulingOpportunities(priority)); -@@ -1662,7 +1662,7 @@ public void testLocalityScheduling() throws Exception { - - // Another off switch, shouldn't allocate due to delay scheduling - assignment = a.assignContainers(clusterResource, node_2, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verify(app_0, never()).allocate(any(NodeType.class), eq(node_2), - any(Priority.class), any(ResourceRequest.class), any(Container.class)); - assertEquals(3, app_0.getSchedulingOpportunities(priority)); -@@ -1672,7 +1672,7 @@ public void testLocalityScheduling() throws Exception { - // Another off 
switch, now we should allocate - // since missedOpportunities=3 and reqdContainers=3 - assignment = a.assignContainers(clusterResource, node_2, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verify(app_0).allocate(eq(NodeType.OFF_SWITCH), eq(node_2), - any(Priority.class), any(ResourceRequest.class), any(Container.class)); - assertEquals(4, app_0.getSchedulingOpportunities(priority)); // should NOT reset -@@ -1681,7 +1681,7 @@ public void testLocalityScheduling() throws Exception { - - // NODE_LOCAL - node_0 - assignment = a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verify(app_0).allocate(eq(NodeType.NODE_LOCAL), eq(node_0), - any(Priority.class), any(ResourceRequest.class), any(Container.class)); - assertEquals(0, app_0.getSchedulingOpportunities(priority)); // should reset -@@ -1690,7 +1690,7 @@ public void testLocalityScheduling() throws Exception { - - // NODE_LOCAL - node_1 - assignment = a.assignContainers(clusterResource, node_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verify(app_0).allocate(eq(NodeType.NODE_LOCAL), eq(node_1), - any(Priority.class), any(ResourceRequest.class), any(Container.class)); - assertEquals(0, app_0.getSchedulingOpportunities(priority)); // should reset -@@ -1719,14 +1719,14 @@ public void testLocalityScheduling() throws Exception { - - // Shouldn't assign RACK_LOCAL yet - assignment = a.assignContainers(clusterResource, node_3, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(1, app_0.getSchedulingOpportunities(priority)); - assertEquals(2, app_0.getTotalRequiredResources(priority)); - assertEquals(NodeType.NODE_LOCAL, assignment.getType()); // None->NODE_LOCAL - - // Should assign RACK_LOCAL now - assignment = a.assignContainers(clusterResource, node_3, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verify(app_0).allocate(eq(NodeType.RACK_LOCAL), eq(node_3), - any(Priority.class), any(ResourceRequest.class), any(Container.class)); - assertEquals(0, app_0.getSchedulingOpportunities(priority)); // should reset -@@ -1808,7 +1808,7 @@ public void testApplicationPriorityScheduling() throws Exception { - // Start with off switch, shouldn't allocate P1 due to delay scheduling - // thus, no P2 either! - a.assignContainers(clusterResource, node_2, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verify(app_0, never()).allocate(any(NodeType.class), eq(node_2), - eq(priority_1), any(ResourceRequest.class), any(Container.class)); - assertEquals(1, app_0.getSchedulingOpportunities(priority_1)); -@@ -1821,7 +1821,7 @@ public void testApplicationPriorityScheduling() throws Exception { - // Another off-switch, shouldn't allocate P1 due to delay scheduling - // thus, no P2 either! 
- a.assignContainers(clusterResource, node_2, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verify(app_0, never()).allocate(any(NodeType.class), eq(node_2), - eq(priority_1), any(ResourceRequest.class), any(Container.class)); - assertEquals(2, app_0.getSchedulingOpportunities(priority_1)); -@@ -1833,7 +1833,7 @@ public void testApplicationPriorityScheduling() throws Exception { - - // Another off-switch, shouldn't allocate OFF_SWITCH P1 - a.assignContainers(clusterResource, node_2, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verify(app_0).allocate(eq(NodeType.OFF_SWITCH), eq(node_2), - eq(priority_1), any(ResourceRequest.class), any(Container.class)); - assertEquals(3, app_0.getSchedulingOpportunities(priority_1)); -@@ -1845,7 +1845,7 @@ public void testApplicationPriorityScheduling() throws Exception { - - // Now, DATA_LOCAL for P1 - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verify(app_0).allocate(eq(NodeType.NODE_LOCAL), eq(node_0), - eq(priority_1), any(ResourceRequest.class), any(Container.class)); - assertEquals(0, app_0.getSchedulingOpportunities(priority_1)); -@@ -1857,7 +1857,7 @@ public void testApplicationPriorityScheduling() throws Exception { - - // Now, OFF_SWITCH for P2 - a.assignContainers(clusterResource, node_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verify(app_0, never()).allocate(any(NodeType.class), eq(node_1), - eq(priority_1), any(ResourceRequest.class), any(Container.class)); - assertEquals(0, app_0.getSchedulingOpportunities(priority_1)); -@@ -1934,7 +1934,7 @@ public void testSchedulingConstraints() throws Exception { - - // NODE_LOCAL - node_0_1 - a.assignContainers(clusterResource, node_0_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verify(app_0).allocate(eq(NodeType.NODE_LOCAL), eq(node_0_0), - any(Priority.class), any(ResourceRequest.class), any(Container.class)); - assertEquals(0, app_0.getSchedulingOpportunities(priority)); // should reset -@@ -1943,7 +1943,7 @@ public void testSchedulingConstraints() throws Exception { - // No allocation on node_1_0 even though it's node/rack local since - // required(ANY) == 0 - a.assignContainers(clusterResource, node_1_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verify(app_0, never()).allocate(any(NodeType.class), eq(node_1_0), - any(Priority.class), any(ResourceRequest.class), any(Container.class)); - assertEquals(0, app_0.getSchedulingOpportunities(priority)); // Still zero -@@ -1960,7 +1960,7 @@ public void testSchedulingConstraints() throws Exception { - // No allocation on node_0_1 even though it's node/rack local since - // required(rack_1) == 0 - a.assignContainers(clusterResource, node_0_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verify(app_0, never()).allocate(any(NodeType.class), eq(node_1_0), - any(Priority.class), any(ResourceRequest.class), any(Container.class)); - assertEquals(1, app_0.getSchedulingOpportunities(priority)); -@@ -1968,7 +1968,7 @@ public void 
testSchedulingConstraints() throws Exception { - - // NODE_LOCAL - node_1 - a.assignContainers(clusterResource, node_1_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verify(app_0).allocate(eq(NodeType.NODE_LOCAL), eq(node_1_0), - any(Priority.class), any(ResourceRequest.class), any(Container.class)); - assertEquals(0, app_0.getSchedulingOpportunities(priority)); // should reset -@@ -2221,7 +2221,7 @@ public void testLocalityConstraints() throws Exception { - // node_0_1 - // Shouldn't allocate since RR(rack_0) = null && RR(ANY) = relax: false - a.assignContainers(clusterResource, node_0_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verify(app_0, never()).allocate(any(NodeType.class), eq(node_0_1), - any(Priority.class), any(ResourceRequest.class), any(Container.class)); - assertEquals(0, app_0.getSchedulingOpportunities(priority)); // should be 0 -@@ -2244,7 +2244,7 @@ public void testLocalityConstraints() throws Exception { - // node_1_1 - // Shouldn't allocate since RR(rack_1) = relax: false - a.assignContainers(clusterResource, node_1_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verify(app_0, never()).allocate(any(NodeType.class), eq(node_0_1), - any(Priority.class), any(ResourceRequest.class), any(Container.class)); - assertEquals(0, app_0.getSchedulingOpportunities(priority)); // should be 0 -@@ -2275,7 +2275,7 @@ public void testLocalityConstraints() throws Exception { - // node_1_1 - // Shouldn't allocate since node_1_1 is blacklisted - a.assignContainers(clusterResource, node_1_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verify(app_0, never()).allocate(any(NodeType.class), eq(node_1_1), - any(Priority.class), any(ResourceRequest.class), any(Container.class)); - assertEquals(0, app_0.getSchedulingOpportunities(priority)); // should be 0 -@@ -2304,7 +2304,7 @@ public void testLocalityConstraints() throws Exception { - // node_1_1 - // Shouldn't allocate since rack_1 is blacklisted - a.assignContainers(clusterResource, node_1_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verify(app_0, never()).allocate(any(NodeType.class), eq(node_1_1), - any(Priority.class), any(ResourceRequest.class), any(Container.class)); - assertEquals(0, app_0.getSchedulingOpportunities(priority)); // should be 0 -@@ -2331,7 +2331,7 @@ public void testLocalityConstraints() throws Exception { - - // Now, should allocate since RR(rack_1) = relax: true - a.assignContainers(clusterResource, node_1_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verify(app_0,never()).allocate(eq(NodeType.RACK_LOCAL), eq(node_1_1), - any(Priority.class), any(ResourceRequest.class), any(Container.class)); - assertEquals(0, app_0.getSchedulingOpportunities(priority)); -@@ -2362,7 +2362,7 @@ public void testLocalityConstraints() throws Exception { - // host_1_1: 7G - - a.assignContainers(clusterResource, node_1_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verify(app_0).allocate(eq(NodeType.NODE_LOCAL), eq(node_1_0), - any(Priority.class), 
any(ResourceRequest.class), any(Container.class)); - assertEquals(0, app_0.getSchedulingOpportunities(priority)); -@@ -2445,7 +2445,7 @@ public void testAllocateContainerOnNodeWithoutOffSwitchSpecified() - - try { - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - } catch (NullPointerException e) { - Assert.fail(""NPE when allocating container on node but "" - + ""forget to set off-switch request should be handled""); -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestNodeLabelContainerAllocation.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestNodeLabelContainerAllocation.java -new file mode 100644 -index 0000000000000..cf1b26f37e9cf ---- /dev/null -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestNodeLabelContainerAllocation.java -@@ -0,0 +1,1027 @@ -+/** -+ * Licensed to the Apache Software Foundation (ASF) under one -+ * or more contributor license agreements. See the NOTICE file -+ * distributed with this work for additional information -+ * regarding copyright ownership. The ASF licenses this file -+ * to you under the Apache License, Version 2.0 (the -+ * ""License""); you may not use this file except in compliance -+ * with the License. You may obtain a copy of the License at -+ * -+ * http://www.apache.org/licenses/LICENSE-2.0 -+ * -+ * Unless required by applicable law or agreed to in writing, software -+ * distributed under the License is distributed on an ""AS IS"" BASIS, -+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -+ * See the License for the specific language governing permissions and -+ * limitations under the License. 
-+ */ -+ -+package org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity; -+ -+import java.util.ArrayList; -+import java.util.Arrays; -+import java.util.HashSet; -+import java.util.Set; -+ -+import org.apache.hadoop.conf.Configuration; -+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; -+import org.apache.hadoop.yarn.api.records.ContainerId; -+import org.apache.hadoop.yarn.api.records.NodeId; -+import org.apache.hadoop.yarn.api.records.NodeLabel; -+import org.apache.hadoop.yarn.api.records.Priority; -+import org.apache.hadoop.yarn.api.records.ResourceRequest; -+import org.apache.hadoop.yarn.conf.YarnConfiguration; -+import org.apache.hadoop.yarn.server.resourcemanager.MockAM; -+import org.apache.hadoop.yarn.server.resourcemanager.MockNM; -+import org.apache.hadoop.yarn.server.resourcemanager.MockRM; -+import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager; -+import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.NullRMNodeLabelsManager; -+import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager; -+import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; -+import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer; -+import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerState; -+import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode; -+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler; -+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerAppReport; -+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerNode; -+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.YarnScheduler; -+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerApp; -+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeUpdateSchedulerEvent; -+import org.junit.Assert; -+import org.junit.Before; -+import org.junit.Test; -+ -+import com.google.common.collect.ImmutableMap; -+import com.google.common.collect.ImmutableSet; -+import com.google.common.collect.Sets; -+ -+public class TestNodeLabelContainerAllocation { -+ private final int GB = 1024; -+ -+ private YarnConfiguration conf; -+ -+ RMNodeLabelsManager mgr; -+ -+ @Before -+ public void setUp() throws Exception { -+ conf = new YarnConfiguration(); -+ conf.setClass(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class, -+ ResourceScheduler.class); -+ mgr = new NullRMNodeLabelsManager(); -+ mgr.init(conf); -+ } -+ -+ private Configuration getConfigurationWithQueueLabels(Configuration config) { -+ CapacitySchedulerConfiguration conf = -+ new CapacitySchedulerConfiguration(config); -+ -+ // Define top-level queues -+ conf.setQueues(CapacitySchedulerConfiguration.ROOT, new String[] {""a"", ""b"", ""c""}); -+ conf.setCapacityByLabel(CapacitySchedulerConfiguration.ROOT, ""x"", 100); -+ conf.setCapacityByLabel(CapacitySchedulerConfiguration.ROOT, ""y"", 100); -+ -+ final String A = CapacitySchedulerConfiguration.ROOT + "".a""; -+ conf.setCapacity(A, 10); -+ conf.setMaximumCapacity(A, 15); -+ conf.setAccessibleNodeLabels(A, toSet(""x"")); -+ conf.setCapacityByLabel(A, ""x"", 100); -+ -+ final String B = CapacitySchedulerConfiguration.ROOT + "".b""; -+ conf.setCapacity(B, 20); -+ conf.setAccessibleNodeLabels(B, toSet(""y"")); -+ conf.setCapacityByLabel(B, ""y"", 100); -+ -+ final String C = CapacitySchedulerConfiguration.ROOT + "".c""; -+ conf.setCapacity(C, 70); -+ conf.setMaximumCapacity(C, 70); -+ 
conf.setAccessibleNodeLabels(C, RMNodeLabelsManager.EMPTY_STRING_SET); -+ -+ // Define 2nd-level queues -+ final String A1 = A + "".a1""; -+ conf.setQueues(A, new String[] {""a1""}); -+ conf.setCapacity(A1, 100); -+ conf.setMaximumCapacity(A1, 100); -+ conf.setCapacityByLabel(A1, ""x"", 100); -+ -+ final String B1 = B + "".b1""; -+ conf.setQueues(B, new String[] {""b1""}); -+ conf.setCapacity(B1, 100); -+ conf.setMaximumCapacity(B1, 100); -+ conf.setCapacityByLabel(B1, ""y"", 100); -+ -+ final String C1 = C + "".c1""; -+ conf.setQueues(C, new String[] {""c1""}); -+ conf.setCapacity(C1, 100); -+ conf.setMaximumCapacity(C1, 100); -+ -+ return conf; -+ } -+ -+ private void checkTaskContainersHost(ApplicationAttemptId attemptId, -+ ContainerId containerId, ResourceManager rm, String host) { -+ YarnScheduler scheduler = rm.getRMContext().getScheduler(); -+ SchedulerAppReport appReport = scheduler.getSchedulerAppInfo(attemptId); -+ -+ Assert.assertTrue(appReport.getLiveContainers().size() > 0); -+ for (RMContainer c : appReport.getLiveContainers()) { -+ if (c.getContainerId().equals(containerId)) { -+ Assert.assertEquals(host, c.getAllocatedNode().getHost()); -+ } -+ } -+ } -+ -+ @SuppressWarnings(""unchecked"") -+ private Set toSet(E... elements) { -+ Set set = Sets.newHashSet(elements); -+ return set; -+ } -+ -+ -+ @Test (timeout = 300000) -+ public void testContainerAllocationWithSingleUserLimits() throws Exception { -+ final RMNodeLabelsManager mgr = new NullRMNodeLabelsManager(); -+ mgr.init(conf); -+ -+ // set node -> label -+ mgr.addToCluserNodeLabels(ImmutableSet.of(""x"", ""y"")); -+ mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance(""h1"", 0), toSet(""x""), -+ NodeId.newInstance(""h2"", 0), toSet(""y""))); -+ -+ // inject node label manager -+ MockRM rm1 = new MockRM(TestUtils.getConfigurationWithDefaultQueueLabels(conf)) { -+ @Override -+ public RMNodeLabelsManager createNodeLabelManager() { -+ return mgr; -+ } -+ }; -+ -+ rm1.getRMContext().setNodeLabelManager(mgr); -+ rm1.start(); -+ MockNM nm1 = rm1.registerNode(""h1:1234"", 8000); // label = x -+ rm1.registerNode(""h2:1234"", 8000); // label = y -+ MockNM nm3 = rm1.registerNode(""h3:1234"", 8000); // label = -+ -+ // launch an app to queue a1 (label = x), and check all container will -+ // be allocated in h1 -+ RMApp app1 = rm1.submitApp(200, ""app"", ""user"", null, ""a1""); -+ MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1); -+ -+ // A has only 10% of x, so it can only allocate one container in label=empty -+ ContainerId containerId = -+ ContainerId.newContainerId(am1.getApplicationAttemptId(), 2); -+ am1.allocate(""*"", 1024, 1, new ArrayList(), """"); -+ Assert.assertTrue(rm1.waitForState(nm3, containerId, -+ RMContainerState.ALLOCATED, 10 * 1000)); -+ // Cannot allocate 2nd label=empty container -+ containerId = -+ ContainerId.newContainerId(am1.getApplicationAttemptId(), 3); -+ am1.allocate(""*"", 1024, 1, new ArrayList(), """"); -+ Assert.assertFalse(rm1.waitForState(nm3, containerId, -+ RMContainerState.ALLOCATED, 10 * 1000)); -+ -+ // A has default user limit = 100, so it can use all resource in label = x -+ // We can allocate floor(8000 / 1024) = 7 containers -+ for (int id = 3; id <= 8; id++) { -+ containerId = -+ ContainerId.newContainerId(am1.getApplicationAttemptId(), id); -+ am1.allocate(""*"", 1024, 1, new ArrayList(), ""x""); -+ Assert.assertTrue(rm1.waitForState(nm1, containerId, -+ RMContainerState.ALLOCATED, 10 * 1000)); -+ } -+ rm1.close(); -+ } -+ -+ @Test(timeout = 300000) -+ public void 
testContainerAllocateWithComplexLabels() throws Exception { -+ /* -+ * Queue structure: -+ * root (*) -+ * ________________ -+ * / \ -+ * a x(100%), y(50%) b y(50%), z(100%) -+ * ________________ ______________ -+ * / / \ -+ * a1 (x,y) b1(no) b2(y,z) -+ * 100% y = 100%, z = 100% -+ * -+ * Node structure: -+ * h1 : x -+ * h2 : y -+ * h3 : y -+ * h4 : z -+ * h5 : NO -+ * -+ * Total resource: -+ * x: 4G -+ * y: 6G -+ * z: 2G -+ * *: 2G -+ * -+ * Resource of -+ * a1: x=4G, y=3G, NO=0.2G -+ * b1: NO=0.9G (max=1G) -+ * b2: y=3, z=2G, NO=0.9G (max=1G) -+ * -+ * Each node can only allocate two containers -+ */ -+ -+ // set node -> label -+ mgr.addToCluserNodeLabels(ImmutableSet.of(""x"", ""y"", ""z"")); -+ mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance(""h1"", 0), -+ toSet(""x""), NodeId.newInstance(""h2"", 0), toSet(""y""), -+ NodeId.newInstance(""h3"", 0), toSet(""y""), NodeId.newInstance(""h4"", 0), -+ toSet(""z""), NodeId.newInstance(""h5"", 0), -+ RMNodeLabelsManager.EMPTY_STRING_SET)); -+ -+ // inject node label manager -+ MockRM rm1 = new MockRM(TestUtils.getComplexConfigurationWithQueueLabels(conf)) { -+ @Override -+ public RMNodeLabelsManager createNodeLabelManager() { -+ return mgr; -+ } -+ }; -+ -+ rm1.getRMContext().setNodeLabelManager(mgr); -+ rm1.start(); -+ MockNM nm1 = rm1.registerNode(""h1:1234"", 2048); -+ MockNM nm2 = rm1.registerNode(""h2:1234"", 2048); -+ MockNM nm3 = rm1.registerNode(""h3:1234"", 2048); -+ MockNM nm4 = rm1.registerNode(""h4:1234"", 2048); -+ MockNM nm5 = rm1.registerNode(""h5:1234"", 2048); -+ -+ ContainerId containerId; -+ -+ // launch an app to queue a1 (label = x), and check all container will -+ // be allocated in h1 -+ RMApp app1 = rm1.submitApp(1024, ""app"", ""user"", null, ""a1""); -+ MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1); -+ -+ // request a container (label = y). can be allocated on nm2 -+ am1.allocate(""*"", 1024, 1, new ArrayList(), ""y""); -+ containerId = -+ ContainerId.newContainerId(am1.getApplicationAttemptId(), 2L); -+ Assert.assertTrue(rm1.waitForState(nm2, containerId, -+ RMContainerState.ALLOCATED, 10 * 1000)); -+ checkTaskContainersHost(am1.getApplicationAttemptId(), containerId, rm1, -+ ""h2""); -+ -+ // launch an app to queue b1 (label = y), and check all container will -+ // be allocated in h5 -+ RMApp app2 = rm1.submitApp(1024, ""app"", ""user"", null, ""b1""); -+ MockAM am2 = MockRM.launchAndRegisterAM(app2, rm1, nm5); -+ -+ // request a container for AM, will succeed -+ // and now b1's queue capacity will be used, cannot allocate more containers -+ // (Maximum capacity reached) -+ am2.allocate(""*"", 1024, 1, new ArrayList()); -+ containerId = ContainerId.newContainerId(am2.getApplicationAttemptId(), 2); -+ Assert.assertFalse(rm1.waitForState(nm4, containerId, -+ RMContainerState.ALLOCATED, 10 * 1000)); -+ Assert.assertFalse(rm1.waitForState(nm5, containerId, -+ RMContainerState.ALLOCATED, 10 * 1000)); -+ -+ // launch an app to queue b2 -+ RMApp app3 = rm1.submitApp(1024, ""app"", ""user"", null, ""b2""); -+ MockAM am3 = MockRM.launchAndRegisterAM(app3, rm1, nm5); -+ -+ // request a container. try to allocate on nm1 (label = x) and nm3 (label = -+ // y,z). 
Will successfully allocate on nm3 -+ am3.allocate(""*"", 1024, 1, new ArrayList(), ""y""); -+ containerId = ContainerId.newContainerId(am3.getApplicationAttemptId(), 2); -+ Assert.assertFalse(rm1.waitForState(nm1, containerId, -+ RMContainerState.ALLOCATED, 10 * 1000)); -+ Assert.assertTrue(rm1.waitForState(nm3, containerId, -+ RMContainerState.ALLOCATED, 10 * 1000)); -+ checkTaskContainersHost(am3.getApplicationAttemptId(), containerId, rm1, -+ ""h3""); -+ -+ // try to allocate container (request label = z) on nm4 (label = y,z). -+ // Will successfully allocate on nm4 only. -+ am3.allocate(""*"", 1024, 1, new ArrayList(), ""z""); -+ containerId = ContainerId.newContainerId(am3.getApplicationAttemptId(), 3L); -+ Assert.assertTrue(rm1.waitForState(nm4, containerId, -+ RMContainerState.ALLOCATED, 10 * 1000)); -+ checkTaskContainersHost(am3.getApplicationAttemptId(), containerId, rm1, -+ ""h4""); -+ -+ rm1.close(); -+ } -+ -+ @Test (timeout = 120000) -+ public void testContainerAllocateWithLabels() throws Exception { -+ // set node -> label -+ mgr.addToCluserNodeLabels(ImmutableSet.of(""x"", ""y"")); -+ mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance(""h1"", 0), toSet(""x""), -+ NodeId.newInstance(""h2"", 0), toSet(""y""))); -+ -+ // inject node label manager -+ MockRM rm1 = new MockRM(getConfigurationWithQueueLabels(conf)) { -+ @Override -+ public RMNodeLabelsManager createNodeLabelManager() { -+ return mgr; -+ } -+ }; -+ -+ rm1.getRMContext().setNodeLabelManager(mgr); -+ rm1.start(); -+ MockNM nm1 = rm1.registerNode(""h1:1234"", 8000); // label = x -+ MockNM nm2 = rm1.registerNode(""h2:1234"", 8000); // label = y -+ MockNM nm3 = rm1.registerNode(""h3:1234"", 8000); // label = -+ -+ ContainerId containerId; -+ -+ // launch an app to queue a1 (label = x), and check all container will -+ // be allocated in h1 -+ RMApp app1 = rm1.submitApp(200, ""app"", ""user"", null, ""a1""); -+ MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm3); -+ -+ // request a container. -+ am1.allocate(""*"", 1024, 1, new ArrayList(), ""x""); -+ containerId = -+ ContainerId.newContainerId(am1.getApplicationAttemptId(), 2); -+ Assert.assertFalse(rm1.waitForState(nm2, containerId, -+ RMContainerState.ALLOCATED, 10 * 1000)); -+ Assert.assertTrue(rm1.waitForState(nm1, containerId, -+ RMContainerState.ALLOCATED, 10 * 1000)); -+ checkTaskContainersHost(am1.getApplicationAttemptId(), containerId, rm1, -+ ""h1""); -+ -+ // launch an app to queue b1 (label = y), and check all container will -+ // be allocated in h2 -+ RMApp app2 = rm1.submitApp(200, ""app"", ""user"", null, ""b1""); -+ MockAM am2 = MockRM.launchAndRegisterAM(app2, rm1, nm3); -+ -+ // request a container. -+ am2.allocate(""*"", 1024, 1, new ArrayList(), ""y""); -+ containerId = ContainerId.newContainerId(am2.getApplicationAttemptId(), 2); -+ Assert.assertFalse(rm1.waitForState(nm1, containerId, -+ RMContainerState.ALLOCATED, 10 * 1000)); -+ Assert.assertTrue(rm1.waitForState(nm2, containerId, -+ RMContainerState.ALLOCATED, 10 * 1000)); -+ checkTaskContainersHost(am2.getApplicationAttemptId(), containerId, rm1, -+ ""h2""); -+ -+ // launch an app to queue c1 (label = """"), and check all container will -+ // be allocated in h3 -+ RMApp app3 = rm1.submitApp(200, ""app"", ""user"", null, ""c1""); -+ MockAM am3 = MockRM.launchAndRegisterAM(app3, rm1, nm3); -+ -+ // request a container. 
-+ am3.allocate(""*"", 1024, 1, new ArrayList()); -+ containerId = ContainerId.newContainerId(am3.getApplicationAttemptId(), 2); -+ Assert.assertFalse(rm1.waitForState(nm2, containerId, -+ RMContainerState.ALLOCATED, 10 * 1000)); -+ Assert.assertTrue(rm1.waitForState(nm3, containerId, -+ RMContainerState.ALLOCATED, 10 * 1000)); -+ checkTaskContainersHost(am3.getApplicationAttemptId(), containerId, rm1, -+ ""h3""); -+ -+ rm1.close(); -+ } -+ -+ @Test (timeout = 120000) -+ public void testContainerAllocateWithDefaultQueueLabels() throws Exception { -+ // This test is pretty much similar to testContainerAllocateWithLabel. -+ // Difference is, this test doesn't specify label expression in ResourceRequest, -+ // instead, it uses default queue label expression -+ -+ // set node -> label -+ mgr.addToCluserNodeLabels(ImmutableSet.of(""x"", ""y"")); -+ mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance(""h1"", 0), toSet(""x""), -+ NodeId.newInstance(""h2"", 0), toSet(""y""))); -+ -+ // inject node label manager -+ MockRM rm1 = new MockRM(TestUtils.getConfigurationWithDefaultQueueLabels(conf)) { -+ @Override -+ public RMNodeLabelsManager createNodeLabelManager() { -+ return mgr; -+ } -+ }; -+ -+ rm1.getRMContext().setNodeLabelManager(mgr); -+ rm1.start(); -+ MockNM nm1 = rm1.registerNode(""h1:1234"", 8000); // label = x -+ MockNM nm2 = rm1.registerNode(""h2:1234"", 8000); // label = y -+ MockNM nm3 = rm1.registerNode(""h3:1234"", 8000); // label = -+ -+ ContainerId containerId; -+ -+ // launch an app to queue a1 (label = x), and check all container will -+ // be allocated in h1 -+ RMApp app1 = rm1.submitApp(200, ""app"", ""user"", null, ""a1""); -+ MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1); -+ -+ // request a container. -+ am1.allocate(""*"", 1024, 1, new ArrayList()); -+ containerId = -+ ContainerId.newContainerId(am1.getApplicationAttemptId(), 2); -+ Assert.assertFalse(rm1.waitForState(nm3, containerId, -+ RMContainerState.ALLOCATED, 10 * 1000)); -+ Assert.assertTrue(rm1.waitForState(nm1, containerId, -+ RMContainerState.ALLOCATED, 10 * 1000)); -+ checkTaskContainersHost(am1.getApplicationAttemptId(), containerId, rm1, -+ ""h1""); -+ -+ // launch an app to queue b1 (label = y), and check all container will -+ // be allocated in h2 -+ RMApp app2 = rm1.submitApp(200, ""app"", ""user"", null, ""b1""); -+ MockAM am2 = MockRM.launchAndRegisterAM(app2, rm1, nm2); -+ -+ // request a container. -+ am2.allocate(""*"", 1024, 1, new ArrayList()); -+ containerId = ContainerId.newContainerId(am2.getApplicationAttemptId(), 2); -+ Assert.assertFalse(rm1.waitForState(nm3, containerId, -+ RMContainerState.ALLOCATED, 10 * 1000)); -+ Assert.assertTrue(rm1.waitForState(nm2, containerId, -+ RMContainerState.ALLOCATED, 10 * 1000)); -+ checkTaskContainersHost(am2.getApplicationAttemptId(), containerId, rm1, -+ ""h2""); -+ -+ // launch an app to queue c1 (label = """"), and check all container will -+ // be allocated in h3 -+ RMApp app3 = rm1.submitApp(200, ""app"", ""user"", null, ""c1""); -+ MockAM am3 = MockRM.launchAndRegisterAM(app3, rm1, nm3); -+ -+ // request a container. 
-+ am3.allocate(""*"", 1024, 1, new ArrayList()); -+ containerId = ContainerId.newContainerId(am3.getApplicationAttemptId(), 2); -+ Assert.assertFalse(rm1.waitForState(nm2, containerId, -+ RMContainerState.ALLOCATED, 10 * 1000)); -+ Assert.assertTrue(rm1.waitForState(nm3, containerId, -+ RMContainerState.ALLOCATED, 10 * 1000)); -+ checkTaskContainersHost(am3.getApplicationAttemptId(), containerId, rm1, -+ ""h3""); -+ -+ rm1.close(); -+ } -+ -+ private void checkPendingResource(MockRM rm, int priority, -+ ApplicationAttemptId attemptId, int memory) { -+ CapacityScheduler cs = (CapacityScheduler) rm.getRMContext().getScheduler(); -+ FiCaSchedulerApp app = cs.getApplicationAttempt(attemptId); -+ ResourceRequest rr = -+ app.getAppSchedulingInfo().getResourceRequest( -+ Priority.newInstance(priority), ""*""); -+ Assert.assertEquals(memory, -+ rr.getCapability().getMemory() * rr.getNumContainers()); -+ } -+ -+ private void checkLaunchedContainerNumOnNode(MockRM rm, NodeId nodeId, -+ int numContainers) { -+ CapacityScheduler cs = (CapacityScheduler) rm.getRMContext().getScheduler(); -+ SchedulerNode node = cs.getSchedulerNode(nodeId); -+ Assert.assertEquals(numContainers, node.getNumContainers()); -+ } -+ -+ @Test -+ public void testPreferenceOfNeedyAppsTowardsNodePartitions() throws Exception { -+ /** -+ * Test case: Submit two application to a queue (app1 first then app2), app1 -+ * asked for no-label, app2 asked for label=x, when node1 has label=x -+ * doing heart beat, app2 will get allocation first, even if app2 submits later -+ * than app1 -+ */ -+ -+ // set node -> label -+ mgr.addToCluserNodeLabels(ImmutableSet.of(""x"", ""y"")); -+ // Makes y to be non-exclusive node labels -+ mgr.updateNodeLabels(Arrays.asList(NodeLabel.newInstance(""y"", false))); -+ mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance(""h1"", 0), toSet(""y""))); -+ -+ // inject node label manager -+ MockRM rm1 = new MockRM(TestUtils.getConfigurationWithQueueLabels(conf)) { -+ @Override -+ public RMNodeLabelsManager createNodeLabelManager() { -+ return mgr; -+ } -+ }; -+ -+ rm1.getRMContext().setNodeLabelManager(mgr); -+ rm1.start(); -+ MockNM nm1 = rm1.registerNode(""h1:1234"", 8 * GB); // label = y -+ MockNM nm2 = rm1.registerNode(""h2:1234"", 100 * GB); // label = -+ -+ // launch an app to queue b1 (label = y), AM container should be launched in nm2 -+ RMApp app1 = rm1.submitApp(1 * GB, ""app"", ""user"", null, ""b1""); -+ MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm2); -+ -+ // launch another app to queue b1 (label = y), AM container should be launched in nm2 -+ RMApp app2 = rm1.submitApp(1 * GB, ""app"", ""user"", null, ""b1""); -+ MockAM am2 = MockRM.launchAndRegisterAM(app2, rm1, nm2); -+ -+ // request container and nm1 do heartbeat (nm2 has label=y), note that app1 -+ // request non-labeled container, and app2 request labeled container, app2 -+ // will get allocated first even if app1 submitted first. 
-+ am1.allocate(""*"", 1 * GB, 8, new ArrayList()); -+ am2.allocate(""*"", 1 * GB, 8, new ArrayList(), ""y""); -+ -+ CapacityScheduler cs = (CapacityScheduler) rm1.getResourceScheduler(); -+ RMNode rmNode1 = rm1.getRMContext().getRMNodes().get(nm1.getNodeId()); -+ RMNode rmNode2 = rm1.getRMContext().getRMNodes().get(nm2.getNodeId()); -+ -+ // Do node heartbeats many times -+ for (int i = 0; i < 50; i++) { -+ cs.handle(new NodeUpdateSchedulerEvent(rmNode1)); -+ cs.handle(new NodeUpdateSchedulerEvent(rmNode2)); -+ } -+ -+ // App2 will get preference to be allocated on node1, and node1 will be all -+ // used by App2. -+ FiCaSchedulerApp schedulerApp1 = cs.getApplicationAttempt(am1.getApplicationAttemptId()); -+ FiCaSchedulerApp schedulerApp2 = cs.getApplicationAttempt(am2.getApplicationAttemptId()); -+ // app1 get nothing in nm1 (partition=y) -+ checkNumOfContainersInAnAppOnGivenNode(0, nm1.getNodeId(), schedulerApp1); -+ checkNumOfContainersInAnAppOnGivenNode(9, nm2.getNodeId(), schedulerApp1); -+ // app2 get all resource in nm1 (partition=y) -+ checkNumOfContainersInAnAppOnGivenNode(8, nm1.getNodeId(), schedulerApp2); -+ checkNumOfContainersInAnAppOnGivenNode(1, nm2.getNodeId(), schedulerApp2); -+ -+ rm1.close(); -+ } -+ -+ private void checkNumOfContainersInAnAppOnGivenNode(int expectedNum, -+ NodeId nodeId, FiCaSchedulerApp app) { -+ int num = 0; -+ for (RMContainer container : app.getLiveContainers()) { -+ if (container.getAllocatedNode().equals(nodeId)) { -+ num++; -+ } -+ } -+ Assert.assertEquals(expectedNum, num); -+ } -+ -+ @Test -+ public void -+ testPreferenceOfNeedyPrioritiesUnderSameAppTowardsNodePartitions() -+ throws Exception { -+ /** -+ * Test case: Submit one application, it asks label="""" in priority=1 and -+ * label=""x"" in priority=2, when a node with label=x heartbeat, priority=2 -+ * will get allocation first even if there're pending resource in priority=1 -+ */ -+ -+ // set node -> label -+ mgr.addToCluserNodeLabels(ImmutableSet.of(""x"", ""y"")); -+ // Makes y to be non-exclusive node labels -+ mgr.updateNodeLabels(Arrays.asList(NodeLabel.newInstance(""y"", false))); -+ mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance(""h1"", 0), toSet(""y""))); -+ -+ // inject node label manager -+ MockRM rm1 = new MockRM(TestUtils.getConfigurationWithQueueLabels(conf)) { -+ @Override -+ public RMNodeLabelsManager createNodeLabelManager() { -+ return mgr; -+ } -+ }; -+ -+ rm1.getRMContext().setNodeLabelManager(mgr); -+ rm1.start(); -+ MockNM nm1 = rm1.registerNode(""h1:1234"", 8 * GB); // label = y -+ MockNM nm2 = rm1.registerNode(""h2:1234"", 100 * GB); // label = -+ -+ ContainerId nextContainerId; -+ -+ // launch an app to queue b1 (label = y), AM container should be launched in nm3 -+ RMApp app1 = rm1.submitApp(1 * GB, ""app"", ""user"", null, ""b1""); -+ MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm2); -+ -+ // request containers from am2, priority=1 asks for """" and priority=2 asks -+ // for ""y"", ""y"" container should be allocated first -+ nextContainerId = -+ ContainerId.newContainerId(am1.getApplicationAttemptId(), 2); -+ am1.allocate(""*"", 1 * GB, 1, 1, new ArrayList(), """"); -+ am1.allocate(""*"", 1 * GB, 1, 2, new ArrayList(), ""y""); -+ Assert.assertTrue(rm1.waitForState(nm1, nextContainerId, -+ RMContainerState.ALLOCATED, 10 * 1000)); -+ -+ // Check pending resource for am2, priority=1 doesn't get allocated before -+ // priority=2 allocated -+ checkPendingResource(rm1, 1, am1.getApplicationAttemptId(), 1 * GB); -+ checkPendingResource(rm1, 2, 
am1.getApplicationAttemptId(), 0 * GB); -+ -+ rm1.close(); -+ } -+ -+ @Test -+ public void testNonLabeledResourceRequestGetPreferrenceToNonLabeledNode() -+ throws Exception { -+ /** -+ * Test case: Submit one application, it asks 6 label="""" containers, NM1 -+ * with label=y and NM2 has no label, NM1/NM2 doing heartbeat together. Even -+ * if NM1 has idle resource, containers are all allocated to NM2 since -+ * non-labeled request should get allocation on non-labeled nodes first. -+ */ -+ -+ // set node -> label -+ mgr.addToCluserNodeLabels(ImmutableSet.of(""x"", ""y"")); -+ // Makes x to be non-exclusive node labels -+ mgr.updateNodeLabels(Arrays.asList(NodeLabel.newInstance(""x"", false))); -+ mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance(""h1"", 0), toSet(""x""))); -+ -+ // inject node label manager -+ MockRM rm1 = new MockRM(TestUtils.getConfigurationWithQueueLabels(conf)) { -+ @Override -+ public RMNodeLabelsManager createNodeLabelManager() { -+ return mgr; -+ } -+ }; -+ -+ rm1.getRMContext().setNodeLabelManager(mgr); -+ rm1.start(); -+ MockNM nm1 = rm1.registerNode(""h1:1234"", 8 * GB); // label = y -+ MockNM nm2 = rm1.registerNode(""h2:1234"", 100 * GB); // label = -+ -+ ContainerId nextContainerId; -+ -+ // launch an app to queue b1 (label = y), AM container should be launched in nm3 -+ RMApp app1 = rm1.submitApp(1 * GB, ""app"", ""user"", null, ""b1""); -+ MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm2); -+ -+ // request containers from am2, priority=1 asks for """" * 6 (id from 4 to 9), -+ // nm2/nm3 do -+ // heartbeat at the same time, check containers are always allocated to nm3. -+ // This is to verify when there's resource available in non-labeled -+ // partition, non-labeled resource should allocate to non-labeled partition -+ // first. -+ am1.allocate(""*"", 1 * GB, 6, 1, new ArrayList(), """"); -+ for (int i = 2; i < 2 + 6; i++) { -+ nextContainerId = -+ ContainerId.newContainerId(am1.getApplicationAttemptId(), i); -+ Assert.assertTrue(rm1.waitForState(Arrays.asList(nm1, nm2), -+ nextContainerId, RMContainerState.ALLOCATED, 10 * 1000)); -+ } -+ // no more container allocated on nm1 -+ checkLaunchedContainerNumOnNode(rm1, nm1.getNodeId(), 0); -+ // all 7 (1 AM container + 6 task container) containers allocated on nm2 -+ checkLaunchedContainerNumOnNode(rm1, nm2.getNodeId(), 7); -+ -+ rm1.close(); -+ } -+ -+ @Test -+ public void testPreferenceOfQueuesTowardsNodePartitions() -+ throws Exception { -+ /** -+ * Test case: have a following queue structure: -+ * -+ *
-+     *            root
-+     *         /   |   \
-+     *        a     b    c
-+     *       / \   / \  /  \
-+     *      a1 a2 b1 b2 c1 c2
-+     *     (x)    (x)   (x)
-+     * 
-+ * -+ * Only a1, b1, c1 can access label=x, and their default label=x Each each -+ * has one application, asks for 5 containers. NM1 has label=x -+ * -+ * NM1/NM2 doing heartbeat for 15 times, it should allocate all 15 -+ * containers with label=x -+ */ -+ -+ CapacitySchedulerConfiguration csConf = -+ new CapacitySchedulerConfiguration(this.conf); -+ -+ // Define top-level queues -+ csConf.setQueues(CapacitySchedulerConfiguration.ROOT, new String[] {""a"", ""b"", ""c""}); -+ csConf.setCapacityByLabel(CapacitySchedulerConfiguration.ROOT, ""x"", 100); -+ -+ final String A = CapacitySchedulerConfiguration.ROOT + "".a""; -+ csConf.setCapacity(A, 33); -+ csConf.setAccessibleNodeLabels(A, toSet(""x"")); -+ csConf.setCapacityByLabel(A, ""x"", 33); -+ csConf.setQueues(A, new String[] {""a1"", ""a2""}); -+ -+ final String B = CapacitySchedulerConfiguration.ROOT + "".b""; -+ csConf.setCapacity(B, 33); -+ csConf.setAccessibleNodeLabels(B, toSet(""x"")); -+ csConf.setCapacityByLabel(B, ""x"", 33); -+ csConf.setQueues(B, new String[] {""b1"", ""b2""}); -+ -+ final String C = CapacitySchedulerConfiguration.ROOT + "".c""; -+ csConf.setCapacity(C, 34); -+ csConf.setAccessibleNodeLabels(C, toSet(""x"")); -+ csConf.setCapacityByLabel(C, ""x"", 34); -+ csConf.setQueues(C, new String[] {""c1"", ""c2""}); -+ -+ // Define 2nd-level queues -+ final String A1 = A + "".a1""; -+ csConf.setCapacity(A1, 50); -+ csConf.setCapacityByLabel(A1, ""x"", 100); -+ csConf.setDefaultNodeLabelExpression(A1, ""x""); -+ -+ final String A2 = A + "".a2""; -+ csConf.setCapacity(A2, 50); -+ csConf.setCapacityByLabel(A2, ""x"", 0); -+ -+ final String B1 = B + "".b1""; -+ csConf.setCapacity(B1, 50); -+ csConf.setCapacityByLabel(B1, ""x"", 100); -+ csConf.setDefaultNodeLabelExpression(B1, ""x""); -+ -+ final String B2 = B + "".b2""; -+ csConf.setCapacity(B2, 50); -+ csConf.setCapacityByLabel(B2, ""x"", 0); -+ -+ final String C1 = C + "".c1""; -+ csConf.setCapacity(C1, 50); -+ csConf.setCapacityByLabel(C1, ""x"", 100); -+ csConf.setDefaultNodeLabelExpression(C1, ""x""); -+ -+ final String C2 = C + "".c2""; -+ csConf.setCapacity(C2, 50); -+ csConf.setCapacityByLabel(C2, ""x"", 0); -+ -+ // set node -> label -+ mgr.addToCluserNodeLabels(ImmutableSet.of(""x"", ""y"")); -+ // Makes x to be non-exclusive node labels -+ mgr.updateNodeLabels(Arrays.asList(NodeLabel.newInstance(""x"", false))); -+ mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance(""h1"", 0), toSet(""x""))); -+ -+ // inject node label manager -+ MockRM rm1 = new MockRM(csConf) { -+ @Override -+ public RMNodeLabelsManager createNodeLabelManager() { -+ return mgr; -+ } -+ }; -+ -+ rm1.getRMContext().setNodeLabelManager(mgr); -+ rm1.start(); -+ MockNM nm1 = rm1.registerNode(""h1:1234"", 20 * GB); // label = x -+ MockNM nm2 = rm1.registerNode(""h2:1234"", 100 * GB); // label = -+ -+ // app1 -> a1 -+ RMApp app1 = rm1.submitApp(1 * GB, ""app"", ""user"", null, ""a1""); -+ MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1); -+ -+ // app2 -> a2 -+ RMApp app2 = rm1.submitApp(1 * GB, ""app"", ""user"", null, ""a2""); -+ MockAM am2 = MockRM.launchAndRegisterAM(app2, rm1, nm2); -+ -+ // app3 -> b1 -+ RMApp app3 = rm1.submitApp(1 * GB, ""app"", ""user"", null, ""b1""); -+ MockAM am3 = MockRM.launchAndRegisterAM(app3, rm1, nm1); -+ -+ // app4 -> b2 -+ RMApp app4 = rm1.submitApp(1 * GB, ""app"", ""user"", null, ""b2""); -+ MockAM am4 = MockRM.launchAndRegisterAM(app4, rm1, nm2); -+ -+ // app5 -> c1 -+ RMApp app5 = rm1.submitApp(1 * GB, ""app"", ""user"", null, ""c1""); -+ MockAM am5 
= MockRM.launchAndRegisterAM(app5, rm1, nm1); -+ -+ // app6 -> b2 -+ RMApp app6 = rm1.submitApp(1 * GB, ""app"", ""user"", null, ""c2""); -+ MockAM am6 = MockRM.launchAndRegisterAM(app6, rm1, nm2); -+ -+ // Each application request 5 * 1GB container -+ am1.allocate(""*"", 1 * GB, 5, new ArrayList()); -+ am2.allocate(""*"", 1 * GB, 5, new ArrayList()); -+ am3.allocate(""*"", 1 * GB, 5, new ArrayList()); -+ am4.allocate(""*"", 1 * GB, 5, new ArrayList()); -+ am5.allocate(""*"", 1 * GB, 5, new ArrayList()); -+ am6.allocate(""*"", 1 * GB, 5, new ArrayList()); -+ -+ // NM1 do 15 heartbeats -+ CapacityScheduler cs = (CapacityScheduler) rm1.getResourceScheduler(); -+ RMNode rmNode1 = rm1.getRMContext().getRMNodes().get(nm1.getNodeId()); -+ for (int i = 0; i < 15; i++) { -+ cs.handle(new NodeUpdateSchedulerEvent(rmNode1)); -+ } -+ -+ // NM1 get 15 new containers (total is 18, 15 task containers and 3 AM -+ // containers) -+ checkLaunchedContainerNumOnNode(rm1, nm1.getNodeId(), 18); -+ -+ // Check pending resource each application -+ // APP1/APP3/APP5 get satisfied, and APP2/APP2/APP3 get nothing. -+ checkPendingResource(rm1, 1, am1.getApplicationAttemptId(), 0 * GB); -+ checkPendingResource(rm1, 1, am2.getApplicationAttemptId(), 5 * GB); -+ checkPendingResource(rm1, 1, am3.getApplicationAttemptId(), 0 * GB); -+ checkPendingResource(rm1, 1, am4.getApplicationAttemptId(), 5 * GB); -+ checkPendingResource(rm1, 1, am5.getApplicationAttemptId(), 0 * GB); -+ checkPendingResource(rm1, 1, am6.getApplicationAttemptId(), 5 * GB); -+ -+ rm1.close(); -+ } -+ -+ @Test -+ public void testQueuesWithoutAccessUsingPartitionedNodes() throws Exception { -+ /** -+ * Test case: have a following queue structure: -+ * -+ *
-+     *            root
-+     *         /      \
-+     *        a        b
-+     *        (x)
-+     * 
-+ * -+ * Only a can access label=x, two nodes in the cluster, n1 has x and n2 has -+ * no-label. -+ * -+ * When user-limit-factor=5, submit one application in queue b and request -+ * for infinite containers should be able to use up all cluster resources. -+ */ -+ -+ CapacitySchedulerConfiguration csConf = -+ new CapacitySchedulerConfiguration(this.conf); -+ -+ // Define top-level queues -+ csConf.setQueues(CapacitySchedulerConfiguration.ROOT, new String[] {""a"", ""b""}); -+ csConf.setCapacityByLabel(CapacitySchedulerConfiguration.ROOT, ""x"", 100); -+ -+ final String A = CapacitySchedulerConfiguration.ROOT + "".a""; -+ csConf.setCapacity(A, 50); -+ csConf.setAccessibleNodeLabels(A, toSet(""x"")); -+ csConf.setCapacityByLabel(A, ""x"", 100); -+ -+ final String B = CapacitySchedulerConfiguration.ROOT + "".b""; -+ csConf.setCapacity(B, 50); -+ csConf.setAccessibleNodeLabels(B, new HashSet()); -+ csConf.setUserLimitFactor(B, 5); -+ -+ // set node -> label -+ mgr.addToCluserNodeLabels(ImmutableSet.of(""x"")); -+ // Makes x to be non-exclusive node labels -+ mgr.updateNodeLabels(Arrays.asList(NodeLabel.newInstance(""x"", false))); -+ mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance(""h1"", 0), toSet(""x""))); -+ -+ // inject node label manager -+ MockRM rm1 = new MockRM(csConf) { -+ @Override -+ public RMNodeLabelsManager createNodeLabelManager() { -+ return mgr; -+ } -+ }; -+ -+ rm1.getRMContext().setNodeLabelManager(mgr); -+ rm1.start(); -+ MockNM nm1 = rm1.registerNode(""h1:1234"", 10 * GB); // label = x -+ MockNM nm2 = rm1.registerNode(""h2:1234"", 10 * GB); // label = -+ -+ // app1 -> b -+ RMApp app1 = rm1.submitApp(1 * GB, ""app"", ""user"", null, ""b""); -+ MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm2); -+ -+ // Each application request 5 * 1GB container -+ am1.allocate(""*"", 1 * GB, 50, new ArrayList()); -+ -+ // NM1 do 50 heartbeats -+ CapacityScheduler cs = (CapacityScheduler) rm1.getResourceScheduler(); -+ RMNode rmNode1 = rm1.getRMContext().getRMNodes().get(nm1.getNodeId()); -+ RMNode rmNode2 = rm1.getRMContext().getRMNodes().get(nm2.getNodeId()); -+ -+ SchedulerNode schedulerNode1 = cs.getSchedulerNode(nm1.getNodeId()); -+ -+ // How much cycles we waited to be allocated when available resource only on -+ // partitioned node -+ int cycleWaited = 0; -+ for (int i = 0; i < 50; i++) { -+ cs.handle(new NodeUpdateSchedulerEvent(rmNode1)); -+ cs.handle(new NodeUpdateSchedulerEvent(rmNode2)); -+ if (schedulerNode1.getNumContainers() == 0) { -+ cycleWaited++; -+ } -+ } -+ // We will will 10 cycles before get allocated on partitioned node -+ // NM2 can allocate 10 containers totally, exclude already allocated AM -+ // container, we will wait 9 to fulfill non-partitioned node, and need wait -+ // one more cycle before allocating to non-partitioned node -+ Assert.assertEquals(10, cycleWaited); -+ -+ // Both NM1/NM2 launched 10 containers, cluster resource is exhausted -+ checkLaunchedContainerNumOnNode(rm1, nm1.getNodeId(), 10); -+ checkLaunchedContainerNumOnNode(rm1, nm2.getNodeId(), 10); -+ -+ rm1.close(); -+ } -+ -+ @Test -+ public void testAMContainerAllocationWillAlwaysBeExclusive() -+ throws Exception { -+ /** -+ * Test case: Submit one application without partition, trying to allocate a -+ * node has partition=x, it should fail to allocate since AM container will -+ * always respect exclusivity for partitions -+ */ -+ -+ // set node -> label -+ mgr.addToCluserNodeLabels(ImmutableSet.of(""x"", ""y"")); -+ // Makes x to be non-exclusive node labels -+ 
mgr.updateNodeLabels(Arrays.asList(NodeLabel.newInstance(""x"", false))); -+ mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance(""h1"", 0), toSet(""x""))); -+ -+ // inject node label manager -+ MockRM rm1 = new MockRM(TestUtils.getConfigurationWithQueueLabels(conf)) { -+ @Override -+ public RMNodeLabelsManager createNodeLabelManager() { -+ return mgr; -+ } -+ }; -+ -+ rm1.getRMContext().setNodeLabelManager(mgr); -+ rm1.start(); -+ MockNM nm1 = rm1.registerNode(""h1:1234"", 8 * GB); // label = x -+ -+ // launch an app to queue b1 (label = y), AM container should be launched in nm3 -+ rm1.submitApp(1 * GB, ""app"", ""user"", null, ""b1""); -+ -+ CapacityScheduler cs = (CapacityScheduler) rm1.getResourceScheduler(); -+ RMNode rmNode1 = rm1.getRMContext().getRMNodes().get(nm1.getNodeId()); -+ -+ // Heartbeat for many times, app1 should get nothing -+ for (int i = 0; i < 50; i++) { -+ cs.handle(new NodeUpdateSchedulerEvent(rmNode1)); -+ } -+ -+ Assert.assertEquals(0, cs.getSchedulerNode(nm1.getNodeId()) -+ .getNumContainers()); -+ -+ rm1.close(); -+ } -+ -+ @Test -+ public void -+ testQueueMaxCapacitiesWillNotBeHonoredWhenNotRespectingExclusivity() -+ throws Exception { -+ /** -+ * Test case: have a following queue structure: -+ * -+ *
-+     *            root
-+     *         /      \
-+     *        a        b
-+     *        (x)     (x)
-+     * 
-+ * -+ * a/b can access x, both of them has max-capacity-on-x = 50 -+ * -+ * When doing non-exclusive allocation, app in a (or b) can use 100% of x -+ * resource. -+ */ -+ -+ CapacitySchedulerConfiguration csConf = -+ new CapacitySchedulerConfiguration(this.conf); -+ -+ // Define top-level queues -+ csConf.setQueues(CapacitySchedulerConfiguration.ROOT, new String[] { ""a"", -+ ""b"" }); -+ csConf.setCapacityByLabel(CapacitySchedulerConfiguration.ROOT, ""x"", 100); -+ -+ final String A = CapacitySchedulerConfiguration.ROOT + "".a""; -+ csConf.setCapacity(A, 50); -+ csConf.setAccessibleNodeLabels(A, toSet(""x"")); -+ csConf.setCapacityByLabel(A, ""x"", 50); -+ csConf.setMaximumCapacityByLabel(A, ""x"", 50); -+ -+ final String B = CapacitySchedulerConfiguration.ROOT + "".b""; -+ csConf.setCapacity(B, 50); -+ csConf.setAccessibleNodeLabels(B, toSet(""x"")); -+ csConf.setCapacityByLabel(B, ""x"", 50); -+ csConf.setMaximumCapacityByLabel(B, ""x"", 50); -+ -+ // set node -> label -+ mgr.addToCluserNodeLabels(ImmutableSet.of(""x"")); -+ // Makes x to be non-exclusive node labels -+ mgr.updateNodeLabels(Arrays.asList(NodeLabel.newInstance(""x"", false))); -+ mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance(""h1"", 0), toSet(""x""))); -+ -+ // inject node label manager -+ MockRM rm1 = new MockRM(csConf) { -+ @Override -+ public RMNodeLabelsManager createNodeLabelManager() { -+ return mgr; -+ } -+ }; -+ -+ rm1.getRMContext().setNodeLabelManager(mgr); -+ rm1.start(); -+ MockNM nm1 = rm1.registerNode(""h1:1234"", 10 * GB); // label = x -+ MockNM nm2 = rm1.registerNode(""h2:1234"", 10 * GB); // label = -+ -+ // app1 -> a -+ RMApp app1 = rm1.submitApp(1 * GB, ""app"", ""user"", null, ""a""); -+ MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm2); -+ -+ // app1 asks for 10 partition= containers -+ am1.allocate(""*"", 1 * GB, 10, new ArrayList()); -+ -+ // NM1 do 50 heartbeats -+ CapacityScheduler cs = (CapacityScheduler) rm1.getResourceScheduler(); -+ RMNode rmNode1 = rm1.getRMContext().getRMNodes().get(nm1.getNodeId()); -+ -+ SchedulerNode schedulerNode1 = cs.getSchedulerNode(nm1.getNodeId()); -+ -+ for (int i = 0; i < 50; i++) { -+ cs.handle(new NodeUpdateSchedulerEvent(rmNode1)); -+ } -+ -+ // app1 gets all resource in partition=x -+ Assert.assertEquals(10, schedulerNode1.getNumContainers()); -+ -+ rm1.close(); -+ } -+} -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestParentQueue.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestParentQueue.java -index 7da1c97fec0ef..52d0bc1241bed 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestParentQueue.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestParentQueue.java -@@ -23,7 +23,6 @@ - import static org.junit.Assert.assertTrue; - import static org.junit.Assert.fail; - import static org.mockito.Matchers.any; --import static org.mockito.Matchers.anyBoolean; - import static org.mockito.Matchers.eq; - import static org.mockito.Mockito.doAnswer; - import static org.mockito.Mockito.doReturn; -@@ -45,6 +44,7 @@ - import 
org.apache.hadoop.yarn.conf.YarnConfiguration; - import org.apache.hadoop.yarn.security.YarnAuthorizationProvider; - import org.apache.hadoop.yarn.server.resourcemanager.RMContext; -+import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager; - import org.apache.hadoop.yarn.server.resourcemanager.scheduler.NodeType; - import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceLimits; - import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerApp; -@@ -146,7 +146,7 @@ public CSAssignment answer(InvocationOnMock invocation) throws Throwable { - final Resource allocatedResource = Resources.createResource(allocation); - if (queue instanceof ParentQueue) { - ((ParentQueue)queue).allocateResource(clusterResource, -- allocatedResource, null); -+ allocatedResource, RMNodeLabelsManager.NO_LABEL); - } else { - FiCaSchedulerApp app1 = getMockApplication(0, """"); - ((LeafQueue)queue).allocateResource(clusterResource, app1, -@@ -157,7 +157,7 @@ public CSAssignment answer(InvocationOnMock invocation) throws Throwable { - if (allocation > 0) { - doReturn(new CSAssignment(Resources.none(), type)).when(queue) - .assignContainers(eq(clusterResource), eq(node), -- any(ResourceLimits.class)); -+ any(ResourceLimits.class), any(SchedulingMode.class)); - - // Mock the node's resource availability - Resource available = node.getAvailableResource(); -@@ -168,7 +168,7 @@ public CSAssignment answer(InvocationOnMock invocation) throws Throwable { - return new CSAssignment(allocatedResource, type); - } - }).when(queue).assignContainers(eq(clusterResource), eq(node), -- any(ResourceLimits.class)); -+ any(ResourceLimits.class), any(SchedulingMode.class)); - } - - private float computeQueueAbsoluteUsedCapacity(CSQueue queue, -@@ -228,11 +228,16 @@ public void testSingleLevelQueues() throws Exception { - LeafQueue a = (LeafQueue)queues.get(A); - LeafQueue b = (LeafQueue)queues.get(B); - -+ a.getQueueResourceUsage().incPending(Resources.createResource(1 * GB)); -+ b.getQueueResourceUsage().incPending(Resources.createResource(1 * GB)); -+ queues.get(CapacitySchedulerConfiguration.ROOT).getQueueResourceUsage() -+ .incPending(Resources.createResource(1 * GB)); -+ - // Simulate B returning a container on node_0 - stubQueueAllocation(a, clusterResource, node_0, 0*GB); - stubQueueAllocation(b, clusterResource, node_0, 1*GB); - root.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verifyQueueMetrics(a, 0*GB, clusterResource); - verifyQueueMetrics(b, 1*GB, clusterResource); - -@@ -240,12 +245,12 @@ public void testSingleLevelQueues() throws Exception { - stubQueueAllocation(a, clusterResource, node_1, 2*GB); - stubQueueAllocation(b, clusterResource, node_1, 1*GB); - root.assignContainers(clusterResource, node_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - InOrder allocationOrder = inOrder(a, b); - allocationOrder.verify(a).assignContainers(eq(clusterResource), -- any(FiCaSchedulerNode.class), anyResourceLimits()); -+ any(FiCaSchedulerNode.class), anyResourceLimits(), any(SchedulingMode.class)); - allocationOrder.verify(b).assignContainers(eq(clusterResource), -- any(FiCaSchedulerNode.class), anyResourceLimits()); -+ any(FiCaSchedulerNode.class), anyResourceLimits(), any(SchedulingMode.class)); - verifyQueueMetrics(a, 2*GB, clusterResource); - 
verifyQueueMetrics(b, 2*GB, clusterResource); - -@@ -254,12 +259,12 @@ public void testSingleLevelQueues() throws Exception { - stubQueueAllocation(a, clusterResource, node_0, 1*GB); - stubQueueAllocation(b, clusterResource, node_0, 2*GB); - root.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - allocationOrder = inOrder(b, a); - allocationOrder.verify(b).assignContainers(eq(clusterResource), -- any(FiCaSchedulerNode.class), anyResourceLimits()); -+ any(FiCaSchedulerNode.class), anyResourceLimits(), any(SchedulingMode.class)); - allocationOrder.verify(a).assignContainers(eq(clusterResource), -- any(FiCaSchedulerNode.class), anyResourceLimits()); -+ any(FiCaSchedulerNode.class), anyResourceLimits(), any(SchedulingMode.class)); - verifyQueueMetrics(a, 3*GB, clusterResource); - verifyQueueMetrics(b, 4*GB, clusterResource); - -@@ -268,12 +273,12 @@ public void testSingleLevelQueues() throws Exception { - stubQueueAllocation(a, clusterResource, node_0, 0*GB); - stubQueueAllocation(b, clusterResource, node_0, 4*GB); - root.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - allocationOrder = inOrder(b, a); - allocationOrder.verify(b).assignContainers(eq(clusterResource), -- any(FiCaSchedulerNode.class), anyResourceLimits()); -+ any(FiCaSchedulerNode.class), anyResourceLimits(), any(SchedulingMode.class)); - allocationOrder.verify(a).assignContainers(eq(clusterResource), -- any(FiCaSchedulerNode.class), anyResourceLimits()); -+ any(FiCaSchedulerNode.class), anyResourceLimits(), any(SchedulingMode.class)); - verifyQueueMetrics(a, 3*GB, clusterResource); - verifyQueueMetrics(b, 8*GB, clusterResource); - -@@ -282,12 +287,12 @@ public void testSingleLevelQueues() throws Exception { - stubQueueAllocation(a, clusterResource, node_1, 1*GB); - stubQueueAllocation(b, clusterResource, node_1, 1*GB); - root.assignContainers(clusterResource, node_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - allocationOrder = inOrder(a, b); - allocationOrder.verify(b).assignContainers(eq(clusterResource), -- any(FiCaSchedulerNode.class), anyResourceLimits()); -+ any(FiCaSchedulerNode.class), anyResourceLimits(), any(SchedulingMode.class)); - allocationOrder.verify(a).assignContainers(eq(clusterResource), -- any(FiCaSchedulerNode.class), anyResourceLimits()); -+ any(FiCaSchedulerNode.class), anyResourceLimits(), any(SchedulingMode.class)); - verifyQueueMetrics(a, 4*GB, clusterResource); - verifyQueueMetrics(b, 9*GB, clusterResource); - } -@@ -448,16 +453,27 @@ public void testMultiLevelQueues() throws Exception { - - // Start testing - CSQueue a = queues.get(A); -+ a.getQueueResourceUsage().incPending(Resources.createResource(1 * GB)); - CSQueue b = queues.get(B); -+ b.getQueueResourceUsage().incPending(Resources.createResource(1 * GB)); - CSQueue c = queues.get(C); -+ c.getQueueResourceUsage().incPending(Resources.createResource(1 * GB)); - CSQueue d = queues.get(D); -+ d.getQueueResourceUsage().incPending(Resources.createResource(1 * GB)); - - CSQueue a1 = queues.get(A1); -+ a1.getQueueResourceUsage().incPending(Resources.createResource(1 * GB)); - CSQueue a2 = queues.get(A2); -+ a2.getQueueResourceUsage().incPending(Resources.createResource(1 * GB)); - - CSQueue b1 = queues.get(B1); -+ 
b1.getQueueResourceUsage().incPending(Resources.createResource(1 * GB)); - CSQueue b2 = queues.get(B2); -+ b2.getQueueResourceUsage().incPending(Resources.createResource(1 * GB)); - CSQueue b3 = queues.get(B3); -+ b3.getQueueResourceUsage().incPending(Resources.createResource(1 * GB)); -+ queues.get(CapacitySchedulerConfiguration.ROOT).getQueueResourceUsage() -+ .incPending(Resources.createResource(1 * GB)); - - // Simulate C returning a container on node_0 - stubQueueAllocation(a, clusterResource, node_0, 0*GB); -@@ -465,7 +481,7 @@ public void testMultiLevelQueues() throws Exception { - stubQueueAllocation(c, clusterResource, node_0, 1*GB); - stubQueueAllocation(d, clusterResource, node_0, 0*GB); - root.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verifyQueueMetrics(a, 0*GB, clusterResource); - verifyQueueMetrics(b, 0*GB, clusterResource); - verifyQueueMetrics(c, 1*GB, clusterResource); -@@ -478,7 +494,7 @@ public void testMultiLevelQueues() throws Exception { - stubQueueAllocation(b2, clusterResource, node_1, 4*GB); - stubQueueAllocation(c, clusterResource, node_1, 0*GB); - root.assignContainers(clusterResource, node_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verifyQueueMetrics(a, 0*GB, clusterResource); - verifyQueueMetrics(b, 4*GB, clusterResource); - verifyQueueMetrics(c, 1*GB, clusterResource); -@@ -490,14 +506,14 @@ public void testMultiLevelQueues() throws Exception { - stubQueueAllocation(b3, clusterResource, node_0, 2*GB); - stubQueueAllocation(c, clusterResource, node_0, 2*GB); - root.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - InOrder allocationOrder = inOrder(a, c, b); - allocationOrder.verify(a).assignContainers(eq(clusterResource), -- any(FiCaSchedulerNode.class), anyResourceLimits()); -+ any(FiCaSchedulerNode.class), anyResourceLimits(), any(SchedulingMode.class)); - allocationOrder.verify(c).assignContainers(eq(clusterResource), -- any(FiCaSchedulerNode.class), anyResourceLimits()); -+ any(FiCaSchedulerNode.class), anyResourceLimits(), any(SchedulingMode.class)); - allocationOrder.verify(b).assignContainers(eq(clusterResource), -- any(FiCaSchedulerNode.class), anyResourceLimits()); -+ any(FiCaSchedulerNode.class), anyResourceLimits(), any(SchedulingMode.class)); - verifyQueueMetrics(a, 1*GB, clusterResource); - verifyQueueMetrics(b, 6*GB, clusterResource); - verifyQueueMetrics(c, 3*GB, clusterResource); -@@ -517,16 +533,16 @@ public void testMultiLevelQueues() throws Exception { - stubQueueAllocation(b1, clusterResource, node_2, 1*GB); - stubQueueAllocation(c, clusterResource, node_2, 1*GB); - root.assignContainers(clusterResource, node_2, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - allocationOrder = inOrder(a, a2, a1, b, c); - allocationOrder.verify(a).assignContainers(eq(clusterResource), -- any(FiCaSchedulerNode.class), anyResourceLimits()); -+ any(FiCaSchedulerNode.class), anyResourceLimits(), any(SchedulingMode.class)); - allocationOrder.verify(a2).assignContainers(eq(clusterResource), -- any(FiCaSchedulerNode.class), anyResourceLimits()); -+ any(FiCaSchedulerNode.class), anyResourceLimits(), any(SchedulingMode.class)); - 
allocationOrder.verify(b).assignContainers(eq(clusterResource), -- any(FiCaSchedulerNode.class), anyResourceLimits()); -+ any(FiCaSchedulerNode.class), anyResourceLimits(), any(SchedulingMode.class)); - allocationOrder.verify(c).assignContainers(eq(clusterResource), -- any(FiCaSchedulerNode.class), anyResourceLimits()); -+ any(FiCaSchedulerNode.class), anyResourceLimits(), any(SchedulingMode.class)); - verifyQueueMetrics(a, 3*GB, clusterResource); - verifyQueueMetrics(b, 8*GB, clusterResource); - verifyQueueMetrics(c, 4*GB, clusterResource); -@@ -622,12 +638,16 @@ public void testOffSwitchScheduling() throws Exception { - // Start testing - LeafQueue a = (LeafQueue)queues.get(A); - LeafQueue b = (LeafQueue)queues.get(B); -+ a.getQueueResourceUsage().incPending(Resources.createResource(1 * GB)); -+ b.getQueueResourceUsage().incPending(Resources.createResource(1 * GB)); -+ queues.get(CapacitySchedulerConfiguration.ROOT).getQueueResourceUsage() -+ .incPending(Resources.createResource(1 * GB)); - - // Simulate B returning a container on node_0 - stubQueueAllocation(a, clusterResource, node_0, 0*GB, NodeType.OFF_SWITCH); - stubQueueAllocation(b, clusterResource, node_0, 1*GB, NodeType.OFF_SWITCH); - root.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verifyQueueMetrics(a, 0*GB, clusterResource); - verifyQueueMetrics(b, 1*GB, clusterResource); - -@@ -636,12 +656,12 @@ public void testOffSwitchScheduling() throws Exception { - stubQueueAllocation(a, clusterResource, node_1, 2*GB, NodeType.RACK_LOCAL); - stubQueueAllocation(b, clusterResource, node_1, 1*GB, NodeType.OFF_SWITCH); - root.assignContainers(clusterResource, node_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - InOrder allocationOrder = inOrder(a, b); - allocationOrder.verify(a).assignContainers(eq(clusterResource), -- any(FiCaSchedulerNode.class), anyResourceLimits()); -+ any(FiCaSchedulerNode.class), anyResourceLimits(), any(SchedulingMode.class)); - allocationOrder.verify(b).assignContainers(eq(clusterResource), -- any(FiCaSchedulerNode.class), anyResourceLimits()); -+ any(FiCaSchedulerNode.class), anyResourceLimits(), any(SchedulingMode.class)); - verifyQueueMetrics(a, 2*GB, clusterResource); - verifyQueueMetrics(b, 2*GB, clusterResource); - -@@ -651,12 +671,12 @@ public void testOffSwitchScheduling() throws Exception { - stubQueueAllocation(a, clusterResource, node_0, 1*GB, NodeType.NODE_LOCAL); - stubQueueAllocation(b, clusterResource, node_0, 2*GB, NodeType.OFF_SWITCH); - root.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - allocationOrder = inOrder(b, a); - allocationOrder.verify(b).assignContainers(eq(clusterResource), -- any(FiCaSchedulerNode.class), anyResourceLimits()); -+ any(FiCaSchedulerNode.class), anyResourceLimits(), any(SchedulingMode.class)); - allocationOrder.verify(a).assignContainers(eq(clusterResource), -- any(FiCaSchedulerNode.class), anyResourceLimits()); -+ any(FiCaSchedulerNode.class), anyResourceLimits(), any(SchedulingMode.class)); - verifyQueueMetrics(a, 2*GB, clusterResource); - verifyQueueMetrics(b, 4*GB, clusterResource); - -@@ -691,12 +711,19 @@ public void testOffSwitchSchedulingMultiLevelQueues() throws Exception { - // Start testing - LeafQueue b3 = 
(LeafQueue)queues.get(B3); - LeafQueue b2 = (LeafQueue)queues.get(B2); -+ b2.getQueueResourceUsage().incPending(Resources.createResource(1 * GB)); -+ b3.getQueueResourceUsage().incPending(Resources.createResource(1 * GB)); -+ queues.get(CapacitySchedulerConfiguration.ROOT).getQueueResourceUsage() -+ .incPending(Resources.createResource(1 * GB)); -+ -+ CSQueue b = queues.get(B); -+ b.getQueueResourceUsage().incPending(Resources.createResource(1 * GB)); - - // Simulate B3 returning a container on node_0 - stubQueueAllocation(b2, clusterResource, node_0, 0*GB, NodeType.OFF_SWITCH); - stubQueueAllocation(b3, clusterResource, node_0, 1*GB, NodeType.OFF_SWITCH); - root.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - verifyQueueMetrics(b2, 0*GB, clusterResource); - verifyQueueMetrics(b3, 1*GB, clusterResource); - -@@ -705,12 +732,12 @@ public void testOffSwitchSchedulingMultiLevelQueues() throws Exception { - stubQueueAllocation(b2, clusterResource, node_1, 1*GB, NodeType.RACK_LOCAL); - stubQueueAllocation(b3, clusterResource, node_1, 1*GB, NodeType.OFF_SWITCH); - root.assignContainers(clusterResource, node_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - InOrder allocationOrder = inOrder(b2, b3); - allocationOrder.verify(b2).assignContainers(eq(clusterResource), -- any(FiCaSchedulerNode.class), anyResourceLimits()); -+ any(FiCaSchedulerNode.class), anyResourceLimits(), any(SchedulingMode.class)); - allocationOrder.verify(b3).assignContainers(eq(clusterResource), -- any(FiCaSchedulerNode.class), anyResourceLimits()); -+ any(FiCaSchedulerNode.class), anyResourceLimits(), any(SchedulingMode.class)); - verifyQueueMetrics(b2, 1*GB, clusterResource); - verifyQueueMetrics(b3, 2*GB, clusterResource); - -@@ -720,12 +747,12 @@ public void testOffSwitchSchedulingMultiLevelQueues() throws Exception { - stubQueueAllocation(b2, clusterResource, node_0, 1*GB, NodeType.NODE_LOCAL); - stubQueueAllocation(b3, clusterResource, node_0, 1*GB, NodeType.OFF_SWITCH); - root.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - allocationOrder = inOrder(b3, b2); - allocationOrder.verify(b3).assignContainers(eq(clusterResource), -- any(FiCaSchedulerNode.class), anyResourceLimits()); -+ any(FiCaSchedulerNode.class), anyResourceLimits(), any(SchedulingMode.class)); - allocationOrder.verify(b2).assignContainers(eq(clusterResource), -- any(FiCaSchedulerNode.class), anyResourceLimits()); -+ any(FiCaSchedulerNode.class), anyResourceLimits(), any(SchedulingMode.class)); - verifyQueueMetrics(b2, 1*GB, clusterResource); - verifyQueueMetrics(b3, 3*GB, clusterResource); - -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestReservations.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestReservations.java -index e8a8243203365..47be61809881f 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestReservations.java -+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestReservations.java -@@ -48,10 +48,10 @@ - import org.apache.hadoop.yarn.event.DrainDispatcher; - import org.apache.hadoop.yarn.factories.RecordFactory; - import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; --import org.apache.hadoop.yarn.nodelabels.CommonNodeLabelsManager; - import org.apache.hadoop.yarn.server.resourcemanager.RMContext; - import org.apache.hadoop.yarn.server.resourcemanager.ahs.RMApplicationHistoryWriter; - import org.apache.hadoop.yarn.server.resourcemanager.metrics.SystemMetricsPublisher; -+import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager; - import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; - import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.ContainerAllocationExpirer; - import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer; -@@ -266,7 +266,7 @@ public void testReservation() throws Exception { - // Start testing... - // Only AM - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(2 * GB, a.getUsedResources().getMemory()); - assertEquals(2 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0 * GB, a.getMetrics().getReservedMB()); -@@ -278,7 +278,7 @@ public void testReservation() throws Exception { - - // Only 1 map - simulating reduce - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(5 * GB, a.getUsedResources().getMemory()); - assertEquals(5 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0 * GB, a.getMetrics().getReservedMB()); -@@ -290,7 +290,7 @@ public void testReservation() throws Exception { - - // Only 1 map to other node - simulating reduce - a.assignContainers(clusterResource, node_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(8 * GB, a.getUsedResources().getMemory()); - assertEquals(8 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0 * GB, a.getMetrics().getReservedMB()); -@@ -305,7 +305,7 @@ public void testReservation() throws Exception { - - // try to assign reducer (5G on node 0 and should reserve) - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(13 * GB, a.getUsedResources().getMemory()); - assertEquals(8 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(5 * GB, a.getMetrics().getReservedMB()); -@@ -321,7 +321,7 @@ public void testReservation() throws Exception { - - // assign reducer to node 2 - a.assignContainers(clusterResource, node_2, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(18 * GB, a.getUsedResources().getMemory()); - assertEquals(13 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(5 * GB, a.getMetrics().getReservedMB()); -@@ -338,7 +338,7 @@ public void testReservation() throws Exception { - // node_1 heartbeat and unreserves from node_0 in order to allocate - // on node_1 - 
a.assignContainers(clusterResource, node_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(18 * GB, a.getUsedResources().getMemory()); - assertEquals(18 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0 * GB, a.getMetrics().getReservedMB()); -@@ -422,7 +422,7 @@ public void testReservationNoContinueLook() throws Exception { - // Start testing... - // Only AM - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(2 * GB, a.getUsedResources().getMemory()); - assertEquals(2 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0 * GB, a.getMetrics().getReservedMB()); -@@ -434,7 +434,7 @@ public void testReservationNoContinueLook() throws Exception { - - // Only 1 map - simulating reduce - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(5 * GB, a.getUsedResources().getMemory()); - assertEquals(5 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0 * GB, a.getMetrics().getReservedMB()); -@@ -446,7 +446,7 @@ public void testReservationNoContinueLook() throws Exception { - - // Only 1 map to other node - simulating reduce - a.assignContainers(clusterResource, node_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(8 * GB, a.getUsedResources().getMemory()); - assertEquals(8 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0 * GB, a.getMetrics().getReservedMB()); -@@ -461,7 +461,7 @@ public void testReservationNoContinueLook() throws Exception { - - // try to assign reducer (5G on node 0 and should reserve) - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(13 * GB, a.getUsedResources().getMemory()); - assertEquals(8 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(5 * GB, a.getMetrics().getReservedMB()); -@@ -477,7 +477,7 @@ public void testReservationNoContinueLook() throws Exception { - - // assign reducer to node 2 - a.assignContainers(clusterResource, node_2, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(18 * GB, a.getUsedResources().getMemory()); - assertEquals(13 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(5 * GB, a.getMetrics().getReservedMB()); -@@ -494,7 +494,7 @@ public void testReservationNoContinueLook() throws Exception { - // node_1 heartbeat and won't unreserve from node_0, potentially stuck - // if AM doesn't handle - a.assignContainers(clusterResource, node_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(18 * GB, a.getUsedResources().getMemory()); - assertEquals(13 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(5 * GB, a.getMetrics().getReservedMB()); -@@ -570,7 +570,7 @@ public void testAssignContainersNeedToUnreserve() throws Exception { - // Start testing... 
- // Only AM - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(2 * GB, a.getUsedResources().getMemory()); - assertEquals(2 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0 * GB, a.getMetrics().getReservedMB()); -@@ -581,7 +581,7 @@ public void testAssignContainersNeedToUnreserve() throws Exception { - - // Only 1 map - simulating reduce - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(5 * GB, a.getUsedResources().getMemory()); - assertEquals(5 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0 * GB, a.getMetrics().getReservedMB()); -@@ -592,7 +592,7 @@ public void testAssignContainersNeedToUnreserve() throws Exception { - - // Only 1 map to other node - simulating reduce - a.assignContainers(clusterResource, node_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(8 * GB, a.getUsedResources().getMemory()); - assertEquals(8 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0 * GB, a.getMetrics().getReservedMB()); -@@ -606,7 +606,7 @@ public void testAssignContainersNeedToUnreserve() throws Exception { - - // try to assign reducer (5G on node 0 and should reserve) - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(13 * GB, a.getUsedResources().getMemory()); - assertEquals(8 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(5 * GB, a.getMetrics().getReservedMB()); -@@ -621,7 +621,7 @@ public void testAssignContainersNeedToUnreserve() throws Exception { - - // could allocate but told need to unreserve first - a.assignContainers(clusterResource, node_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(13 * GB, a.getUsedResources().getMemory()); - assertEquals(13 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0 * GB, a.getMetrics().getReservedMB()); -@@ -823,7 +823,7 @@ public void testAssignToQueue() throws Exception { - // Start testing... 
- // Only AM - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(2 * GB, a.getUsedResources().getMemory()); - assertEquals(2 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0 * GB, a.getMetrics().getReservedMB()); -@@ -834,7 +834,7 @@ public void testAssignToQueue() throws Exception { - - // Only 1 map - simulating reduce - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(5 * GB, a.getUsedResources().getMemory()); - assertEquals(5 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0 * GB, a.getMetrics().getReservedMB()); -@@ -845,7 +845,7 @@ public void testAssignToQueue() throws Exception { - - // Only 1 map to other node - simulating reduce - a.assignContainers(clusterResource, node_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(8 * GB, a.getUsedResources().getMemory()); - assertEquals(8 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0 * GB, a.getMetrics().getReservedMB()); -@@ -860,15 +860,16 @@ public void testAssignToQueue() throws Exception { - Resource capability = Resources.createResource(32 * GB, 0); - boolean res = - a.canAssignToThisQueue(clusterResource, -- CommonNodeLabelsManager.EMPTY_STRING_SET, new ResourceLimits( -- clusterResource), capability, Resources.none()); -+ RMNodeLabelsManager.NO_LABEL, new ResourceLimits( -+ clusterResource), capability, Resources.none(), -+ SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertFalse(res); - - // now add in reservations and make sure it continues if config set - // allocate to queue so that the potential new capacity is greater then - // absoluteMaxCapacity - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(13 * GB, a.getUsedResources().getMemory()); - assertEquals(8 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(5 * GB, a.getMetrics().getReservedMB()); -@@ -881,16 +882,17 @@ CommonNodeLabelsManager.EMPTY_STRING_SET, new ResourceLimits( - capability = Resources.createResource(5 * GB, 0); - res = - a.canAssignToThisQueue(clusterResource, -- CommonNodeLabelsManager.EMPTY_STRING_SET, new ResourceLimits( -- clusterResource), capability, Resources -- .createResource(5 * GB)); -+ RMNodeLabelsManager.NO_LABEL, new ResourceLimits( -+ clusterResource), capability, Resources.createResource(5 * GB), -+ SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertTrue(res); - - // tell to not check reservations - res = - a.canAssignToThisQueue(clusterResource, -- CommonNodeLabelsManager.EMPTY_STRING_SET, new ResourceLimits( -- clusterResource), capability, Resources.none()); -+ RMNodeLabelsManager.NO_LABEL, new ResourceLimits( -+ clusterResource), capability, Resources.none(), -+ SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertFalse(res); - - refreshQueuesTurnOffReservationsContLook(a, csConf); -@@ -899,15 +901,16 @@ CommonNodeLabelsManager.EMPTY_STRING_SET, new ResourceLimits( - // in since feature is off - res = - a.canAssignToThisQueue(clusterResource, -- CommonNodeLabelsManager.EMPTY_STRING_SET, new ResourceLimits( -- clusterResource), capability, Resources.none()); 
-+ RMNodeLabelsManager.NO_LABEL, new ResourceLimits( -+ clusterResource), capability, Resources.none(), -+ SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertFalse(res); - - res = - a.canAssignToThisQueue(clusterResource, -- CommonNodeLabelsManager.EMPTY_STRING_SET, new ResourceLimits( -- clusterResource), capability, Resources -- .createResource(5 * GB)); -+ RMNodeLabelsManager.NO_LABEL, new ResourceLimits( -+ clusterResource), capability, Resources.createResource(5 * GB), -+ SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertFalse(res); - } - -@@ -1008,7 +1011,7 @@ public void testAssignToUser() throws Exception { - // Start testing... - // Only AM - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(2 * GB, a.getUsedResources().getMemory()); - assertEquals(2 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0 * GB, a.getMetrics().getReservedMB()); -@@ -1019,7 +1022,7 @@ public void testAssignToUser() throws Exception { - - // Only 1 map - simulating reduce - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(5 * GB, a.getUsedResources().getMemory()); - assertEquals(5 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0 * GB, a.getMetrics().getReservedMB()); -@@ -1030,7 +1033,7 @@ public void testAssignToUser() throws Exception { - - // Only 1 map to other node - simulating reduce - a.assignContainers(clusterResource, node_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(8 * GB, a.getUsedResources().getMemory()); - assertEquals(8 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0 * GB, a.getMetrics().getReservedMB()); -@@ -1044,7 +1047,7 @@ public void testAssignToUser() throws Exception { - // allocate to queue so that the potential new capacity is greater then - // absoluteMaxCapacity - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(13 * GB, a.getUsedResources().getMemory()); - assertEquals(8 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(5 * GB, app_0.getCurrentReservation().getMemory()); -@@ -1059,18 +1062,18 @@ public void testAssignToUser() throws Exception { - // set limit so subtrace reservations it can continue - Resource limit = Resources.createResource(12 * GB, 0); - boolean res = a.canAssignToUser(clusterResource, user_0, limit, app_0, -- true, null); -+ true, """"); - assertTrue(res); - - // tell it not to check for reservations and should fail as already over - // limit -- res = a.canAssignToUser(clusterResource, user_0, limit, app_0, false, null); -+ res = a.canAssignToUser(clusterResource, user_0, limit, app_0, false, """"); - assertFalse(res); - - refreshQueuesTurnOffReservationsContLook(a, csConf); - - // should now return false since feature off -- res = a.canAssignToUser(clusterResource, user_0, limit, app_0, true, null); -+ res = a.canAssignToUser(clusterResource, user_0, limit, app_0, true, """"); - assertFalse(res); - } - -@@ -1143,7 +1146,7 @@ public void testReservationsNoneAvailable() throws Exception { - // Start testing... 
- // Only AM - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(2 * GB, a.getUsedResources().getMemory()); - assertEquals(2 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0 * GB, a.getMetrics().getReservedMB()); -@@ -1155,7 +1158,7 @@ public void testReservationsNoneAvailable() throws Exception { - - // Only 1 map - simulating reduce - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(5 * GB, a.getUsedResources().getMemory()); - assertEquals(5 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0 * GB, a.getMetrics().getReservedMB()); -@@ -1167,7 +1170,7 @@ public void testReservationsNoneAvailable() throws Exception { - - // Only 1 map to other node - simulating reduce - a.assignContainers(clusterResource, node_1, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(8 * GB, a.getUsedResources().getMemory()); - assertEquals(8 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0 * GB, a.getMetrics().getReservedMB()); -@@ -1183,7 +1186,7 @@ public void testReservationsNoneAvailable() throws Exception { - // some resource. Even with continous reservation looking, we don't allow - // unreserve resource to reserve container. - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(Resources.createResource(10 * GB))); -+ new ResourceLimits(Resources.createResource(10 * GB)), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(8 * GB, a.getUsedResources().getMemory()); - assertEquals(8 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0 * GB, a.getMetrics().getReservedMB()); -@@ -1199,7 +1202,7 @@ public void testReservationsNoneAvailable() throws Exception { - // used (8G) + required (5G). It will not reserved since it has to unreserve - // some resource. Unfortunately, there's nothing to unreserve. 
- a.assignContainers(clusterResource, node_2, -- new ResourceLimits(Resources.createResource(10 * GB))); -+ new ResourceLimits(Resources.createResource(10 * GB)), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(8 * GB, a.getUsedResources().getMemory()); - assertEquals(8 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0 * GB, a.getMetrics().getReservedMB()); -@@ -1213,7 +1216,7 @@ public void testReservationsNoneAvailable() throws Exception { - - // let it assign 5G to node_2 - a.assignContainers(clusterResource, node_2, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(13 * GB, a.getUsedResources().getMemory()); - assertEquals(13 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(0 * GB, a.getMetrics().getReservedMB()); -@@ -1226,7 +1229,7 @@ public void testReservationsNoneAvailable() throws Exception { - - // reserve 8G node_0 - a.assignContainers(clusterResource, node_0, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(21 * GB, a.getUsedResources().getMemory()); - assertEquals(13 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(8 * GB, a.getMetrics().getReservedMB()); -@@ -1241,7 +1244,7 @@ public void testReservationsNoneAvailable() throws Exception { - // continued to try due to having reservation above, - // but hits queue limits so can't reserve anymore. - a.assignContainers(clusterResource, node_2, -- new ResourceLimits(clusterResource)); -+ new ResourceLimits(clusterResource), SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); - assertEquals(21 * GB, a.getUsedResources().getMemory()); - assertEquals(13 * GB, app_0.getCurrentConsumption().getMemory()); - assertEquals(8 * GB, a.getMetrics().getReservedMB()); -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestUtils.java -index 62135b91df4d7..84abf4e5445bf 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestUtils.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestUtils.java -@@ -160,6 +160,7 @@ public static ResourceRequest createResourceRequest( - request.setCapability(capability); - request.setRelaxLocality(relaxLocality); - request.setPriority(priority); -+ request.setNodeLabelExpression(RMNodeLabelsManager.NO_LABEL); - return request; - } - -@@ -273,6 +274,7 @@ public static Configuration getConfigurationWithQueueLabels(Configuration config - conf.setCapacity(B1, 100); - conf.setMaximumCapacity(B1, 100); - conf.setCapacityByLabel(B1, ""y"", 100); -+ conf.setMaximumApplicationMasterResourcePerQueuePercent(B1, 1f); - - final String C1 = C + "".c1""; - conf.setQueues(C, new String[] {""c1""});" -4916128227c75b9ead023d37ba86a2685aebf62c,Delta Spike,"DELTASPIKE-289 enable WindowContext via Extension -",c,https://github.com/apache/deltaspike,"diff --git 
a/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/scope/window/DeltaSpikeContextExtension.java b/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/scope/window/DeltaSpikeContextExtension.java -new file mode 100644 -index 000000000..9f03783b2 ---- /dev/null -+++ b/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/scope/window/DeltaSpikeContextExtension.java -@@ -0,0 +1,38 @@ -+/* -+ * Licensed to the Apache Software Foundation (ASF) under one -+ * or more contributor license agreements. See the NOTICE file -+ * distributed with this work for additional information -+ * regarding copyright ownership. The ASF licenses this file -+ * to you under the Apache License, Version 2.0 (the -+ * ""License""); you may not use this file except in compliance -+ * with the License. You may obtain a copy of the License at -+ * -+ * http://www.apache.org/licenses/LICENSE-2.0 -+ * -+ * Unless required by applicable law or agreed to in writing, -+ * software distributed under the License is distributed on an -+ * ""AS IS"" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -+ * KIND, either express or implied. See the License for the -+ * specific language governing permissions and limitations -+ * under the License. -+ */ -+package org.apache.deltaspike.core.impl.scope.window; -+ -+import javax.enterprise.event.Observes; -+import javax.enterprise.inject.spi.AfterBeanDiscovery; -+import javax.enterprise.inject.spi.BeanManager; -+ -+/** -+ * Handle all DeltaSpike WindowContext and ConversationContext -+ * related features. -+ */ -+public class DeltaSpikeContextExtension -+{ -+ private DefaultWindowContext windowContext; -+ -+ public void registerDeltaSpikeContexts(@Observes AfterBeanDiscovery afterBeanDiscovery, BeanManager beanManager) -+ { -+ windowContext = new DefaultWindowContext(beanManager); -+ afterBeanDiscovery.addContext(windowContext); -+ } -+}" -887139648d2e693bb50f286810231150bf1fba9f,drools,BZ-986000 - DRL-to-RuleModel marshalling- improvements--,p,https://github.com/kiegroup/drools,"diff --git a/drools-workbench-models/drools-workbench-models-commons/src/main/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceImpl.java b/drools-workbench-models/drools-workbench-models-commons/src/main/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceImpl.java -index 069fea6cbb3..bfb2af28075 100644 ---- a/drools-workbench-models/drools-workbench-models-commons/src/main/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceImpl.java -+++ b/drools-workbench-models/drools-workbench-models-commons/src/main/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceImpl.java -@@ -740,8 +740,10 @@ public void generateSeparator( FieldConstraint constr, - if ( !gctx.isHasOutput() ) { - return; - } -- if ( gctx.getDepth() == 0 ) { -- buf.append( "", "" ); -+ if ( gctx.getDepth() == 0 ) { -+ if (buf.length() > 2 && !(buf.charAt(buf.length() - 2) == ',')) { -+ buf.append("", ""); -+ } - } else { - CompositeFieldConstraint cconstr = (CompositeFieldConstraint) gctx.getParent().getFieldConstraint(); - buf.append( cconstr.getCompositeJunctionType() + "" "" ); -@@ -800,18 +802,18 @@ private void generateSingleFieldConstraint( final SingleFieldConstraint constr, - assertConstraintValue( constr ); - - if ( isConstraintComplete( constr ) ) { -- SingleFieldConstraint parent = (SingleFieldConstraint) constr.getParent(); -- StringBuilder parentBuf = new StringBuilder(); -- while ( parent != 
null ) { -- String fieldName = parent.getFieldName(); -- parentBuf.insert( 0, -- fieldName + ""."" ); -- parent = (SingleFieldConstraint) parent.getParent(); -- } -- buf.append( parentBuf ); - if ( constr instanceof SingleFieldConstraintEBLeftSide ) { - buf.append( ( (SingleFieldConstraintEBLeftSide) constr ).getExpressionLeftSide().getText() ); - } else { -+ SingleFieldConstraint parent = (SingleFieldConstraint) constr.getParent(); -+ StringBuilder parentBuf = new StringBuilder(); -+ while ( parent != null ) { -+ String fieldName = parent.getFieldName(); -+ parentBuf.insert( 0, -+ fieldName + ""."" ); -+ parent = (SingleFieldConstraint) parent.getParent(); -+ } -+ buf.append( parentBuf ); - String fieldName = constr.getFieldName(); - buf.append( fieldName ); - } -diff --git a/drools-workbench-models/drools-workbench-models-commons/src/test/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceTest.java b/drools-workbench-models/drools-workbench-models-commons/src/test/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceTest.java -index a2b7bfaa608..71e2ede3d3f 100644 ---- a/drools-workbench-models/drools-workbench-models-commons/src/test/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceTest.java -+++ b/drools-workbench-models/drools-workbench-models-commons/src/test/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceTest.java -@@ -275,7 +275,6 @@ public void testSumAsGivenValue() { - } - - @Test -- @Ignore - public void testNotNull() { - String expected = """" + - ""rule \""my rule\"" \n"" + -@@ -284,6 +283,37 @@ public void testNotNull() { - "" Customer( contact != null , contact.tel1 > 15 )\n"" + - "" then\n"" + - ""end\n""; -+ -+ PackageDataModelOracle dmo = mock(PackageDataModelOracle.class); -+ when( -+ dmo.getProjectModelFields() -+ ).thenReturn( -+ new HashMap() {{ -+ put(""Customer"", -+ new ModelField[]{ -+ new ModelField( -+ ""contact"", -+ ""Contact"", -+ ModelField.FIELD_CLASS_TYPE.TYPE_DECLARATION_CLASS, -+ ModelField.FIELD_ORIGIN.DECLARED, -+ FieldAccessorsAndMutators.BOTH, -+ ""Contact"" -+ ) -+ }); -+ put(""Contact"", -+ new ModelField[]{ -+ new ModelField( -+ ""tel1"", -+ ""Integer"", -+ ModelField.FIELD_CLASS_TYPE.TYPE_DECLARATION_CLASS, -+ ModelField.FIELD_ORIGIN.DECLARED, -+ FieldAccessorsAndMutators.BOTH, -+ ""Integer"" -+ ) -+ }); -+ }} -+ ); -+ - final RuleModel m = new RuleModel(); - - FactPattern factPattern = new FactPattern(); -@@ -304,7 +334,7 @@ public void testNotNull() { - - m.name = ""my rule""; - -- checkMarshallUnmarshall(expected, m); -+ checkMarshallUnmarshall(expected, m, dmo); - } - - @Test" -f6d8ce0ddb06773f1b9a269bdb94ac8840673d82,tapiji,"Prepares maven pom files for building tapiji translator (RCP) -",a,https://github.com/tapiji/tapiji,"diff --git a/org.eclipselabs.tapiji.translator.swt.compat/META-INF/MANIFEST.MF b/org.eclipselabs.tapiji.translator.swt.compat/META-INF/MANIFEST.MF -index ac6e3ae8..c1fcc5b9 100644 ---- a/org.eclipselabs.tapiji.translator.swt.compat/META-INF/MANIFEST.MF -+++ b/org.eclipselabs.tapiji.translator.swt.compat/META-INF/MANIFEST.MF -@@ -2,7 +2,7 @@ Manifest-Version: 1.0 - Bundle-ManifestVersion: 2 - Bundle-Name: RCP Compatibiltiy for TapiJI Translator - Bundle-SymbolicName: org.eclipselabs.tapiji.translator.swt.compat --Bundle-Version: 0.0.2.qualifier -+Bundle-Version: 0.9.0.B1 - Bundle-Activator: org.eclipselabs.tapiji.translator.compat.Activator - Bundle-ActivationPolicy: lazy - Bundle-RequiredExecutionEnvironment: 
JavaSE-1.6 -diff --git a/org.eclipselabs.tapiji.translator.swt.compat/pom.xml b/org.eclipselabs.tapiji.translator.swt.compat/pom.xml -new file mode 100644 -index 00000000..dc9aaae2 ---- /dev/null -+++ b/org.eclipselabs.tapiji.translator.swt.compat/pom.xml -@@ -0,0 +1,29 @@ -+ -+ -+ -+ -+ -+ 4.0.0 -+ org.eclipselabs.tapiji.translator.swt.compat -+ eclipse-plugin -+ -+ -+ org.eclipselabs.tapiji -+ org.eclipselabs.tapiji.translator.parent -+ 0.9.0.B1 -+ .. -+ -+ -+ 0.9.0.B1 -+ -diff --git a/org.eclipselabs.tapiji.translator.swt.product/.classpath b/org.eclipselabs.tapiji.translator.swt.product/.classpath -new file mode 100644 -index 00000000..ad32c83a ---- /dev/null -+++ b/org.eclipselabs.tapiji.translator.swt.product/.classpath -@@ -0,0 +1,7 @@ -+ -+ -+ -+ -+ -+ -+ -diff --git a/org.eclipselabs.tapiji.translator.swt.product/.project b/org.eclipselabs.tapiji.translator.swt.product/.project -new file mode 100644 -index 00000000..650df245 ---- /dev/null -+++ b/org.eclipselabs.tapiji.translator.swt.product/.project -@@ -0,0 +1,7 @@ -+ -+ -+ org.eclipselabs.tapiji.translator.swt.parent -+ -+ -+ -+ -diff --git a/org.eclipselabs.tapiji.translator.swt.product/icons/TapiJI_128.png b/org.eclipselabs.tapiji.translator.swt.product/icons/TapiJI_128.png -new file mode 100644 -index 00000000..98514e0e -Binary files /dev/null and b/org.eclipselabs.tapiji.translator.swt.product/icons/TapiJI_128.png differ -diff --git a/org.eclipselabs.tapiji.translator.swt.product/icons/TapiJI_16.png b/org.eclipselabs.tapiji.translator.swt.product/icons/TapiJI_16.png -new file mode 100644 -index 00000000..73b82c2f -Binary files /dev/null and b/org.eclipselabs.tapiji.translator.swt.product/icons/TapiJI_16.png differ -diff --git a/org.eclipselabs.tapiji.translator.swt.product/icons/TapiJI_32.png b/org.eclipselabs.tapiji.translator.swt.product/icons/TapiJI_32.png -new file mode 100644 -index 00000000..3984eba6 -Binary files /dev/null and b/org.eclipselabs.tapiji.translator.swt.product/icons/TapiJI_32.png differ -diff --git a/org.eclipselabs.tapiji.translator.swt.product/icons/TapiJI_48.png b/org.eclipselabs.tapiji.translator.swt.product/icons/TapiJI_48.png -new file mode 100644 -index 00000000..190a92e4 -Binary files /dev/null and b/org.eclipselabs.tapiji.translator.swt.product/icons/TapiJI_48.png differ -diff --git a/org.eclipselabs.tapiji.translator.swt.product/icons/TapiJI_64.png b/org.eclipselabs.tapiji.translator.swt.product/icons/TapiJI_64.png -new file mode 100644 -index 00000000..89988073 -Binary files /dev/null and b/org.eclipselabs.tapiji.translator.swt.product/icons/TapiJI_64.png differ -diff --git a/org.eclipselabs.tapiji.translator.swt/org.eclipselabs.tapiji.translator.product b/org.eclipselabs.tapiji.translator.swt.product/org.eclipselabs.tapiji.translator.swt.product.product -similarity index 94% -rename from org.eclipselabs.tapiji.translator.swt/org.eclipselabs.tapiji.translator.product -rename to org.eclipselabs.tapiji.translator.swt.product/org.eclipselabs.tapiji.translator.swt.product.product -index ff6c0744..6b53d02a 100644 ---- a/org.eclipselabs.tapiji.translator.swt/org.eclipselabs.tapiji.translator.product -+++ b/org.eclipselabs.tapiji.translator.swt.product/org.eclipselabs.tapiji.translator.swt.product.product -@@ -1,7 +1,7 @@ - - - -- -+ - - - -@@ -239,9 +239,7 @@ litigation. - - - -- -- -- -+ - - - -@@ -252,7 +250,6 @@ litigation. - - - -- - - - -@@ -288,11 +285,6 @@ litigation. - - - -- -- -- -- -- - - - -@@ -300,10 +292,7 @@ litigation. - - - -- -- - -- - - - -@@ -317,8 +306,7 @@ litigation. 
- - - -- -- -+ - - - -diff --git a/org.eclipselabs.tapiji.translator.swt.product/pom.xml b/org.eclipselabs.tapiji.translator.swt.product/pom.xml -new file mode 100644 -index 00000000..67d70438 ---- /dev/null -+++ b/org.eclipselabs.tapiji.translator.swt.product/pom.xml -@@ -0,0 +1,44 @@ -+ -+ -+ -+ -+ -+ 4.0.0 -+ org.eclipselabs.tapiji.translator.swt.product -+ eclipse-application -+ -+ -+ org.eclipselabs.tapiji -+ org.eclipselabs.tapiji.translator.parent -+ 0.9.0.B1 -+ .. -+ -+ -+ -+ -diff --git a/org.eclipselabs.tapiji.translator.swt.product/splash.bmp b/org.eclipselabs.tapiji.translator.swt.product/splash.bmp -new file mode 100644 -index 00000000..9283331e -Binary files /dev/null and b/org.eclipselabs.tapiji.translator.swt.product/splash.bmp differ -diff --git a/org.eclipselabs.tapiji.translator.swt/fragment.xml b/org.eclipselabs.tapiji.translator.swt/fragment.xml -index c5df4ff8..d7f1d28d 100644 ---- a/org.eclipselabs.tapiji.translator.swt/fragment.xml -+++ b/org.eclipselabs.tapiji.translator.swt/fragment.xml -@@ -6,11 +6,11 @@ - id=""product"" - point=""org.eclipse.core.runtime.products""> - - -+ value=""platform:/plugin/org.eclipselabs.tapiji.translator/icons/TapiJI_16.png,platform:/plugin/org.eclipselabs.tapiji.translator/icons/TapiJI_32.png,platform:/plugin/org.eclipselabs.tapiji.translator/icons/TapiJI_48.png,platform:/plugin/org.eclipselabs.tapiji.translator/icons/TapiJI_64.png,platform:/plugin/org.eclipselabs.tapiji.translator/icons/TapiJI_128.png""> - - - -+ value=""platform:/plugin/org.eclipselabs.tapiji.translator/icons/TapiJI_128.png""> - - -+ -+ -+ -+ -+ 4.0.0 -+ org.eclipselabs.tapiji.translator.swt -+ eclipse-plugin -+ -+ -+ org.eclipselabs.tapiji -+ org.eclipselabs.tapiji.translator.parent -+ 0.9.0.B1 -+ .. -+ -+ -+ -diff --git a/org.eclipselabs.tapiji.translator/META-INF/MANIFEST.MF b/org.eclipselabs.tapiji.translator/META-INF/MANIFEST.MF -index 2eeeb311..8432e599 100644 ---- a/org.eclipselabs.tapiji.translator/META-INF/MANIFEST.MF -+++ b/org.eclipselabs.tapiji.translator/META-INF/MANIFEST.MF -@@ -2,7 +2,7 @@ Manifest-Version: 1.0 - Bundle-ManifestVersion: 2 - Bundle-Name: TapiJI Translator - Bundle-SymbolicName: org.eclipselabs.tapiji.translator;singleton:=true --Bundle-Version: 0.0.2.qualifier -+Bundle-Version: 0.9.0.B1 - Bundle-Activator: org.eclipselabs.tapiji.translator.Activator - Require-Bundle: org.eclipse.ui;resolution:=optional, - org.eclipse.core.runtime;bundle-version=""[3.5.0,4.0.0)"", -diff --git a/org.eclipselabs.tapiji.translator/plugin.xml b/org.eclipselabs.tapiji.translator/plugin.xml -index 4e251edb..077db6a4 100644 ---- a/org.eclipselabs.tapiji.translator/plugin.xml -+++ b/org.eclipselabs.tapiji.translator/plugin.xml -@@ -80,4 +80,48 @@ - - - -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ - -diff --git a/org.eclipselabs.tapiji.translator/pom.xml b/org.eclipselabs.tapiji.translator/pom.xml -new file mode 100644 -index 00000000..84cbce9f ---- /dev/null -+++ b/org.eclipselabs.tapiji.translator/pom.xml -@@ -0,0 +1,22 @@ -+ -+ -+ -+ -+ -+ 4.0.0 -+ org.eclipselabs.tapiji.translator -+ eclipse-plugin -+ -+ -+ org.eclipselabs.tapiji -+ org.eclipselabs.tapiji.translator.parent -+ 0.9.0.B1 -+ .. 
-+ -+ -+ -diff --git a/pom.xml b/pom.xml -index dfa7fd1a..d4ce706f 100644 ---- a/pom.xml -+++ b/pom.xml -@@ -1,129 +1,194 @@ - - -- -+ - - -- 4.0.0 -- org.eclipse.babel.plugins -- org.eclipse.babel.tapiji.tools.parent -- 0.0.2-SNAPSHOT -- pom -+ xsi:schemaLocation=""http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"" -+ xmlns=""http://maven.apache.org/POM/4.0.0"" xmlns:xsi=""http://www.w3.org/2001/XMLSchema-instance""> -+ 4.0.0 -+ org.eclipselabs.tapiji -+ org.eclipselabs.tapiji.translator.parent -+ 0.9.0.B1 -+ pom - -- -- -- 3.0 -- -+ -+ org.eclipselabs.tapiji.translator.swt.compat -+ org.eclipselabs.tapiji.translator -+ org.eclipselabs.tapiji.translator.swt.product -+ - -- -- 0.16.0 -- -- -- -- -- indigo -- p2 -- http://download.eclipse.org/releases/indigo -- -- -+ -+ 0.16.0 -+ http://download.eclipse.org/eclipse/updates/3.6 -+ http://build.eclipse.org/technology/babel/tools-updates-nightly -+ - -- -- -- maven.eclipse.org -- http://maven.eclipse.org/nexus/content/repositories/milestone-indigo/ -- -- -+ -+ -+ indigo rcp -+ -+ http://download.eclipse.org/releases/indigo -+ http://build.eclipse.org/technology/babel/tools-updates-nightly -+ -+ -+ true -+ -+ maven.profile -+ swt-editor -+ -+ -+ -+ -+ -+ org.eclipse.tycho -+ target-platform-configuration -+ ${tycho-version} -+ -+ -+ -+ ignore -+ -+ -+ eclipse-plugin -+ org.eclipselabs.tapiji.translator.swt.compat -+ 0.0.0 -+ -+ -+ eclipse-plugin -+ org.eclipse.babel.editor.swt.compat -+ 0.9.0 -+ -+ -+ eclipse-plugin -+ org.eclipse.ui -+ 0.0.0 -+ -+ -+ eclipse-plugin -+ org.eclipse.jface.text -+ 0.0.0 -+ -+ -+ eclipse-plugin -+ org.eclipse.ui.editors -+ 0.0.0 -+ -+ -+ eclipse-plugin -+ org.eclipse.ui.ide -+ 0.0.0 -+ -+ -+ eclipse-plugin -+ org.eclipse.ui.workbench.texteditor -+ 0.0.0 -+ -+ -+ eclipse-plugin -+ org.eclipse.ui.forms -+ 0.0.0 -+ -+ -+ eclipse-plugin -+ org.eclipse.ltk.core.refactoring -+ 0.0.0 -+ -+ -+ eclipse-plugin -+ org.eclipse.ltk.ui.refactoring -+ 0.0.0 -+ -+ -+ eclipse-plugin -+ org.junit -+ 0.0.0 -+ -+ -+ eclipse-plugin -+ org.eclipse.ui.forms -+ 3.5.101 -+ -+ -+ -+ -+ -+ -+ -+ -+ - -- -- -- -- -- org.eclipse.tycho -- tycho-maven-plugin -- ${tycho-version} -- true -- -+ -+ -+ eclipse-repository -+ ${eclipse-repository-url} -+ p2 -+ -+ -+ babel-extension-plugins -+ ${babel-plugins-url} -+ p2 -+ -+ - -- -- -- org.eclipse.tycho -- tycho-source-plugin -- ${tycho-version} -- -- -- plugin-source -- -- plugin-source -- -- -- -- -+ -+ -+ -+ org.eclipse.tycho -+ tycho-maven-plugin -+ ${tycho-version} -+ true -+ -+ -+ org.eclipse.tycho -+ target-platform-configuration -+ ${tycho-version} -+ -+ -+ -+ linux -+ gtk -+ x86_64 -+ -+ -+ linux -+ gtk -+ x86 -+ -+ -+ macosx -+ carbon -+ x86 -+ -+ -+ macosx -+ cocoa -+ x86 -+ -+ -+ macosx -+ cocoa -+ x86_64 -+ -+ -+ win32 -+ win32 -+ x86 -+ -+ -+ win32 -+ win32 -+ x86_64 -+ -+ -+ -+ -+ -+ - -- -- org.eclipse.tycho -- target-platform-configuration -- ${tycho-version} -- -- -- -- org.eclipse.babel.plugins -- org.eclipse.babel.tapiji.tools.target -- 0.0.2-SNAPSHOT -- -- -- -- -- -- linux -- gtk -- x86_64 -- -- -- win32 -- win32 -- x86_64 -- -- -- -- -- -- -- -- -- -- org.eclipse.dash.maven -- eclipse-signing-maven-plugin -- 1.0.5 -- -- -- -- -- -- org.eclipse.babel.core -- org.eclipse.babel.editor -- org.eclipse.babel.tapiji.tools.core -- org.eclipse.babel.tapiji.tools.core.ui -- org.eclipse.babel.tapiji.tools.java -- org.eclipse.babel.tapiji.tools.java.feature -- org.eclipse.babel.tapiji.tools.java.ui -- org.eclipse.babel.tapiji.tools.rbmanager -- 
org.eclipse.babel.editor.nls -- org.eclipse.babel.tapiji.tools.target -- org.eclipse.babel.core.pdeutils -- org.eclipse.babel.repository -- - " -1c31074ed8a27a91cc794c5ebf26c64fcb0fda75,Vala,"libsoup-2.4: update to 2.30.0 - -Fixes bug 615047. -",a,https://github.com/GNOME/vala/,⚠️ Could not parse repo info -ce5970c5bf7bf7925b0830054da067c1c89e7b0f,Vala,"vapigen: fix a crash if type_name is used for pointer arguments - -Fixes bug 614348. -",c,https://github.com/GNOME/vala/,⚠️ Could not parse repo info -614faccf1d353c3b4835e6df0e6902839d54b5f6,hadoop,YARN-1910. Fixed a race condition in TestAMRMTokens- that causes the test to fail more often on Windows. Contributed by Xuan Gong.- svn merge --ignore-ancestry -c 1586192 ../../trunk/--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1586193 13f79535-47bb-0310-9956-ffa450edef68-,c,https://github.com/apache/hadoop,"diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt -index 2abb35dfa9b02..188a80035ac85 100644 ---- a/hadoop-yarn-project/CHANGES.txt -+++ b/hadoop-yarn-project/CHANGES.txt -@@ -79,6 +79,9 @@ Release 2.4.1 - UNRELEASED - YARN-1908. Fixed DistributedShell to not fail in secure clusters. (Vinod - Kumar Vavilapalli and Jian He via vinodkv) - -+ YARN-1910. Fixed a race condition in TestAMRMTokens that causes the test to -+ fail more often on Windows. (Xuan Gong via vinodkv) -+ - Release 2.4.0 - 2014-04-07 - - INCOMPATIBLE CHANGES -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestAMRMTokens.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestAMRMTokens.java -index aa894c5f6a920..64602bd888e27 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestAMRMTokens.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestAMRMTokens.java -@@ -48,6 +48,7 @@ - import org.apache.hadoop.yarn.server.resourcemanager.TestAMAuthorization.MyContainerManager; - import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; - import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt; -+import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState; - import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptContainerFinishedEvent; - import org.apache.hadoop.yarn.server.utils.BuilderUtils; - import org.apache.hadoop.yarn.util.Records; -@@ -63,6 +64,7 @@ public class TestAMRMTokens { - private static final Log LOG = LogFactory.getLog(TestAMRMTokens.class); - - private final Configuration conf; -+ private static final int maxWaitAttempts = 50; - - @Parameters - public static Collection configs() { -@@ -153,6 +155,16 @@ public void testTokenExpiry() throws Exception { - new RMAppAttemptContainerFinishedEvent(applicationAttemptId, - containerStatus)); - -+ // Make sure the RMAppAttempt is at Finished State. -+ // Both AMRMToken and ClientToAMToken have been removed. 
-+ int count = 0; -+ while (attempt.getState() != RMAppAttemptState.FINISHED -+ && count < maxWaitAttempts) { -+ Thread.sleep(100); -+ count++; -+ } -+ Assert.assertTrue(attempt.getState() == RMAppAttemptState.FINISHED); -+ - // Now simulate trying to allocate. RPC call itself should throw auth - // exception. - rpc.stopProxy(rmClient, conf); // To avoid using cached client" -f16e39ecb1dcb4d5964235ef94d84ab4d70ac314,hadoop,Merge -c 1529538 from trunk to branch-2 to fix- YARN-1090. Fixed CS UI to better reflect applications as non-schedulable and- not as pending. Contributed by Jian He.--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1529539 13f79535-47bb-0310-9956-ffa450edef68-,c,https://github.com/apache/hadoop,"diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt -index 6a997b42ecf8d..4babceb873418 100644 ---- a/hadoop-yarn-project/CHANGES.txt -+++ b/hadoop-yarn-project/CHANGES.txt -@@ -166,6 +166,9 @@ Release 2.1.2 - UNRELEASED - - YARN-1032. Fixed NPE in RackResolver. (Lohit Vijayarenu via acmurthy) - -+ YARN-1090. Fixed CS UI to better reflect applications as non-schedulable -+ and not as pending. (Jian He via acmurthy) -+ - Release 2.1.1-beta - 2013-09-23 - - INCOMPATIBLE CHANGES -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/QueueMetrics.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/QueueMetrics.java -index 9d2c739e480cf..8a030952504fb 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/QueueMetrics.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/QueueMetrics.java -@@ -73,7 +73,7 @@ public class QueueMetrics implements MetricsSource { - @Metric(""Reserved CPU in virtual cores"") MutableGaugeInt reservedVCores; - @Metric(""# of reserved containers"") MutableGaugeInt reservedContainers; - @Metric(""# of active users"") MutableGaugeInt activeUsers; -- @Metric(""# of active users"") MutableGaugeInt activeApplications; -+ @Metric(""# of active applications"") MutableGaugeInt activeApplications; - private final MutableGaugeInt[] runningTime; - private TimeBucketMetrics runBuckets; - -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java -index 0bf851722e218..900c1a62ddade 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java -@@ -98,24 +98,25 @@ protected void render(Block html) { - for (UserInfo entry: users) { - activeUserList.append(entry.getUsername()).append("" <"") - .append(getPercentage(entry.getResourcesUsed(), 
usedResources)) -- .append("", Active Apps: "" + entry.getNumActiveApplications()) -- .append("", Pending Apps: "" + entry.getNumPendingApplications()) -+ .append("", Schedulable Apps: "" + entry.getNumActiveApplications()) -+ .append("", Non-Schedulable Apps: "" + entry.getNumPendingApplications()) - .append("">
""); //Force line break - } - - ResponseInfo ri = info(""\'"" + lqinfo.getQueuePath().substring(5) + ""\' Queue Status""). - _(""Queue State:"", lqinfo.getQueueState()). - _(""Used Capacity:"", percent(lqinfo.getUsedCapacity() / 100)). -+ _(""Absolute Used Capacity:"", percent(lqinfo.getAbsoluteUsedCapacity() / 100)). - _(""Absolute Capacity:"", percent(lqinfo.getAbsoluteCapacity() / 100)). - _(""Absolute Max Capacity:"", percent(lqinfo.getAbsoluteMaxCapacity() / 100)). - _(""Used Resources:"", StringEscapeUtils.escapeHtml(lqinfo.getUsedResources().toString())). -- _(""Num Active Applications:"", Integer.toString(lqinfo.getNumActiveApplications())). -- _(""Num Pending Applications:"", Integer.toString(lqinfo.getNumPendingApplications())). -+ _(""Num Schedulable Applications:"", Integer.toString(lqinfo.getNumActiveApplications())). -+ _(""Num Non-Schedulable Applications:"", Integer.toString(lqinfo.getNumPendingApplications())). - _(""Num Containers:"", Integer.toString(lqinfo.getNumContainers())). - _(""Max Applications:"", Integer.toString(lqinfo.getMaxApplications())). - _(""Max Applications Per User:"", Integer.toString(lqinfo.getMaxApplicationsPerUser())). -- _(""Max Active Applications:"", Integer.toString(lqinfo.getMaxActiveApplications())). -- _(""Max Active Applications Per User:"", Integer.toString(lqinfo.getMaxActiveApplicationsPerUser())). -+ _(""Max Schedulable Applications:"", Integer.toString(lqinfo.getMaxActiveApplications())). -+ _(""Max Schedulable Applications Per User:"", Integer.toString(lqinfo.getMaxActiveApplicationsPerUser())). - _(""Configured Capacity:"", percent(lqinfo.getCapacity() / 100)). - _(""Configured Max Capacity:"", percent(lqinfo.getMaxCapacity() / 100)). - _(""Configured Minimum User Limit Percent:"", Integer.toString(lqinfo.getUserLimit()) + ""%"")." 
-e56f26140787fbe76b3c155c0248558287370e2c,Delta Spike,"DELTASPIKE-208 explicitely enable global alternatives - -This now works on Weld, OWB with BDA enabled, etc -",c,https://github.com/apache/deltaspike,"diff --git a/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/exclude/extension/ExcludeExtension.java b/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/exclude/extension/ExcludeExtension.java -index 88a59724e..22dd33cb1 100644 ---- a/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/exclude/extension/ExcludeExtension.java -+++ b/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/exclude/extension/ExcludeExtension.java -@@ -44,8 +44,10 @@ - import java.net.URL; - import java.util.ArrayList; - import java.util.Arrays; -+import java.util.HashMap; - import java.util.HashSet; - import java.util.List; -+import java.util.Map; - import java.util.Set; - import java.util.jar.Attributes; - import java.util.jar.Manifest; -@@ -60,27 +62,56 @@ - */ - public class ExcludeExtension implements Extension, Deactivatable - { -- private static final Logger LOG = Logger.getLogger(ExcludeExtension.class.getName()); -+ private static final String GLOBAL_ALTERNATIVES = ""globalAlternatives.""; - -- private static Boolean isWeld1Detected = false; -+ private static final Logger LOG = Logger.getLogger(ExcludeExtension.class.getName()); - - private boolean isActivated = true; - private boolean isGlobalAlternativeActivated = true; - private boolean isCustomProjectStageBeanFilterActivated = true; - -+ /** -+ * Contains the globalAlternatives which should get used -+ * KEY=Interface class name -+ * VALUE=Implementation class name -+ */ -+ private Map globalAlternatives = new HashMap(); -+ -+ - @SuppressWarnings(""UnusedDeclaration"") - protected void init(@Observes BeforeBeanDiscovery beforeBeanDiscovery, BeanManager beanManager) - { - isActivated = - ClassDeactivationUtils.isActivated(getClass()); - -- isGlobalAlternativeActivated = -- ClassDeactivationUtils.isActivated(GlobalAlternative.class); -- - isCustomProjectStageBeanFilterActivated = - ClassDeactivationUtils.isActivated(CustomProjectStageBeanFilter.class); - -- isWeld1Detected = isWeld1(beanManager); -+ isGlobalAlternativeActivated = -+ ClassDeactivationUtils.isActivated(GlobalAlternative.class); -+ if (isGlobalAlternativeActivated) -+ { -+ Map allProperties = ConfigResolver.getAllProperties(); -+ for (Map.Entry property : allProperties.entrySet()) -+ { -+ if (property.getKey().startsWith(GLOBAL_ALTERNATIVES)) -+ { -+ String interfaceName = property.getKey().substring(GLOBAL_ALTERNATIVES.length()); -+ String implementation = property.getValue(); -+ if (LOG.isLoggable(Level.FINE)) -+ { -+ LOG.fine(""Enabling global alternative for interface "" + interfaceName + "": "" + implementation); -+ } -+ -+ globalAlternatives.put(interfaceName, implementation); -+ } -+ } -+ -+ if (globalAlternatives.isEmpty()) -+ { -+ isGlobalAlternativeActivated = false; -+ } -+ } - } - - /** -@@ -101,9 +132,9 @@ protected void initProjectStage(@Observes AfterDeploymentValidation afterDeploym - protected void vetoBeans(@Observes ProcessAnnotatedType processAnnotatedType, BeanManager beanManager) - { - //we need to do it before the exclude logic to keep the @Exclude support for global alternatives -- if (isGlobalAlternativeActivated && isWeld1Detected) -+ if (isGlobalAlternativeActivated) - { -- activateGlobalAlternativesWeld1(processAnnotatedType, beanManager); -+ activateGlobalAlternatives(processAnnotatedType, 
beanManager); - } - - if (isCustomProjectStageBeanFilterActivated) -@@ -158,8 +189,8 @@ protected void vetoCustomProjectStageBeans(ProcessAnnotatedType processAnnotated - - - -- private void activateGlobalAlternativesWeld1(ProcessAnnotatedType processAnnotatedType, -- BeanManager beanManager) -+ private void activateGlobalAlternatives(ProcessAnnotatedType processAnnotatedType, -+ BeanManager beanManager) - { - Class currentBean = processAnnotatedType.getAnnotatedType().getJavaClass(); - -@@ -184,7 +215,7 @@ private void activateGlobalAlternativesWeld1(ProcessAnnotatedType processAnnotat - { - alternativeBeanAnnotations = new HashSet(); - -- configuredBeanName = ConfigResolver.getPropertyValue(currentType.getName()); -+ configuredBeanName = globalAlternatives.get(currentType.getName()); - if (configuredBeanName != null && configuredBeanName.length() > 0) - { - alternativeBeanClass = ClassUtils.tryToLoadClassForName(configuredBeanName); -@@ -442,26 +473,6 @@ private void veto(ProcessAnnotatedType processAnnotatedType, String vetoType) - processAnnotatedType.getAnnotatedType().getJavaClass()); - } - -- private boolean isWeld1(BeanManager beanManager) -- { -- if (beanManager.getClass().getName().startsWith(""org.apache"")) -- { -- return false; -- } -- -- if (beanManager.getClass().getName().startsWith(""org.jboss.weld"")) -- { -- String version = getJarVersion(beanManager.getClass()); -- -- if (version != null && version.startsWith(""1."")) -- { -- return true; -- } -- } -- -- return false; -- } -- - private static String getJarVersion(Class targetClass) - { - String manifestFileLocation = getManifestFileLocationOfClass(targetClass); -diff --git a/deltaspike/core/impl/src/test/resources/META-INF/apache-deltaspike.properties b/deltaspike/core/impl/src/test/resources/META-INF/apache-deltaspike.properties -index b935ffcb8..ba2908684 100644 ---- a/deltaspike/core/impl/src/test/resources/META-INF/apache-deltaspike.properties -+++ b/deltaspike/core/impl/src/test/resources/META-INF/apache-deltaspike.properties -@@ -20,10 +20,10 @@ org.apache.deltaspike.core.spi.activation.ClassDeactivator=org.apache.deltaspike - testProperty02=test_value_02 - db=prodDB - --org.apache.deltaspike.test.core.api.alternative.global.BaseBean1=org.apache.deltaspike.test.core.api.alternative.global.SubBaseBean2 --org.apache.deltaspike.test.core.api.alternative.global.BaseInterface1=org.apache.deltaspike.test.core.api.alternative.global.BaseInterface1AlternativeImplementation -+globalAlternatives.org.apache.deltaspike.test.core.api.alternative.global.BaseBean1=org.apache.deltaspike.test.core.api.alternative.global.SubBaseBean2 -+globalAlternatives.org.apache.deltaspike.test.core.api.alternative.global.BaseInterface1=org.apache.deltaspike.test.core.api.alternative.global.BaseInterface1AlternativeImplementation - --org.apache.deltaspike.test.core.api.alternative.global.qualifier.BaseInterface=org.apache.deltaspike.test.core.api.alternative.global.qualifier.AlternativeBaseBeanB -+globalAlternatives.org.apache.deltaspike.test.core.api.alternative.global.qualifier.BaseInterface=org.apache.deltaspike.test.core.api.alternative.global.qualifier.AlternativeBaseBeanB - - configProperty1=14 - configProperty2=7" -72d780f2cab2892259331c8d6f2d5b33d291d416,drools,"JBRULES-3200 Support for dynamic typing (aka- ""traits"")--",a,https://github.com/kiegroup/drools,"diff --git a/drools-compiler/src/main/java/org/drools/lang/descr/TypeDeclarationDescr.java b/drools-compiler/src/main/java/org/drools/lang/descr/TypeDeclarationDescr.java -index 
dbfb8b253b6..bfb18418e9f 100644 ---- a/drools-compiler/src/main/java/org/drools/lang/descr/TypeDeclarationDescr.java -+++ b/drools-compiler/src/main/java/org/drools/lang/descr/TypeDeclarationDescr.java -@@ -102,6 +102,18 @@ public void setType( String name, String namespace ) { - type = new QualifiedName( name, namespace ); - } - -+ public String getSuperTypeName() { -+ return superTypes == null ? null : superTypes.get(0).getName(); -+ } -+ -+ public String getSuperTypeNamespace() { -+ return superTypes == null ? null : superTypes.get(0).getNamespace(); -+ } -+ -+ public String getSupertTypeFullName() { -+ return superTypes == null ? null : superTypes.get(0).getFullName(); -+ } -+ - - /** - * @return the fields" -9b07112e30871a4a4f8253c8418e93417dcdad97,drools,JBRULES-1906: NPE when LiteralRestriction value is- set to null--git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@24509 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-,c,https://github.com/kiegroup/drools,"diff --git a/drools-verifier/src/main/java/org/drools/verifier/components/Field.java b/drools-verifier/src/main/java/org/drools/verifier/components/Field.java -index 4e4b6463cc8..453f1b59d12 100644 ---- a/drools-verifier/src/main/java/org/drools/verifier/components/Field.java -+++ b/drools-verifier/src/main/java/org/drools/verifier/components/Field.java -@@ -18,6 +18,7 @@ public static class FieldType { - public static final FieldType VARIABLE = new FieldType(""Variable""); - public static final FieldType OBJECT = new FieldType(""Object""); - public static final FieldType ENUM = new FieldType(""Enum""); -+ public static final FieldType UNKNOWN = new FieldType(""Unknown""); - - private final String string; - -diff --git a/drools-verifier/src/main/java/org/drools/verifier/components/LiteralRestriction.java b/drools-verifier/src/main/java/org/drools/verifier/components/LiteralRestriction.java -index 73133feb2db..756d31dac0d 100644 ---- a/drools-verifier/src/main/java/org/drools/verifier/components/LiteralRestriction.java -+++ b/drools-verifier/src/main/java/org/drools/verifier/components/LiteralRestriction.java -@@ -8,7 +8,7 @@ - import org.drools.verifier.report.components.Cause; - - /** -- * -+ * - * @author Toni Rikkola - */ - public class LiteralRestriction extends Restriction implements Cause { -@@ -31,7 +31,7 @@ public RestrictionType getRestrictionType() { - - /** - * Compares two LiteralRestrictions by value. -- * -+ * - * @param restriction - * Restriction that this object is compared to. - * @return a negative integer, zero, or a positive integer as this object is -@@ -68,6 +68,8 @@ public int compareValues(LiteralRestriction restriction) - } - } else if (valueType == Field.FieldType.STRING) { - return stringValue.compareTo(restriction.getValueAsString()); -+ } else if (valueType == Field.FieldType.UNKNOWN) { -+ return 0; - } - - throw new DataFormatException(""Value types did not match. Value type "" -@@ -109,6 +111,15 @@ public Date getDateValue() { - - public void setValue(String value) { - -+ if (value == null) { -+ stringValue = null; -+ valueType = Field.FieldType.UNKNOWN; -+ return; -+ } -+ -+ stringValue = value; -+ valueType = Field.FieldType.STRING; -+ - if (""true"".equals(value) || ""false"".equals(value)) { - booleanValue = value.equals(""true""); - valueType = Field.FieldType.BOOLEAN; -@@ -147,11 +158,9 @@ public void setValue(String value) { - // Not a date. 
- } - -- stringValue = value; -- valueType = Field.FieldType.STRING; - } - -- public boolean isBooleanValue() { -+ public boolean getBooleanValue() { - return booleanValue; - } - -diff --git a/drools-verifier/src/test/java/org/drools/verifier/components/LiteralRestrictionTest.java b/drools-verifier/src/test/java/org/drools/verifier/components/LiteralRestrictionTest.java -new file mode 100644 -index 00000000000..668e9bed5c7 ---- /dev/null -+++ b/drools-verifier/src/test/java/org/drools/verifier/components/LiteralRestrictionTest.java -@@ -0,0 +1,44 @@ -+package org.drools.verifier.components; -+ -+import junit.framework.TestCase; -+ -+public class LiteralRestrictionTest extends TestCase { -+ -+ public void testSetValue() { -+ LiteralRestriction booleanRestriction = new LiteralRestriction(); -+ booleanRestriction.setValue(""true""); -+ -+ assertEquals(Field.FieldType.BOOLEAN, booleanRestriction.getValueType()); -+ assertEquals(true, booleanRestriction.getBooleanValue()); -+ -+ LiteralRestriction intRestriction = new LiteralRestriction(); -+ intRestriction.setValue(""1""); -+ -+ assertEquals(Field.FieldType.INT, intRestriction.getValueType()); -+ assertEquals(1, intRestriction.getIntValue()); -+ -+ LiteralRestriction doubleRestriction = new LiteralRestriction(); -+ doubleRestriction.setValue(""1.0""); -+ -+ assertEquals(Field.FieldType.DOUBLE, doubleRestriction.getValueType()); -+ assertEquals(1.0, doubleRestriction.getDoubleValue()); -+ -+ LiteralRestriction dateRestriction = new LiteralRestriction(); -+ dateRestriction.setValue(""11-jan-2008""); -+ -+ assertEquals(Field.FieldType.DATE, dateRestriction.getValueType()); -+ -+ LiteralRestriction stringRestriction = new LiteralRestriction(); -+ stringRestriction.setValue(""test test""); -+ -+ assertEquals(Field.FieldType.STRING, stringRestriction.getValueType()); -+ assertEquals(""test test"", stringRestriction.getValueAsString()); -+ -+ LiteralRestriction nullRestriction = new LiteralRestriction(); -+ nullRestriction.setValue(null); -+ -+ assertEquals(Field.FieldType.UNKNOWN, nullRestriction.getValueType()); -+ assertEquals(null, nullRestriction.getValueAsString()); -+ assertEquals(null, nullRestriction.getValueAsObject()); -+ } -+}" -5fee1b116bcd427168f1fafc7948c2e44520cc5c,intellij-community,PY-16335 Preserve formatting of converted- collection literals--,c,https://github.com/JetBrains/intellij-community,"diff --git a/python/src/com/jetbrains/python/codeInsight/intentions/PyBaseConvertCollectionLiteralIntention.java b/python/src/com/jetbrains/python/codeInsight/intentions/PyBaseConvertCollectionLiteralIntention.java -index 0ac0409edf1f4..daa669af001e3 100644 ---- a/python/src/com/jetbrains/python/codeInsight/intentions/PyBaseConvertCollectionLiteralIntention.java -+++ b/python/src/com/jetbrains/python/codeInsight/intentions/PyBaseConvertCollectionLiteralIntention.java -@@ -98,16 +98,17 @@ public void invoke(@NotNull Project project, Editor editor, PsiFile file) throws - replacedElement = literal; - } - -+ final String innerText = stripLiteralBraces(replacedElement); - final PyElementGenerator elementGenerator = PyElementGenerator.getInstance(project); - final PyExpression newLiteral = elementGenerator.createExpressionFromText(LanguageLevel.forElement(file), -- myLeftBrace + stripLiteralBraces(literal) + myRightBrace); -+ myLeftBrace + innerText + myRightBrace); - replacedElement.replace(newLiteral); - } - - @NotNull -- private static String stripLiteralBraces(@NotNull PySequenceExpression literal) { -+ private static String 
stripLiteralBraces(@NotNull PsiElement literal) { - if (literal instanceof PyTupleExpression) { -- return literal.getText().trim(); -+ return literal.getText(); - } - - final PsiElement firstChild = literal.getFirstChild(); -@@ -130,7 +131,7 @@ private static String stripLiteralBraces(@NotNull PySequenceExpression literal) - contentEndOffset = replacedText.length(); - } - -- return literal.getText().substring(contentStartOffset, contentEndOffset).trim(); -+ return literal.getText().substring(contentStartOffset, contentEndOffset); - } - - @Nullable -diff --git a/python/testData/intentions/PyConvertCollectionLiteralIntentionTest/convertLiteralPreservesFormattingAndComments.py b/python/testData/intentions/PyConvertCollectionLiteralIntentionTest/convertLiteralPreservesFormattingAndComments.py -new file mode 100644 -index 0000000000000..0c688956c9b78 ---- /dev/null -+++ b/python/testData/intentions/PyConvertCollectionLiteralIntentionTest/convertLiteralPreservesFormattingAndComments.py -@@ -0,0 +1,4 @@ -+xs = ( -+ 1, 2, # comment 1 -+ 3 # comment 2 -+) -\ No newline at end of file -diff --git a/python/testData/intentions/PyConvertCollectionLiteralIntentionTest/convertLiteralPreservesFormattingAndComments_after.py b/python/testData/intentions/PyConvertCollectionLiteralIntentionTest/convertLiteralPreservesFormattingAndComments_after.py -new file mode 100644 -index 0000000000000..adccc398ae06a ---- /dev/null -+++ b/python/testData/intentions/PyConvertCollectionLiteralIntentionTest/convertLiteralPreservesFormattingAndComments_after.py -@@ -0,0 +1,4 @@ -+xs = [ -+ 1, 2, # comment 1 -+ 3 # comment 2 -+] -\ No newline at end of file -diff --git a/python/testSrc/com/jetbrains/python/intentions/PyConvertCollectionLiteralIntentionTest.java b/python/testSrc/com/jetbrains/python/intentions/PyConvertCollectionLiteralIntentionTest.java -index 0e50c05e8d1f3..7b10e2bd57cf1 100644 ---- a/python/testSrc/com/jetbrains/python/intentions/PyConvertCollectionLiteralIntentionTest.java -+++ b/python/testSrc/com/jetbrains/python/intentions/PyConvertCollectionLiteralIntentionTest.java -@@ -103,4 +103,9 @@ public void testConvertSetWithoutClosingBraceToTuple() { - public void testConvertSetToList() { - doIntentionTest(CONVERT_SET_TO_LIST); - } -+ -+ // PY-16335 -+ public void testConvertLiteralPreservesFormattingAndComments() { -+ doIntentionTest(CONVERT_TUPLE_TO_LIST); -+ } - }" -5dabaf626e0a3493889eadcbd5ebf73d4e145912,camel,CAMEL-1091 - Fix compilation issue on Java 1.5--git-svn-id: https://svn.apache.org/repos/asf/activemq/camel/trunk@718279 13f79535-47bb-0310-9956-ffa450edef68-,c,https://github.com/apache/camel,"diff --git a/components/camel-jetty/src/test/java/org/apache/camel/component/jetty/InterfacesTest.java b/components/camel-jetty/src/test/java/org/apache/camel/component/jetty/InterfacesTest.java -index b36e2faef83de..337ac66092c4a 100644 ---- a/components/camel-jetty/src/test/java/org/apache/camel/component/jetty/InterfacesTest.java -+++ b/components/camel-jetty/src/test/java/org/apache/camel/component/jetty/InterfacesTest.java -@@ -15,25 +15,27 @@ public class InterfacesTest extends ContextTestSupport { - - private String remoteInterfaceAddress; - -- public InterfacesTest() throws SocketException { -- // retirieve an address of some remote network interface -+ public InterfacesTest() throws IOException { -+ // Retrieve an address of some remote network interface - Enumeration interfaces = NetworkInterface.getNetworkInterfaces(); - - while(interfaces.hasMoreElements()) { - NetworkInterface interfaze = 
interfaces.nextElement(); -- if (!interfaze.isUp() || interfaze.isLoopback()) { -- continue; -- } - Enumeration addresses = interfaze.getInetAddresses(); -- if(addresses.hasMoreElements()) { -- remoteInterfaceAddress = addresses.nextElement().getHostAddress(); -+ if(addresses.hasMoreElements()) { -+ InetAddress nextAddress = addresses.nextElement(); -+ if (nextAddress.isLoopbackAddress() || nextAddress.isReachable(2000)) { -+ break; -+ } -+ remoteInterfaceAddress = nextAddress.getHostAddress(); - } - }; - - } - - public void testLocalInterfaceHandled() throws IOException, InterruptedException { -- getMockEndpoint(""mock:endpoint"").expectedMessageCount(3); -+ int expectedMessages = (remoteInterfaceAddress != null) ? 3 : 2; -+ getMockEndpoint(""mock:endpoint"").expectedMessageCount(expectedMessages); - - URL localUrl = new URL(""http://localhost:4567/testRoute""); - String localResponse = IOUtils.toString(localUrl.openStream()); -@@ -44,9 +46,11 @@ public void testLocalInterfaceHandled() throws IOException, InterruptedException - localResponse = IOUtils.toString(localUrl.openStream()); - assertEquals(""local-differentPort"", localResponse); - -- URL url = new URL(""http://"" + remoteInterfaceAddress + "":4567/testRoute""); -- String remoteResponse = IOUtils.toString(url.openStream()); -- assertEquals(""remote"", remoteResponse); -+ if (remoteInterfaceAddress != null) { -+ URL url = new URL(""http://"" + remoteInterfaceAddress + "":4567/testRoute""); -+ String remoteResponse = IOUtils.toString(url.openStream()); -+ assertEquals(""remote"", remoteResponse); -+ } - - assertMockEndpointsSatisfied(); - } -@@ -65,9 +69,11 @@ public void configure() throws Exception { - .setBody().constant(""local-differentPort"") - .to(""mock:endpoint""); - -- from(""jetty:http://"" + remoteInterfaceAddress + "":4567/testRoute"") -- .setBody().constant(""remote"") -- .to(""mock:endpoint""); -+ if (remoteInterfaceAddress != null) { -+ from(""jetty:http://"" + remoteInterfaceAddress + "":4567/testRoute"") -+ .setBody().constant(""remote"") -+ .to(""mock:endpoint""); -+ } - } - }; - }" -35c49ea2db324c527a45be9a219cc05f2a718950,Mylyn Reviews,"ANTLR based dsl implementation - -removed existing xtext based dsl and replaced with antlr impl. 
-",c,https://github.com/eclipse-mylyn/org.eclipse.mylyn.reviews,"diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/internal/ReviewTaskMapper.java b/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/internal/ReviewTaskMapper.java -index aad5c57d..75b03c6e 100644 ---- a/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/internal/ReviewTaskMapper.java -+++ b/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/internal/ReviewTaskMapper.java -@@ -23,104 +23,153 @@ - import org.eclipse.mylyn.reviews.tasks.core.PatchScopeItem; - import org.eclipse.mylyn.reviews.tasks.core.Rating; - import org.eclipse.mylyn.reviews.tasks.core.ResourceScopeItem; -+import org.eclipse.mylyn.reviews.tasks.core.ReviewResult; - import org.eclipse.mylyn.reviews.tasks.core.ReviewScope; - import org.eclipse.mylyn.reviews.tasks.core.TaskComment; --import org.eclipse.mylyn.reviews.tasks.dsl.parser.antlr.ReviewDslParser; --import org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.AttachmentSource; --import org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.ChangedReviewScope; --import org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.ChangesetDef; --import org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.PatchDef; --import org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.ResourceDef; --import org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.ResultEnum; --import org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.ReviewDslFactory; --import org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.ReviewResult; --import org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.ReviewScopeItem; --import org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.Source; --import org.eclipse.xtext.parser.IParseResult; --import org.eclipse.xtext.parsetree.reconstr.Serializer; -+import org.eclipse.mylyn.reviews.tasks.dsl.IReviewDslMapper; -+import org.eclipse.mylyn.reviews.tasks.dsl.IReviewDslSerializer; -+import org.eclipse.mylyn.reviews.tasks.dsl.ReviewDslAttachmentScopeItem; -+import org.eclipse.mylyn.reviews.tasks.dsl.ReviewDslAttachmentScopeItem.Type; -+import org.eclipse.mylyn.reviews.tasks.dsl.ParseException; -+import org.eclipse.mylyn.reviews.tasks.dsl.ReviewDslChangesetScopeItem; -+import org.eclipse.mylyn.reviews.tasks.dsl.ReviewDslResult; -+import org.eclipse.mylyn.reviews.tasks.dsl.ReviewDslScope; -+import org.eclipse.mylyn.reviews.tasks.dsl.ReviewDslScopeItem; - - /** - * @author mattk - * - */ - public class ReviewTaskMapper implements IReviewMapper { -- private ReviewDslParser parser; -- private Serializer serializer; -+ private IReviewDslMapper parser; -+ private IReviewDslSerializer serializer; - -- public ReviewTaskMapper(ReviewDslParser parser, Serializer serializer) { -+ public ReviewTaskMapper(IReviewDslMapper parser, -+ IReviewDslSerializer serializer) { - this.parser = parser; - this.serializer = serializer; - } - -- private org.eclipse.mylyn.reviews.tasks.core.ReviewResult mapResult( -- org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.ReviewResult parsed, -- TaskComment comment) { -- if (parsed == null) -- return null; -+ @Override -+ public ReviewScope mapTaskToScope(ITaskProperties properties) -+ throws CoreException { -+ Assert.isNotNull(properties); -+ try { -+ ReviewDslScope parsedReviewScope = parser -+ .parseReviewScope(properties.getDescription()); -+ ReviewScope originalScope = mapReviewScope(properties, -+ parsedReviewScope); -+ // FIXME changed review scope -+ // for (TaskComment comment : properties.getComments()) { -+ // if 
(properties.getReporter().equals(comment.getAuthor())) { -+ // ChangedReviewScope changedScope = -+ // parser.parseChangedReviewScope(comment.getText()); -+ // applyChangedScope(properties, originalScope, changedScope); -+ // } -+ // } -+ // } -+ return originalScope; -+ } catch (ParseException ex) { -+ // ignore -+ } -+ return null; - -- org.eclipse.mylyn.reviews.tasks.core.ReviewResult result = new org.eclipse.mylyn.reviews.tasks.core.ReviewResult(); -- result.setReviewer(comment.getAuthor()); -- result.setDate(comment.getDate()); -- result.setRating(mapRating(parsed.getResult())); -- result.setComment(parsed.getComment()); -- return result; - } - -- private Rating mapRating(ResultEnum result) { -- switch (result) { -+ @Override -+ public void mapScopeToTask(ReviewScope scope, ITaskProperties taskProperties) { -+ ReviewDslScope scope2 = mapScope(scope); -+ -+ taskProperties.setDescription(serializer.serialize(scope2)); -+ } -+ -+ @Override -+ public void mapResultToTask( -+ org.eclipse.mylyn.reviews.tasks.core.ReviewResult res, -+ ITaskProperties taskProperties) { -+ ReviewDslResult result = new ReviewDslResult(); -+ ReviewDslResult.Rating rating = ReviewDslResult.Rating.WARNING; -+ switch (res.getRating()) { -+ case FAIL: -+ rating = ReviewDslResult.Rating.FAILED; -+ break; - case PASSED: -- return Rating.PASSED; -- case FAILED: -- return Rating.FAIL; -- case WARNING: -- return Rating.WARNING; -+ rating = ReviewDslResult.Rating.PASSED; -+ break; - case TODO: -- return Rating.TODO; -+ rating = ReviewDslResult.Rating.TODO; -+ break; -+ case WARNING: -+ rating = ReviewDslResult.Rating.WARNING; -+ break; - } -- throw new IllegalArgumentException(); -+ result.setRating(rating); -+ result.setComment(res.getComment()); -+ -+ String resultAsText = serializer.serialize(result); -+ taskProperties.setNewCommentText(resultAsText); - } - - @Override -- public ReviewScope mapTaskToScope(ITaskProperties properties) -- throws CoreException { -- Assert.isNotNull(properties); -- IParseResult parsed = parser.doParse(properties.getDescription()); -+ public org.eclipse.mylyn.reviews.tasks.core.ReviewResult mapCurrentReviewResult( -+ ITaskProperties taskProperties) { -+ Assert.isNotNull(taskProperties); -+ if (taskProperties.getNewCommentText() == null) -+ return null; -+ ReviewResult result = null; -+ try { -+ ReviewDslResult res = parser.parseReviewResult(taskProperties -+ .getNewCommentText()); -+ result = new ReviewResult(); -+ if (res == null) -+ return null; -+ result.setComment(res.getComment()); -+ result.setRating(mapRating(res.getRating())); -+ // FIXME filecomment, linecomment -+ // FIXME author is current -+ // result.setReviewer() -+ // result.setDate() -+ } catch (ParseException ex) { -+ /* ignore */ -+ } -+ return result; -+ } - -- org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.ReviewScope scope = (org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.ReviewScope) parsed -- .getRootASTElement(); -- ReviewScope originalScope = mapReviewScope(properties, scope); -- for (TaskComment comment : properties.getComments()) { -- if (properties.getReporter().equals(comment.getAuthor())) { -- parsed = parser.doParse(comment.getText()); -- if (parsed.getRootASTElement() instanceof ChangedReviewScope) { -- ChangedReviewScope changedScope = (ChangedReviewScope) parsed -- .getRootASTElement(); -- applyChangedScope(properties, originalScope, changedScope); -+ @Override -+ public List mapTaskToResults(ITaskProperties taskProperties) { -+ List results = new ArrayList(); -+ for (TaskComment comment : 
taskProperties.getComments()) { -+ try { -+ ReviewDslResult parsed = parser.parseReviewResult(comment -+ .getText()); -+ if (parsed != null) { -+ results.add(mapResult(parsed, comment)); - } -+ } catch (ParseException ex) { -+ // ignore - } - } -- return originalScope; -+ return results; - } - -- private void applyChangedScope(ITaskProperties properties, -- ReviewScope originalScope, ChangedReviewScope changedScope) -- throws CoreException { -- for (ReviewScopeItem scope : changedScope.getScope()) { -- IReviewScopeItem item = mapReviewScopeItem(properties, scope); -- originalScope.addScope(item); -- } -- } -+ // FIXME Changed Review scope -+ // private void applyChangedScope(ITaskProperties properties, -+ // ReviewScope originalScope, ChangedReviewScope changedScope) -+ // throws CoreException { -+ // for (ReviewScopeItem scope : changedScope.getScope()) { -+ // IReviewScopeItem item = mapReviewScopeItem(properties, scope); -+ // originalScope.addScope(item); -+ // } -+ // } - - private ReviewScope mapReviewScope(ITaskProperties properties, -- org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.ReviewScope scope) -- throws CoreException { -+ ReviewDslScope scope) throws CoreException { - if (scope == null) - return null; - - ReviewScope mappedScope = new ReviewScope(); - mappedScope.setCreator(properties.getReporter()); -- for (org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.ReviewScopeItem s : scope -- .getScope()) { -+ for (ReviewDslScopeItem s : scope.getItems()) { - IReviewScopeItem item = mapReviewScopeItem(properties, s); - if (item != null) { - mappedScope.addScope(item); -@@ -130,174 +179,93 @@ private ReviewScope mapReviewScope(ITaskProperties properties, - } - - private IReviewScopeItem mapReviewScopeItem(ITaskProperties properties, -- org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.ReviewScopeItem s) -- throws CoreException { -+ ReviewDslScopeItem s) throws CoreException { - IReviewScopeItem item = null; -- if (s instanceof PatchDef) { -- item = mapPatchDef(properties, (PatchDef) s); -- } else if (s instanceof ResourceDef) { -- ResourceDef res = (ResourceDef) s; -- item = mapResourceDef(properties, res); -- } else if (s instanceof ChangesetDef) { -- ChangesetDef res = (ChangesetDef) s; -- item = mapChangesetDef(properties, res); -+ if (s instanceof ReviewDslAttachmentScopeItem) { -+ item = mapPatchDef(properties, (ReviewDslAttachmentScopeItem) s); -+ } else if (s instanceof ReviewDslChangesetScopeItem) { -+ item = mapChangesetDef(properties, (ReviewDslChangesetScopeItem) s); - } - return item; - } - - private ChangesetScopeItem mapChangesetDef(ITaskProperties properties, -- ChangesetDef cs) throws CoreException { -- return new ChangesetScopeItem(cs.getRevision(), cs.getUrl()); -+ ReviewDslChangesetScopeItem cs) throws CoreException { -+ return new ChangesetScopeItem(cs.getRevision(), cs.getRepoUrl()); - } - -- private ResourceScopeItem mapResourceDef(ITaskProperties properties, -- ResourceDef res) throws CoreException { -- Source source = res.getSource(); -- Attachment att = null; -- if (source instanceof AttachmentSource) { -- att = parseAttachmenSource(properties, source); -- } -- return new ResourceScopeItem(att); -- } -+ private IReviewScopeItem mapPatchDef(ITaskProperties properties, -+ ReviewDslAttachmentScopeItem scopeItem) throws CoreException { - -- private PatchScopeItem mapPatchDef(ITaskProperties properties, -- PatchDef patch) throws CoreException { -- Source source = patch.getSource(); -- Attachment att = null; -- if (source instanceof AttachmentSource) { -- att = 
parseAttachmenSource(properties, source); -+ Attachment att = ReviewsUtil.findAttachment(scopeItem.getFileName(), -+ scopeItem.getAuthor(), scopeItem.getCreatedDate(), -+ properties.loadFor(scopeItem.getTaskId())); -+ if (scopeItem.getType() == Type.PATCH) { -+ return new PatchScopeItem(att); -+ } else { -+ return new ResourceScopeItem(att); - } -- return new PatchScopeItem(att); - } - -- private Attachment parseAttachmenSource(ITaskProperties properties, -- Source source) throws CoreException { -- AttachmentSource attachment = (AttachmentSource) source; -+ private ReviewResult mapResult(ReviewDslResult parsed, TaskComment comment) { -+ if (parsed == null) -+ return null; - -- Attachment att = ReviewsUtil.findAttachment(attachment.getFilename(), -- attachment.getAuthor(), attachment.getCreatedDate(), -- properties.loadFor(attachment.getTaskId())); -- return att; -+ ReviewResult result = new ReviewResult(); -+ result.setReviewer(comment.getAuthor()); -+ result.setDate(comment.getDate()); -+ result.setRating(mapRating(parsed.getRating())); -+ result.setComment(parsed.getComment()); -+ return result; - } - -- @Override -- public void mapScopeToTask(ReviewScope scope, ITaskProperties taskProperties) { -- org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.ReviewScope scope2 = mapScope(scope); -- -- taskProperties.setDescription(serializer.serialize(scope2)); -+ private Rating mapRating(ReviewDslResult.Rating result) { -+ switch (result) { -+ case PASSED: -+ return Rating.PASSED; -+ case FAILED: -+ return Rating.FAIL; -+ case WARNING: -+ return Rating.WARNING; -+ case TODO: -+ return Rating.TODO; -+ } -+ throw new IllegalArgumentException(); - } - -- private org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.ReviewScope mapScope( -- ReviewScope scope) { -- org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.ReviewScope scope2 = ReviewDslFactory.eINSTANCE -- .createReviewScope(); -+ private ReviewDslScope mapScope(ReviewScope scope) { -+ ReviewDslScope scope2 = new ReviewDslScope(); -+ - for (IReviewScopeItem item : scope.getItems()) { -- scope2.getScope().add(mapScopeItem(item)); -+ scope2.getItems().add(mapScopeItem(item)); - } - return scope2; - } - -- private org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.ReviewScopeItem mapScopeItem( -- IReviewScopeItem item) { -+ private ReviewDslScopeItem mapScopeItem(IReviewScopeItem item) { - if (item instanceof PatchScopeItem) { - PatchScopeItem patchItem = (PatchScopeItem) item; -- PatchDef patch = ReviewDslFactory.eINSTANCE.createPatchDef(); -- Attachment attachment = patchItem.getAttachment(); -- AttachmentSource source = mapAttachment(attachment); -- patch.setSource(source); -- -- return patch; -+ return createAttachmentScopeItem(Type.PATCH, -+ patchItem.getAttachment()); - } else if (item instanceof ResourceScopeItem) { - ResourceScopeItem resourceItem = (ResourceScopeItem) item; -- ResourceDef resource = ReviewDslFactory.eINSTANCE -- .createResourceDef(); -- Attachment attachment = resourceItem.getAttachment(); -- AttachmentSource source = mapAttachment(attachment); -- resource.setSource(source); -- return resource; -+ return createAttachmentScopeItem(Type.RESOURCE, -+ resourceItem.getAttachment()); - } else if (item instanceof ChangesetScopeItem) { - ChangesetScopeItem changesetItem = (ChangesetScopeItem) item; -- ChangesetDef changeset = ReviewDslFactory.eINSTANCE -- .createChangesetDef(); -+ ReviewDslChangesetScopeItem changeset = new ReviewDslChangesetScopeItem(); - changeset.setRevision(changesetItem.getRevisionId()); -- 
changeset.setUrl(changesetItem.getRepositoryUrl()); -+ changeset.setRepoUrl(changesetItem.getRepositoryUrl()); - return changeset; - } - return null; - } - -- private AttachmentSource mapAttachment(Attachment attachment) { -- AttachmentSource source = ReviewDslFactory.eINSTANCE -- .createAttachmentSource(); -- source.setAuthor(attachment.getAuthor()); -- source.setCreatedDate(attachment.getDate()); -- source.setFilename(attachment.getFileName()); -- source.setTaskId(attachment.getTask().getTaskId()); -- return source; -- } -- -- @Override -- public void mapResultToTask( -- org.eclipse.mylyn.reviews.tasks.core.ReviewResult res, -- ITaskProperties taskProperties) { -- ReviewResult result = ReviewDslFactory.eINSTANCE.createReviewResult(); -- ResultEnum rating = ResultEnum.WARNING; -- switch (res.getRating()) { -- case FAIL: -- rating = ResultEnum.FAILED; -- break; -- case PASSED: -- rating = ResultEnum.PASSED; -- break; -- case TODO: -- rating = ResultEnum.TODO; -- break; -- case WARNING: -- rating = ResultEnum.WARNING; -- break; -- } -- result.setResult(rating); -- result.setComment(res.getComment()); -- -- String resultAsText = serializer.serialize(result); -- taskProperties.setNewCommentText(resultAsText); -- } -- -- @Override -- public org.eclipse.mylyn.reviews.tasks.core.ReviewResult mapCurrentReviewResult( -- ITaskProperties taskProperties) { -- Assert.isNotNull(taskProperties); -- if (taskProperties.getNewCommentText() == null) -- return null; -- IParseResult parsed = parser -- .doParse(taskProperties.getNewCommentText()); -- -- org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.ReviewResult res = (org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.ReviewResult) parsed -- .getRootASTElement(); -- org.eclipse.mylyn.reviews.tasks.core.ReviewResult result = new org.eclipse.mylyn.reviews.tasks.core.ReviewResult(); -- if (res == null) -- return null; -- result.setComment(res.getComment()); -- result.setRating(mapRating(res.getResult())); -- // FIXME author is current -- // result.setReviewer() -- // result.setDate() -- return result; -- } -- -- @Override -- public List mapTaskToResults( -- ITaskProperties taskProperties) { -- List results = new ArrayList(); -- for (TaskComment comment : taskProperties.getComments()) { -- IParseResult parsed = parser.doParse(comment.getText()); -- if (parsed.getRootASTElement() != null) { -- results.add(mapResult( -- (org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.ReviewResult) parsed -- .getRootASTElement(), comment)); -- } -- } -- return results; -+ private ReviewDslAttachmentScopeItem createAttachmentScopeItem(Type type, -+ Attachment attachment) { -+ return new ReviewDslAttachmentScopeItem(type, attachment.getFileName(), -+ attachment.getAuthor(), attachment.getDate(), attachment -+ .getTask().getTaskId()); - } - - } -diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/internal/ReviewsUtil.java b/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/internal/ReviewsUtil.java -index 5907e378..1cb3bcc8 100644 ---- a/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/internal/ReviewsUtil.java -+++ b/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/internal/ReviewsUtil.java -@@ -23,14 +23,11 @@ - import org.eclipse.mylyn.reviews.tasks.core.patch.GitPatchPathFindingStrategy; - import org.eclipse.mylyn.reviews.tasks.core.patch.ITargetPathStrategy; - import org.eclipse.mylyn.reviews.tasks.core.patch.SimplePathFindingStrategy; --import 
org.eclipse.mylyn.reviews.tasks.dsl.ReviewDslStandaloneSetup; --import org.eclipse.mylyn.reviews.tasks.dsl.parser.antlr.ReviewDslParser; -+import org.eclipse.mylyn.reviews.tasks.dsl.internal.ReviewDslMapper; -+import org.eclipse.mylyn.reviews.tasks.dsl.internal.ReviewDslSerializer; - import org.eclipse.mylyn.tasks.core.ITask; - import org.eclipse.mylyn.tasks.core.ITaskContainer; - import org.eclipse.mylyn.tasks.core.data.ITaskDataManager; --import org.eclipse.xtext.parsetree.reconstr.Serializer; -- --import com.google.inject.Injector; - - /** - * @author Kilian Matt -@@ -91,14 +88,9 @@ public static Attachment findAttachment(String filename, String author, - return null; - } - -- public static ReviewTaskMapper createMapper() { -- Injector createInjectorAndDoEMFRegistration = new ReviewDslStandaloneSetup() -- .createInjectorAndDoEMFRegistration(); -- ReviewDslParser parser = createInjectorAndDoEMFRegistration -- .getInstance(ReviewDslParser.class); -- Serializer serializer = createInjectorAndDoEMFRegistration -- .getInstance(Serializer.class); -- return new ReviewTaskMapper(parser, serializer); -+ public static IReviewMapper createMapper() { -+ return new ReviewTaskMapper(new ReviewDslMapper(), -+ new ReviewDslSerializer()); - } - - } -diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/.classpath b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/.classpath -index 7e8449de..304e8618 100644 ---- a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/.classpath -+++ b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/.classpath -@@ -1,7 +1,6 @@ - - - -- - - - -diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/META-INF/MANIFEST.MF b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/META-INF/MANIFEST.MF -index de4ab427..e28d532f 100644 ---- a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/META-INF/MANIFEST.MF -+++ b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/META-INF/MANIFEST.MF -@@ -5,27 +5,9 @@ Bundle-Vendor: Eclipse Mylyn - Bundle-Version: 0.7.0.qualifier - Bundle-SymbolicName: org.eclipse.mylyn.reviews.tasks.dsl;singleton:=true - Bundle-ActivationPolicy: lazy --Require-Bundle: org.eclipse.xtext, -- org.eclipse.xtext.generator;resolution:=optional, -- org.apache.commons.logging;resolution:=optional, -- org.eclipse.emf.codegen.ecore;resolution:=optional, -- org.eclipse.emf.mwe.utils;resolution:=optional, -- org.eclipse.emf.mwe2.launch;resolution:=optional, -- com.ibm.icu;resolution:=optional, -- org.eclipse.xtext.xtend;resolution:=optional, -- org.eclipse.xtext.util, -- org.eclipse.emf.ecore, -- org.eclipse.emf.common, -- org.antlr.runtime -+Require-Bundle: org.antlr.runtime;bundle-version=""3.0.0"" - Import-Package: org.apache.log4j - Bundle-RequiredExecutionEnvironment: J2SE-1.5 - Export-Package: org.eclipse.mylyn.reviews.tasks.dsl, -- org.eclipse.mylyn.reviews.tasks.dsl.parseTreeConstruction, -- org.eclipse.mylyn.reviews.tasks.dsl.parser.antlr, -- org.eclipse.mylyn.reviews.tasks.dsl.parser.antlr.internal, -- org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl, -- org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.impl, -- org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.util, -- org.eclipse.mylyn.reviews.tasks.dsl.services, -- org.eclipse.mylyn.reviews.tasks.dsl.validation -+ org.eclipse.mylyn.reviews.tasks.dsl.internal - -diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/plugin.xml b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/plugin.xml -index 8881d7bf..20bb679e 100644 ---- a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/plugin.xml -+++ b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/plugin.xml -@@ -3,13 +3,6 @@ - - - -- -- -- -- - - - -diff --git 
a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/plugin.xml_gen b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/plugin.xml_gen -deleted file mode 100644 -index 8881d7bf..00000000 ---- a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/plugin.xml_gen -+++ /dev/null -@@ -1,18 +0,0 @@ -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/pom.xml b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/pom.xml -index 0eaa057b..a652c89f 100644 ---- a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/pom.xml -+++ b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/pom.xml -@@ -17,14 +17,6 @@ - target/classes - - -- -- org.fornax.toolsupport -- fornax-oaw-m2-plugin -- -- mwe2 -- ${project.basedir}/src/org/eclipse/mylyn/reviews/tasks/dsl/GenerateReviewDsl.mwe2 -- -- - - org.sonatype.tycho - maven-osgi-source-plugin -@@ -38,41 +30,5 @@ - maven-pmd-plugin - - -- -- -- -- org.fornax.toolsupport -- fornax-oaw-m2-plugin -- 3.2.0-SNAPSHOT -- -- mwe2 -- -- -- -- generate-sources -- -- run-workflow -- -- -- -- -- -- - -- -- -- fornax-snapshots -- http://fornax.itemis.de/nexus/content/repositories/snapshots -- -- false -- -- -- true -- -- -- -- fornax-releases -- http://fornax.itemis.de/nexus/content/repositories/releases/ -- -- - -diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/GenerateReviewDsl.mwe2 b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/GenerateReviewDsl.mwe2 -deleted file mode 100644 -index 06d08ac7..00000000 ---- a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/GenerateReviewDsl.mwe2 -+++ /dev/null -@@ -1,100 +0,0 @@ --module org.eclipse.mylyn.reviews.tasks.dsl.ReviewDsl -- --import org.eclipse.emf.mwe.utils.* --import org.eclipse.xtext.generator.* --import org.eclipse.xtext.ui.generator.* -- --var grammarURI = ""classpath:/org/eclipse/mylyn/reviews/tasks/dsl/ReviewDsl.xtext"" --var file.extensions = ""review-dsl"" --var projectName = ""org.eclipse.mylyn.reviews.tasks.dsl"" --var runtimeProject = ""../${projectName}"" -- --Workflow { -- bean = StandaloneSetup { -- platformUri = ""${runtimeProject}/.."" -- } -- -- component = DirectoryCleaner { -- directory = ""${runtimeProject}/src-gen"" -- } -- -- component = DirectoryCleaner { -- directory = ""${runtimeProject}.ui/src-gen"" -- } -- -- component = Generator { -- pathRtProject = runtimeProject -- pathUiProject = ""${runtimeProject}.ui"" -- projectNameRt = projectName -- projectNameUi = ""${projectName}.ui"" -- language = { -- uri = grammarURI -- fileExtensions = file.extensions -- -- // Java API to access grammar elements (required by several other fragments) -- fragment = grammarAccess.GrammarAccessFragment {} -- -- // generates Java API for the generated EPackages -- fragment = ecore.EcoreGeneratorFragment { -- // referencedGenModels = ""uri to genmodel, uri to next genmodel"" -- } -- -- // the serialization component -- fragment = parseTreeConstructor.ParseTreeConstructorFragment {} -- -- // a custom ResourceFactory for use with EMF -- fragment = resourceFactory.ResourceFactoryFragment { -- fileExtensions = file.extensions -- } -- -- // The antlr parser generator fragment. 
-- fragment = parser.antlr.XtextAntlrGeneratorFragment { -- // options = { -- // backtrack = true -- // } -- } -- -- // java-based API for validation -- fragment = validation.JavaValidatorFragment { -- composedCheck = ""org.eclipse.xtext.validation.ImportUriValidator"" -- composedCheck = ""org.eclipse.xtext.validation.NamesAreUniqueValidator"" -- // registerForImportedPackages = true -- } -- -- // scoping and exporting API -- // fragment = scoping.ImportURIScopingFragment {} -- // fragment = exporting.SimpleNamesFragment {} -- -- // scoping and exporting API -- fragment = scoping.ImportNamespacesScopingFragment {} -- fragment = exporting.QualifiedNamesFragment {} -- fragment = builder.BuilderIntegrationFragment {} -- -- // formatter API -- fragment = formatting.FormatterFragment {} -- -- // labeling API -- fragment = labeling.LabelProviderFragment {} -- -- // outline API -- fragment = outline.TransformerFragment {} -- fragment = outline.OutlineNodeAdapterFactoryFragment {} -- fragment = outline.QuickOutlineFragment {} -- -- // quickfix API -- fragment = quickfix.QuickfixProviderFragment {} -- -- // content assist API -- fragment = contentAssist.JavaBasedContentAssistFragment {} -- -- // generates a more lightweight Antlr parser and lexer tailored for content assist -- fragment = parser.antlr.XtextAntlrUiGeneratorFragment {} -- -- // project wizard (optional) -- // fragment = projectWizard.SimpleProjectWizardFragment { -- // generatorProjectName = ""${projectName}.generator"" -- // modelFileExtension = file.extensions -- // } -- } -- } --} -diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/IReviewDslMapper.java b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/IReviewDslMapper.java -new file mode 100644 -index 00000000..6d2de436 ---- /dev/null -+++ b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/IReviewDslMapper.java -@@ -0,0 +1,29 @@ -+/******************************************************************************* -+ * Copyright (c) 2011 Research Group for Industrial Software (INSO), Vienna University of Technology. -+ * All rights reserved. 
This program and the accompanying materials -+ * are made available under the terms of the Eclipse Public License v1.0 -+ * which accompanies this distribution, and is available at -+ * http://www.eclipse.org/legal/epl-v10.html -+ * -+ * Contributors: -+ * Research Group for Industrial Software (INSO), Vienna University of Technology - initial API and implementation -+ *******************************************************************************/ -+ -+package org.eclipse.mylyn.reviews.tasks.dsl; -+ -+/** -+ * -+ * @author mattk -+ * -+ */ -+public interface IReviewDslMapper { -+ -+ public abstract ReviewDslResult parseReviewResult(String text) -+ throws ParseException; -+ -+ public abstract ReviewDslScope parseReviewScope(String text) -+ throws ParseException; -+ -+ public abstract ReviewDslResult parseChangedReviewScope(String text); -+ -+} -\ No newline at end of file -diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/IReviewDslSerializer.java b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/IReviewDslSerializer.java -new file mode 100644 -index 00000000..a014d272 ---- /dev/null -+++ b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/IReviewDslSerializer.java -@@ -0,0 +1,25 @@ -+/******************************************************************************* -+ * Copyright (c) 2011 Research Group for Industrial Software (INSO), Vienna University of Technology. -+ * All rights reserved. This program and the accompanying materials -+ * are made available under the terms of the Eclipse Public License v1.0 -+ * which accompanies this distribution, and is available at -+ * http://www.eclipse.org/legal/epl-v10.html -+ * -+ * Contributors: -+ * Research Group for Industrial Software (INSO), Vienna University of Technology - initial API and implementation -+ *******************************************************************************/ -+ -+package org.eclipse.mylyn.reviews.tasks.dsl; -+ -+/** -+ * -+ * @author mattk -+ * -+ */ -+public interface IReviewDslSerializer { -+ -+ public abstract String serialize(ReviewDslScope scope); -+ -+ public abstract String serialize(ReviewDslResult result); -+ -+} -\ No newline at end of file -diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/ParseException.java b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/ParseException.java -new file mode 100644 -index 00000000..5dc74dea ---- /dev/null -+++ b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/ParseException.java -@@ -0,0 +1,27 @@ -+/******************************************************************************* -+ * Copyright (c) 2011 Research Group for Industrial Software (INSO), Vienna University of Technology. -+ * All rights reserved. 
This program and the accompanying materials -+ * are made available under the terms of the Eclipse Public License v1.0 -+ * which accompanies this distribution, and is available at -+ * http://www.eclipse.org/legal/epl-v10.html -+ * -+ * Contributors: -+ * Research Group for Industrial Software (INSO), Vienna University of Technology - initial API and implementation -+ *******************************************************************************/ -+ -+package org.eclipse.mylyn.reviews.tasks.dsl; -+ -+/** -+ * -+ * @author mattk -+ * -+ */ -+public class ParseException extends Exception { -+ -+ private static final long serialVersionUID = -7998527695103083639L; -+ -+ public ParseException(String message) { -+ super(message); -+ } -+ -+} -diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/ReviewDsl.xtext b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/ReviewDsl.xtext -deleted file mode 100644 -index ea536461..00000000 ---- a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/ReviewDsl.xtext -+++ /dev/null -@@ -1,55 +0,0 @@ --grammar org.eclipse.mylyn.reviews.tasks.dsl.ReviewDsl with org.eclipse.xtext.common.Terminals -- --generate reviewDsl ""http://www.eclipse.org/mylyn/reviews/tasks/dsl/ReviewDsl"" -- --Model: -- ReviewResult | ReviewScope | ChangedReviewScope; -- --ReviewResult: -- ""Review result:"" result=ResultEnum -- (""Comment:"" comment=STRING)? (filecomments+=FileComment)*; -- --enum ResultEnum: -- passed=""PASSED"" | failed=""FAILED"" -- | todo=""TODO"" | warning=""WARNING""; -- --FileComment: -- ""File"" path=STRING "":"" comment=STRING? -- linecomments+=LineComment*; -- --LineComment: -- ""Line"" start=INT (""-"" end=INT)? "":"" comment=STRING; -- --ReviewScope: -- {ReviewScope} ""Review scope:"" -- (scope+=ReviewScopeItem)*; -- --ChangedReviewScope: -- ""Updated review scope:"" -- //(refines"" (refineOriginal?=""original scope""| (""scope from comment #"" refineComment=INT)) "":"" -- (scope+=ReviewScopeItem)+; -- --ChangesetDef: -- (""Changeset"" revision=STRING ""from"" url=STRING); -- --ReviewScopeItem: -- ResourceDef | PatchDef | ChangesetDef; -- --ResourceDef: -- (""Resource"" source=Source); -- --PatchDef: -- (""Patch"" source=Source); -- --Source: -- AttachmentSource; -- --AttachmentSource: -- ""from Attachment"" filename=STRING -- ""by"" author=STRING -- ""on"" createdDate=STRING -- ""of task"" taskId=TASK_ID; -- --terminal TASK_ID: -- ('a'..'z' | 'A'..'Z' | '_' | '-' | '0'..'9')*; -- -diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/ReviewDslAttachmentScopeItem.java b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/ReviewDslAttachmentScopeItem.java -new file mode 100644 -index 00000000..044f346c ---- /dev/null -+++ b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/ReviewDslAttachmentScopeItem.java -@@ -0,0 +1,74 @@ -+/******************************************************************************* -+ * Copyright (c) 2011 Research Group for Industrial Software (INSO), Vienna University of Technology. -+ * All rights reserved. 
This program and the accompanying materials -+ * are made available under the terms of the Eclipse Public License v1.0 -+ * which accompanies this distribution, and is available at -+ * http://www.eclipse.org/legal/epl-v10.html -+ * -+ * Contributors: -+ * Research Group for Industrial Software (INSO), Vienna University of Technology - initial API and implementation -+ *******************************************************************************/ -+package org.eclipse.mylyn.reviews.tasks.dsl; -+ -+/** -+ * -+ * @author mattk -+ * -+ */ -+public class ReviewDslAttachmentScopeItem extends ReviewDslScopeItem { -+ -+ public enum Type { -+ PATCH, RESOURCE -+ } -+ -+ private Type type; -+ private String fileName; -+ private String author; -+ private String createdDate; -+ private String taskId; -+ -+ public ReviewDslAttachmentScopeItem(Type type, String fileName, -+ String author, String createdDate, String taskId) { -+ super(); -+ this.type = type; -+ this.fileName = fileName; -+ this.author = author; -+ this.createdDate = createdDate; -+ this.taskId = taskId; -+ } -+ -+ public Type getType() { -+ return type; -+ } -+ -+ public String getFileName() { -+ return fileName; -+ } -+ -+ public String getAuthor() { -+ return author; -+ } -+ -+ public String getCreatedDate() { -+ return createdDate; -+ } -+ -+ public String getTaskId() { -+ return taskId; -+ } -+ -+ @Override -+ public StringBuilder serialize(StringBuilder sb) { -+ sb.append(type == Type.PATCH ? ""Patch"" : ""Resource""); -+ sb.append(""from Attachment \""""); -+ sb.append(fileName); -+ sb.append(""\"" by \""""); -+ sb.append(author); -+ sb.append(""\"" on \""""); -+ sb.append(createdDate); -+ sb.append(""\"" of task ""); -+ sb.append(taskId); -+ return sb; -+ } -+ -+} -diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/ReviewDslChangesetScopeItem.java b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/ReviewDslChangesetScopeItem.java -new file mode 100644 -index 00000000..bfb5ff27 ---- /dev/null -+++ b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/ReviewDslChangesetScopeItem.java -@@ -0,0 +1,50 @@ -+/******************************************************************************* -+ * Copyright (c) 2011 Research Group for Industrial Software (INSO), Vienna University of Technology. -+ * All rights reserved. 
This program and the accompanying materials -+ * are made available under the terms of the Eclipse Public License v1.0 -+ * which accompanies this distribution, and is available at -+ * http://www.eclipse.org/legal/epl-v10.html -+ * -+ * Contributors: -+ * Research Group for Industrial Software (INSO), Vienna University of Technology - initial API and implementation -+ *******************************************************************************/ -+ -+package org.eclipse.mylyn.reviews.tasks.dsl; -+ -+/** -+ * -+ * @author mattk -+ * -+ */ -+public class ReviewDslChangesetScopeItem extends ReviewDslScopeItem { -+ -+ private String revision; -+ private String repoUrl; -+ -+ public String getRevision() { -+ return revision; -+ } -+ -+ public void setRevision(String revision) { -+ this.revision = revision; -+ } -+ -+ public String getRepoUrl() { -+ return repoUrl; -+ } -+ -+ public void setRepoUrl(String repoUrl) { -+ this.repoUrl = repoUrl; -+ } -+ -+ @Override -+ public StringBuilder serialize(StringBuilder sb) { -+ sb.append(""Changeset \""""); -+ sb.append(revision); -+ sb.append(""\"" from \""""); -+ sb.append(repoUrl); -+ sb.append(""\""""); -+ return sb; -+ } -+ -+} -diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/ReviewDslResult.java b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/ReviewDslResult.java -new file mode 100644 -index 00000000..ee0063d8 ---- /dev/null -+++ b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/ReviewDslResult.java -@@ -0,0 +1,153 @@ -+/******************************************************************************* -+ * Copyright (c) 2011 Research Group for Industrial Software (INSO), Vienna University of Technology. -+ * All rights reserved. 
This program and the accompanying materials -+ * are made available under the terms of the Eclipse Public License v1.0 -+ * which accompanies this distribution, and is available at -+ * http://www.eclipse.org/legal/epl-v10.html -+ * -+ * Contributors: -+ * Research Group for Industrial Software (INSO), Vienna University of Technology - initial API and implementation -+ *******************************************************************************/ -+package org.eclipse.mylyn.reviews.tasks.dsl; -+ -+import java.util.ArrayList; -+import java.util.List; -+ -+/** -+ * -+ * @author mattk -+ * -+ */ -+public class ReviewDslResult { -+ public enum Rating { -+ PASSED, FAILED, WARNING, TODO -+ } -+ -+ public static class FileComment { -+ -+ private String fileName; -+ private String comment; -+ private List lineComments = new ArrayList(); -+ -+ public String getFileName() { -+ return fileName; -+ } -+ -+ public String getComment() { -+ return comment; -+ } -+ -+ public void setFileName(String path) { -+ this.fileName = path; -+ } -+ -+ public void setComment(String comment) { -+ this.comment = comment; -+ } -+ -+ public List getLineComments() { -+ return lineComments; -+ } -+ -+ public StringBuilder serialize(StringBuilder sb) { -+ sb.append(""File \""""); -+ sb.append(fileName); -+ sb.append(""\""""); -+ if (comment != null) { -+ sb.append("" \""""); -+ sb.append(comment); -+ sb.append(""\""""); -+ } -+ for (LineComment c : lineComments) { -+ sb.append(""\n""); -+ c.serialize(sb); -+ } -+ return sb; -+ } -+ -+ } -+ -+ public static class LineComment { -+ private int begin; -+ private int end; -+ private String comment; -+ -+ public int getBegin() { -+ return begin; -+ } -+ -+ public StringBuilder serialize(StringBuilder sb) { -+ sb.append(""Line ""); -+ sb.append(begin); -+ if (begin != end) { -+ sb.append("" - ""); -+ sb.append(end); -+ } -+ sb.append("": \""""); -+ sb.append(comment); -+ sb.append(""\""""); -+ return sb; -+ } -+ -+ public void setBegin(int begin) { -+ this.begin = begin; -+ } -+ -+ public int getEnd() { -+ return end; -+ } -+ -+ public void setEnd(int end) { -+ this.end = end; -+ } -+ -+ public String getComment() { -+ return comment; -+ } -+ -+ public void setComment(String comment) { -+ this.comment = comment; -+ } -+ -+ } -+ -+ private Rating rating; -+ private String comment; -+ private List fileComments = new ArrayList(); -+ -+ public Rating getRating() { -+ return rating; -+ } -+ -+ public String getComment() { -+ return comment; -+ } -+ -+ public void setRating(Rating rating) { -+ this.rating = rating; -+ } -+ -+ public void setComment(String comment) { -+ this.comment = comment; -+ } -+ -+ public List getFileComments() { -+ return fileComments; -+ } -+ -+ public StringBuilder serialize(StringBuilder sb) { -+ sb.append(""Review result: ""); -+ sb.append(rating.toString()); -+ if (comment != null) { -+ sb.append("" \""""); -+ sb.append(comment); -+ sb.append(""\""""); -+ } -+ for (FileComment c : fileComments) { -+ sb.append(""\n""); -+ c.serialize(sb); -+ } -+ return sb; -+ } -+ -+} -diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/ReviewDslRuntimeModule.java b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/ReviewDslRuntimeModule.java -deleted file mode 100644 -index 7bf5ed9f..00000000 ---- a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/ReviewDslRuntimeModule.java -+++ /dev/null -@@ -1,12 +0,0 @@ --/* -- * generated by Xtext -- */ --package 
org.eclipse.mylyn.reviews.tasks.dsl; -- -- --/** -- * Use this class to register components to be used at runtime / without the Equinox extension registry. -- */ --public class ReviewDslRuntimeModule extends org.eclipse.mylyn.reviews.tasks.dsl.AbstractReviewDslRuntimeModule { -- --} -diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/ReviewDslScope.java b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/ReviewDslScope.java -new file mode 100644 -index 00000000..90b9859c ---- /dev/null -+++ b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/ReviewDslScope.java -@@ -0,0 +1,40 @@ -+/******************************************************************************* -+ * Copyright (c) 2011 Research Group for Industrial Software (INSO), Vienna University of Technology. -+ * All rights reserved. This program and the accompanying materials -+ * are made available under the terms of the Eclipse Public License v1.0 -+ * which accompanies this distribution, and is available at -+ * http://www.eclipse.org/legal/epl-v10.html -+ * -+ * Contributors: -+ * Research Group for Industrial Software (INSO), Vienna University of Technology - initial API and implementation -+ *******************************************************************************/ -+package org.eclipse.mylyn.reviews.tasks.dsl; -+ -+import java.util.ArrayList; -+import java.util.Collections; -+import java.util.List; -+ -+/** -+ * -+ * @author mattk -+ * -+ */ -+public class ReviewDslScope { -+ List items = new ArrayList(); -+ public void addItem(ReviewDslScopeItem item) { -+ items.add(item); -+ } -+ -+ public List getItems() { -+ return Collections.unmodifiableList(items); -+ } -+ -+ public StringBuilder serialize(StringBuilder sb) { -+ sb.append(""Review scope:""); -+ for(ReviewDslScopeItem item : items) { -+ item.serialize(sb); -+ sb.append(""\n""); -+ } -+ return sb; -+ } -+} -diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/ReviewDslScopeItem.java b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/ReviewDslScopeItem.java -new file mode 100644 -index 00000000..fc1952b8 ---- /dev/null -+++ b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/ReviewDslScopeItem.java -@@ -0,0 +1,22 @@ -+/******************************************************************************* -+ * Copyright (c) 2011 Research Group for Industrial Software (INSO), Vienna University of Technology. -+ * All rights reserved. 
This program and the accompanying materials -+ * are made available under the terms of the Eclipse Public License v1.0 -+ * which accompanies this distribution, and is available at -+ * http://www.eclipse.org/legal/epl-v10.html -+ * -+ * Contributors: -+ * Research Group for Industrial Software (INSO), Vienna University of Technology - initial API and implementation -+ *******************************************************************************/ -+package org.eclipse.mylyn.reviews.tasks.dsl; -+ -+/** -+ * -+ * @author mattk -+ * -+ */ -+public abstract class ReviewDslScopeItem { -+ -+ public abstract StringBuilder serialize(StringBuilder sb) ; -+ -+} -diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/ReviewDslStandaloneSetup.java b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/ReviewDslStandaloneSetup.java -deleted file mode 100644 -index e5caf580..00000000 ---- a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/ReviewDslStandaloneSetup.java -+++ /dev/null -@@ -1,16 +0,0 @@ -- --package org.eclipse.mylyn.reviews.tasks.dsl; -- --import org.eclipse.mylyn.reviews.tasks.dsl.ReviewDslStandaloneSetupGenerated; -- --/** -- * Initialization support for running Xtext languages -- * without equinox extension registry -- */ --public class ReviewDslStandaloneSetup extends ReviewDslStandaloneSetupGenerated{ -- -- public static void doSetup() { -- new ReviewDslStandaloneSetup().createInjectorAndDoEMFRegistration(); -- } --} -- -diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/formatting/ReviewDslFormatter.java b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/formatting/ReviewDslFormatter.java -deleted file mode 100644 -index 35f59b67..00000000 ---- a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/formatting/ReviewDslFormatter.java -+++ /dev/null -@@ -1,33 +0,0 @@ --/* -- * generated by Xtext -- */ --package org.eclipse.mylyn.reviews.tasks.dsl.formatting; -- --import org.eclipse.mylyn.reviews.tasks.dsl.services.ReviewDslGrammarAccess; --import org.eclipse.xtext.formatting.impl.AbstractDeclarativeFormatter; --import org.eclipse.xtext.formatting.impl.FormattingConfig; -- --/** -- * This class contains custom formatting description. 
-- * -- * see : http://www.eclipse.org/Xtext/documentation/latest/xtext.html#formatting -- * on how and when to use it -- * -- * Also see {@link org.eclipse.xtext.xtext.XtextFormattingTokenSerializer} as an -- * example -- */ --public class ReviewDslFormatter extends AbstractDeclarativeFormatter { -- -- @Override -- protected void configureFormatting(FormattingConfig c) { -- ReviewDslGrammarAccess grammar = (ReviewDslGrammarAccess) getGrammarAccess(); -- c.setLinewrap().after(grammar.getModelRule()); -- c.setIndentationIncrement().before(grammar.getReviewScopeItemRule()); -- c.setIndentationDecrement().after(grammar.getReviewScopeItemRule()); -- c.setLinewrap().after(grammar.getReviewScopeItemRule()); -- c.setLinewrap().before( -- grammar.getReviewResultAccess().getCommentKeyword_2_0()); -- c.setLinewrap().after( -- grammar.getReviewResultAccess().getCommentAssignment_2_1()); -- } --} -diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/internal/ReviewDslLexer.java b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/internal/ReviewDslLexer.java -new file mode 100644 -index 00000000..69cf5513 ---- /dev/null -+++ b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/internal/ReviewDslLexer.java -@@ -0,0 +1,2399 @@ -+/******************************************************************************* -+ * Copyright (c) 2011 Research Group for Industrial Software (INSO), Vienna University of Technology. -+ * All rights reserved. This program and the accompanying materials -+ * are made available under the terms of the Eclipse Public License v1.0 -+ * which accompanies this distribution, and is available at -+ * http://www.eclipse.org/legal/epl-v10.html -+ * -+ * Contributors: -+ * Research Group for Industrial Software (INSO), Vienna University of Technology - initial API and implementation -+ *******************************************************************************/ -+package org.eclipse.mylyn.reviews.tasks.dsl.internal; -+ -+// $ANTLR 3.0 ReviewDsl.g 2011-03-06 18:30:25 -+ -+import org.antlr.runtime.CharStream; -+import org.antlr.runtime.EarlyExitException; -+import org.antlr.runtime.Lexer; -+import org.antlr.runtime.MismatchedSetException; -+import org.antlr.runtime.NoViableAltException; -+import org.antlr.runtime.RecognitionException; -+ -+/** -+ * -+ * @author mattk -+ * -+ */ -+public class ReviewDslLexer extends Lexer { -+ public static final int TASK_ID=6; -+ public static final int UNICODE_ESC=8; -+ public static final int OCTAL_ESC=9; -+ public static final int HEX_DIGIT=10; -+ public static final int T29=29; -+ public static final int INT=5; -+ public static final int T28=28; -+ public static final int T27=27; -+ public static final int T26=26; -+ public static final int T25=25; -+ public static final int Tokens=33; -+ public static final int T24=24; -+ public static final int EOF=-1; -+ public static final int T23=23; -+ public static final int T22=22; -+ public static final int T21=21; -+ public static final int T20=20; -+ public static final int ESC_SEQ=7; -+ public static final int WS=11; -+ public static final int T12=12; -+ public static final int T13=13; -+ public static final int T14=14; -+ public static final int T15=15; -+ public static final int T16=16; -+ public static final int T17=17; -+ public static final int T18=18; -+ public static final int T30=30; -+ public static final int T19=19; -+ public static final int T32=32; -+ public static final int STRING=4; -+ public 
static final int T31=31; -+ public ReviewDslLexer() {;} -+ public ReviewDslLexer(CharStream input) { -+ super(input); -+ } -+ public String getGrammarFileName() { return ""ReviewDsl.g""; } -+ -+ // $ANTLR start T12 -+ public final void mT12() throws RecognitionException { -+ try { -+ int _type = T12; -+ // ReviewDsl.g:3:7: ( 'Review' ) -+ // ReviewDsl.g:3:7: 'Review' -+ { -+ match(""Review""); -+ -+ -+ } -+ -+ this.type = _type; -+ } -+ finally { -+ } -+ } -+ // $ANTLR end T12 -+ -+ // $ANTLR start T13 -+ public final void mT13() throws RecognitionException { -+ try { -+ int _type = T13; -+ // ReviewDsl.g:4:7: ( 'result:' ) -+ // ReviewDsl.g:4:7: 'result:' -+ { -+ match(""result:""); -+ -+ -+ } -+ -+ this.type = _type; -+ } -+ finally { -+ } -+ } -+ // $ANTLR end T13 -+ -+ // $ANTLR start T14 -+ public final void mT14() throws RecognitionException { -+ try { -+ int _type = T14; -+ // ReviewDsl.g:5:7: ( 'Comment:' ) -+ // ReviewDsl.g:5:7: 'Comment:' -+ { -+ match(""Comment:""); -+ -+ -+ } -+ -+ this.type = _type; -+ } -+ finally { -+ } -+ } -+ // $ANTLR end T14 -+ -+ // $ANTLR start T15 -+ public final void mT15() throws RecognitionException { -+ try { -+ int _type = T15; -+ // ReviewDsl.g:6:7: ( 'PASSED' ) -+ // ReviewDsl.g:6:7: 'PASSED' -+ { -+ match(""PASSED""); -+ -+ -+ } -+ -+ this.type = _type; -+ } -+ finally { -+ } -+ } -+ // $ANTLR end T15 -+ -+ // $ANTLR start T16 -+ public final void mT16() throws RecognitionException { -+ try { -+ int _type = T16; -+ // ReviewDsl.g:7:7: ( 'WARNING' ) -+ // ReviewDsl.g:7:7: 'WARNING' -+ { -+ match(""WARNING""); -+ -+ -+ } -+ -+ this.type = _type; -+ } -+ finally { -+ } -+ } -+ // $ANTLR end T16 -+ -+ // $ANTLR start T17 -+ public final void mT17() throws RecognitionException { -+ try { -+ int _type = T17; -+ // ReviewDsl.g:8:7: ( 'FAILED' ) -+ // ReviewDsl.g:8:7: 'FAILED' -+ { -+ match(""FAILED""); -+ -+ -+ } -+ -+ this.type = _type; -+ } -+ finally { -+ } -+ } -+ // $ANTLR end T17 -+ -+ // $ANTLR start T18 -+ public final void mT18() throws RecognitionException { -+ try { -+ int _type = T18; -+ // ReviewDsl.g:9:7: ( 'TODO' ) -+ // ReviewDsl.g:9:7: 'TODO' -+ { -+ match(""TODO""); -+ -+ -+ } -+ -+ this.type = _type; -+ } -+ finally { -+ } -+ } -+ // $ANTLR end T18 -+ -+ // $ANTLR start T19 -+ public final void mT19() throws RecognitionException { -+ try { -+ int _type = T19; -+ // ReviewDsl.g:10:7: ( 'File' ) -+ // ReviewDsl.g:10:7: 'File' -+ { -+ match(""File""); -+ -+ -+ } -+ -+ this.type = _type; -+ } -+ finally { -+ } -+ } -+ // $ANTLR end T19 -+ -+ // $ANTLR start T20 -+ public final void mT20() throws RecognitionException { -+ try { -+ int _type = T20; -+ // ReviewDsl.g:11:7: ( ':' ) -+ // ReviewDsl.g:11:7: ':' -+ { -+ match(':'); -+ -+ } -+ -+ this.type = _type; -+ } -+ finally { -+ } -+ } -+ // $ANTLR end T20 -+ -+ // $ANTLR start T21 -+ public final void mT21() throws RecognitionException { -+ try { -+ int _type = T21; -+ // ReviewDsl.g:12:7: ( 'Line' ) -+ // ReviewDsl.g:12:7: 'Line' -+ { -+ match(""Line""); -+ -+ -+ } -+ -+ this.type = _type; -+ } -+ finally { -+ } -+ } -+ // $ANTLR end T21 -+ -+ // $ANTLR start T22 -+ public final void mT22() throws RecognitionException { -+ try { -+ int _type = T22; -+ // ReviewDsl.g:13:7: ( '-' ) -+ // ReviewDsl.g:13:7: '-' -+ { -+ match('-'); -+ -+ } -+ -+ this.type = _type; -+ } -+ finally { -+ } -+ } -+ // $ANTLR end T22 -+ -+ // $ANTLR start T23 -+ public final void mT23() throws RecognitionException { -+ try { -+ int _type = T23; -+ // ReviewDsl.g:14:7: ( 'scope:' ) -+ // ReviewDsl.g:14:7: 
'scope:' -+ { -+ match(""scope:""); -+ -+ -+ } -+ -+ this.type = _type; -+ } -+ finally { -+ } -+ } -+ // $ANTLR end T23 -+ -+ // $ANTLR start T24 -+ public final void mT24() throws RecognitionException { -+ try { -+ int _type = T24; -+ // ReviewDsl.g:15:7: ( 'Resource' ) -+ // ReviewDsl.g:15:7: 'Resource' -+ { -+ match(""Resource""); -+ -+ -+ } -+ -+ this.type = _type; -+ } -+ finally { -+ } -+ } -+ // $ANTLR end T24 -+ -+ // $ANTLR start T25 -+ public final void mT25() throws RecognitionException { -+ try { -+ int _type = T25; -+ // ReviewDsl.g:16:7: ( 'Changeset' ) -+ // ReviewDsl.g:16:7: 'Changeset' -+ { -+ match(""Changeset""); -+ -+ -+ } -+ -+ this.type = _type; -+ } -+ finally { -+ } -+ } -+ // $ANTLR end T25 -+ -+ // $ANTLR start T26 -+ public final void mT26() throws RecognitionException { -+ try { -+ int _type = T26; -+ // ReviewDsl.g:17:7: ( 'from' ) -+ // ReviewDsl.g:17:7: 'from' -+ { -+ match(""from""); -+ -+ -+ } -+ -+ this.type = _type; -+ } -+ finally { -+ } -+ } -+ // $ANTLR end T26 -+ -+ // $ANTLR start T27 -+ public final void mT27() throws RecognitionException { -+ try { -+ int _type = T27; -+ // ReviewDsl.g:18:7: ( 'Patch' ) -+ // ReviewDsl.g:18:7: 'Patch' -+ { -+ match(""Patch""); -+ -+ -+ } -+ -+ this.type = _type; -+ } -+ finally { -+ } -+ } -+ // $ANTLR end T27 -+ -+ // $ANTLR start T28 -+ public final void mT28() throws RecognitionException { -+ try { -+ int _type = T28; -+ // ReviewDsl.g:19:7: ( 'Attachment' ) -+ // ReviewDsl.g:19:7: 'Attachment' -+ { -+ match(""Attachment""); -+ -+ -+ } -+ -+ this.type = _type; -+ } -+ finally { -+ } -+ } -+ // $ANTLR end T28 -+ -+ // $ANTLR start T29 -+ public final void mT29() throws RecognitionException { -+ try { -+ int _type = T29; -+ // ReviewDsl.g:20:7: ( 'by' ) -+ // ReviewDsl.g:20:7: 'by' -+ { -+ match(""by""); -+ -+ -+ } -+ -+ this.type = _type; -+ } -+ finally { -+ } -+ } -+ // $ANTLR end T29 -+ -+ // $ANTLR start T30 -+ public final void mT30() throws RecognitionException { -+ try { -+ int _type = T30; -+ // ReviewDsl.g:21:7: ( 'on' ) -+ // ReviewDsl.g:21:7: 'on' -+ { -+ match(""on""); -+ -+ -+ } -+ -+ this.type = _type; -+ } -+ finally { -+ } -+ } -+ // $ANTLR end T30 -+ -+ // $ANTLR start T31 -+ public final void mT31() throws RecognitionException { -+ try { -+ int _type = T31; -+ // ReviewDsl.g:22:7: ( 'of' ) -+ // ReviewDsl.g:22:7: 'of' -+ { -+ match(""of""); -+ -+ -+ } -+ -+ this.type = _type; -+ } -+ finally { -+ } -+ } -+ // $ANTLR end T31 -+ -+ // $ANTLR start T32 -+ public final void mT32() throws RecognitionException { -+ try { -+ int _type = T32; -+ // ReviewDsl.g:23:7: ( 'task' ) -+ // ReviewDsl.g:23:7: 'task' -+ { -+ match(""task""); -+ -+ -+ } -+ -+ this.type = _type; -+ } -+ finally { -+ } -+ } -+ // $ANTLR end T32 -+ -+ // $ANTLR start TASK_ID -+ public final void mTASK_ID() throws RecognitionException { -+ try { -+ int _type = TASK_ID; -+ // ReviewDsl.g:72:2: ( ( 'a' .. 'z' | 'A' .. 'Z' )+ ( '_' | '-' )? ( 'a' .. 'z' | 'A' .. 'Z' | INT )+ ) -+ // ReviewDsl.g:72:2: ( 'a' .. 'z' | 'A' .. 'Z' )+ ( '_' | '-' )? ( 'a' .. 'z' | 'A' .. 'Z' | INT )+ -+ { -+ // ReviewDsl.g:72:2: ( 'a' .. 'z' | 'A' .. 
'Z' )+ -+ int cnt1=0; -+ loop1: -+ do { -+ int alt1=2; -+ int LA1_0 = input.LA(1); -+ -+ if ( ((LA1_0>='a' && LA1_0<='z')) ) { -+ alt1=1; -+ } -+ else if ( ((LA1_0>='A' && LA1_0<='Z')) ) { -+ alt1=1; -+ } -+ -+ -+ switch (alt1) { -+ case 1 : -+ // ReviewDsl.g: -+ { -+ if ( (input.LA(1)>='A' && input.LA(1)<='Z')||(input.LA(1)>='a' && input.LA(1)<='z') ) { -+ input.consume(); -+ -+ } -+ else { -+ MismatchedSetException mse = -+ new MismatchedSetException(null,input); -+ recover(mse); throw mse; -+ } -+ -+ -+ } -+ break; -+ -+ default : -+ if ( cnt1 >= 1 ) break loop1; -+ EarlyExitException eee = -+ new EarlyExitException(1, input); -+ throw eee; -+ } -+ cnt1++; -+ } while (true); -+ -+ // ReviewDsl.g:72:25: ( '_' | '-' )? -+ int alt2=2; -+ int LA2_0 = input.LA(1); -+ -+ if ( (LA2_0=='-'||LA2_0=='_') ) { -+ alt2=1; -+ } -+ switch (alt2) { -+ case 1 : -+ // ReviewDsl.g: -+ { -+ if ( input.LA(1)=='-'||input.LA(1)=='_' ) { -+ input.consume(); -+ -+ } -+ else { -+ MismatchedSetException mse = -+ new MismatchedSetException(null,input); -+ recover(mse); throw mse; -+ } -+ -+ -+ } -+ break; -+ -+ } -+ -+ // ReviewDsl.g:72:38: ( 'a' .. 'z' | 'A' .. 'Z' | INT )+ -+ int cnt3=0; -+ loop3: -+ do { -+ int alt3=4; -+ switch ( input.LA(1) ) { -+ case 'a': -+ case 'b': -+ case 'c': -+ case 'd': -+ case 'e': -+ case 'f': -+ case 'g': -+ case 'h': -+ case 'i': -+ case 'j': -+ case 'k': -+ case 'l': -+ case 'm': -+ case 'n': -+ case 'o': -+ case 'p': -+ case 'q': -+ case 'r': -+ case 's': -+ case 't': -+ case 'u': -+ case 'v': -+ case 'w': -+ case 'x': -+ case 'y': -+ case 'z': -+ { -+ alt3=1; -+ } -+ break; -+ case 'A': -+ case 'B': -+ case 'C': -+ case 'D': -+ case 'E': -+ case 'F': -+ case 'G': -+ case 'H': -+ case 'I': -+ case 'J': -+ case 'K': -+ case 'L': -+ case 'M': -+ case 'N': -+ case 'O': -+ case 'P': -+ case 'Q': -+ case 'R': -+ case 'S': -+ case 'T': -+ case 'U': -+ case 'V': -+ case 'W': -+ case 'X': -+ case 'Y': -+ case 'Z': -+ { -+ alt3=2; -+ } -+ break; -+ case '0': -+ case '1': -+ case '2': -+ case '3': -+ case '4': -+ case '5': -+ case '6': -+ case '7': -+ case '8': -+ case '9': -+ { -+ alt3=3; -+ } -+ break; -+ -+ } -+ -+ switch (alt3) { -+ case 1 : -+ // ReviewDsl.g:72:39: 'a' .. 'z' -+ { -+ matchRange('a','z'); -+ -+ } -+ break; -+ case 2 : -+ // ReviewDsl.g:72:50: 'A' .. 'Z' -+ { -+ matchRange('A','Z'); -+ -+ } -+ break; -+ case 3 : -+ // ReviewDsl.g:72:61: INT -+ { -+ mINT(); -+ -+ } -+ break; -+ -+ default : -+ if ( cnt3 >= 1 ) break loop3; -+ EarlyExitException eee = -+ new EarlyExitException(3, input); -+ throw eee; -+ } -+ cnt3++; -+ } while (true); -+ -+ -+ } -+ -+ this.type = _type; -+ } -+ finally { -+ } -+ } -+ // $ANTLR end TASK_ID -+ -+ // $ANTLR start INT -+ public final void mINT() throws RecognitionException { -+ try { -+ int _type = INT; -+ // ReviewDsl.g:75:2: ( ( '0' .. '9' )+ ) -+ // ReviewDsl.g:75:2: ( '0' .. '9' )+ -+ { -+ // ReviewDsl.g:75:2: ( '0' .. '9' )+ -+ int cnt4=0; -+ loop4: -+ do { -+ int alt4=2; -+ int LA4_0 = input.LA(1); -+ -+ if ( ((LA4_0>='0' && LA4_0<='9')) ) { -+ alt4=1; -+ } -+ -+ -+ switch (alt4) { -+ case 1 : -+ // ReviewDsl.g:75:2: '0' .. 
'9' -+ { -+ matchRange('0','9'); -+ -+ } -+ break; -+ -+ default : -+ if ( cnt4 >= 1 ) break loop4; -+ EarlyExitException eee = -+ new EarlyExitException(4, input); -+ throw eee; -+ } -+ cnt4++; -+ } while (true); -+ -+ -+ } -+ -+ this.type = _type; -+ } -+ finally { -+ } -+ } -+ // $ANTLR end INT -+ -+ // $ANTLR start STRING -+ public final void mSTRING() throws RecognitionException { -+ try { -+ int _type = STRING; -+ // ReviewDsl.g:79:8: ( '\""' ( ESC_SEQ | ~ ( '\\\\' | '\""' ) )* '\""' ) -+ // ReviewDsl.g:79:8: '\""' ( ESC_SEQ | ~ ( '\\\\' | '\""' ) )* '\""' -+ { -+ match('\""'); -+ // ReviewDsl.g:79:12: ( ESC_SEQ | ~ ( '\\\\' | '\""' ) )* -+ loop5: -+ do { -+ int alt5=3; -+ int LA5_0 = input.LA(1); -+ -+ if ( (LA5_0=='\\') ) { -+ alt5=1; -+ } -+ else if ( ((LA5_0>='\u0000' && LA5_0<='!')||(LA5_0>='#' && LA5_0<='[')||(LA5_0>=']' && LA5_0<='\uFFFE')) ) { -+ alt5=2; -+ } -+ -+ -+ switch (alt5) { -+ case 1 : -+ // ReviewDsl.g:79:14: ESC_SEQ -+ { -+ mESC_SEQ(); -+ -+ } -+ break; -+ case 2 : -+ // ReviewDsl.g:79:24: ~ ( '\\\\' | '\""' ) -+ { -+ if ( (input.LA(1)>='\u0000' && input.LA(1)<='!')||(input.LA(1)>='#' && input.LA(1)<='[')||(input.LA(1)>=']' && input.LA(1)<='\uFFFE') ) { -+ input.consume(); -+ -+ } -+ else { -+ MismatchedSetException mse = -+ new MismatchedSetException(null,input); -+ recover(mse); throw mse; -+ } -+ -+ -+ } -+ break; -+ -+ default : -+ break loop5; -+ } -+ } while (true); -+ -+ match('\""'); -+ -+ } -+ -+ this.type = _type; -+ } -+ finally { -+ } -+ } -+ // $ANTLR end STRING -+ -+ // $ANTLR start ESC_SEQ -+ public final void mESC_SEQ() throws RecognitionException { -+ try { -+ // ReviewDsl.g:83:9: ( '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\""' | '\\'' | '\\\\' ) | UNICODE_ESC | OCTAL_ESC ) -+ int alt6=3; -+ int LA6_0 = input.LA(1); -+ -+ if ( (LA6_0=='\\') ) { -+ switch ( input.LA(2) ) { -+ case 'u': -+ { -+ alt6=2; -+ } -+ break; -+ case '\""': -+ case '\'': -+ case '\\': -+ case 'b': -+ case 'f': -+ case 'n': -+ case 'r': -+ case 't': -+ { -+ alt6=1; -+ } -+ break; -+ case '0': -+ case '1': -+ case '2': -+ case '3': -+ case '4': -+ case '5': -+ case '6': -+ case '7': -+ { -+ alt6=3; -+ } -+ break; -+ default: -+ NoViableAltException nvae = -+ new NoViableAltException(""81:1: fragment ESC_SEQ : ( '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\""' | '\\'' | '\\\\' ) | UNICODE_ESC | OCTAL_ESC );"", 6, 1, input); -+ -+ throw nvae; -+ } -+ -+ } -+ else { -+ NoViableAltException nvae = -+ new NoViableAltException(""81:1: fragment ESC_SEQ : ( '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\""' | '\\'' | '\\\\' ) | UNICODE_ESC | OCTAL_ESC );"", 6, 0, input); -+ -+ throw nvae; -+ } -+ switch (alt6) { -+ case 1 : -+ // ReviewDsl.g:83:9: '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\""' | '\\'' | '\\\\' ) -+ { -+ match('\\'); -+ if ( input.LA(1)=='\""'||input.LA(1)=='\''||input.LA(1)=='\\'||input.LA(1)=='b'||input.LA(1)=='f'||input.LA(1)=='n'||input.LA(1)=='r'||input.LA(1)=='t' ) { -+ input.consume(); -+ -+ } -+ else { -+ MismatchedSetException mse = -+ new MismatchedSetException(null,input); -+ recover(mse); throw mse; -+ } -+ -+ -+ } -+ break; -+ case 2 : -+ // ReviewDsl.g:84:9: UNICODE_ESC -+ { -+ mUNICODE_ESC(); -+ -+ } -+ break; -+ case 3 : -+ // ReviewDsl.g:85:9: OCTAL_ESC -+ { -+ mOCTAL_ESC(); -+ -+ } -+ break; -+ -+ } -+ } -+ finally { -+ } -+ } -+ // $ANTLR end ESC_SEQ -+ -+ // $ANTLR start OCTAL_ESC -+ public final void mOCTAL_ESC() throws RecognitionException { -+ try { -+ // ReviewDsl.g:89:9: ( '\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. 
'7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) ) -+ int alt7=3; -+ int LA7_0 = input.LA(1); -+ -+ if ( (LA7_0=='\\') ) { -+ int LA7_1 = input.LA(2); -+ -+ if ( ((LA7_1>='0' && LA7_1<='3')) ) { -+ int LA7_2 = input.LA(3); -+ -+ if ( ((LA7_2>='0' && LA7_2<='7')) ) { -+ int LA7_5 = input.LA(4); -+ -+ if ( ((LA7_5>='0' && LA7_5<='7')) ) { -+ alt7=1; -+ } -+ else { -+ alt7=2;} -+ } -+ else { -+ alt7=3;} -+ } -+ else if ( ((LA7_1>='4' && LA7_1<='7')) ) { -+ int LA7_3 = input.LA(3); -+ -+ if ( ((LA7_3>='0' && LA7_3<='7')) ) { -+ alt7=2; -+ } -+ else { -+ alt7=3;} -+ } -+ else { -+ NoViableAltException nvae = -+ new NoViableAltException(""87:1: fragment OCTAL_ESC : ( '\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) );"", 7, 1, input); -+ -+ throw nvae; -+ } -+ } -+ else { -+ NoViableAltException nvae = -+ new NoViableAltException(""87:1: fragment OCTAL_ESC : ( '\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) );"", 7, 0, input); -+ -+ throw nvae; -+ } -+ switch (alt7) { -+ case 1 : -+ // ReviewDsl.g:89:9: '\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' ) -+ { -+ match('\\'); -+ // ReviewDsl.g:89:14: ( '0' .. '3' ) -+ // ReviewDsl.g:89:15: '0' .. '3' -+ { -+ matchRange('0','3'); -+ -+ } -+ -+ // ReviewDsl.g:89:25: ( '0' .. '7' ) -+ // ReviewDsl.g:89:26: '0' .. '7' -+ { -+ matchRange('0','7'); -+ -+ } -+ -+ // ReviewDsl.g:89:36: ( '0' .. '7' ) -+ // ReviewDsl.g:89:37: '0' .. '7' -+ { -+ matchRange('0','7'); -+ -+ } -+ -+ -+ } -+ break; -+ case 2 : -+ // ReviewDsl.g:90:9: '\\\\' ( '0' .. '7' ) ( '0' .. '7' ) -+ { -+ match('\\'); -+ // ReviewDsl.g:90:14: ( '0' .. '7' ) -+ // ReviewDsl.g:90:15: '0' .. '7' -+ { -+ matchRange('0','7'); -+ -+ } -+ -+ // ReviewDsl.g:90:25: ( '0' .. '7' ) -+ // ReviewDsl.g:90:26: '0' .. '7' -+ { -+ matchRange('0','7'); -+ -+ } -+ -+ -+ } -+ break; -+ case 3 : -+ // ReviewDsl.g:91:9: '\\\\' ( '0' .. '7' ) -+ { -+ match('\\'); -+ // ReviewDsl.g:91:14: ( '0' .. '7' ) -+ // ReviewDsl.g:91:15: '0' .. '7' -+ { -+ matchRange('0','7'); -+ -+ } -+ -+ -+ } -+ break; -+ -+ } -+ } -+ finally { -+ } -+ } -+ // $ANTLR end OCTAL_ESC -+ -+ // $ANTLR start UNICODE_ESC -+ public final void mUNICODE_ESC() throws RecognitionException { -+ try { -+ // ReviewDsl.g:95:9: ( '\\\\' 'u' HEX_DIGIT HEX_DIGIT HEX_DIGIT HEX_DIGIT ) -+ // ReviewDsl.g:95:9: '\\\\' 'u' HEX_DIGIT HEX_DIGIT HEX_DIGIT HEX_DIGIT -+ { -+ match('\\'); -+ match('u'); -+ mHEX_DIGIT(); -+ mHEX_DIGIT(); -+ mHEX_DIGIT(); -+ mHEX_DIGIT(); -+ -+ } -+ -+ } -+ finally { -+ } -+ } -+ // $ANTLR end UNICODE_ESC -+ -+ // $ANTLR start HEX_DIGIT -+ public final void mHEX_DIGIT() throws RecognitionException { -+ try { -+ // ReviewDsl.g:99:13: ( ( INT | 'a' .. 'f' | 'A' .. 'F' ) ) -+ // ReviewDsl.g:99:13: ( INT | 'a' .. 'f' | 'A' .. 'F' ) -+ { -+ // ReviewDsl.g:99:13: ( INT | 'a' .. 'f' | 'A' .. 'F' ) -+ int alt8=3; -+ switch ( input.LA(1) ) { -+ case '0': -+ case '1': -+ case '2': -+ case '3': -+ case '4': -+ case '5': -+ case '6': -+ case '7': -+ case '8': -+ case '9': -+ { -+ alt8=1; -+ } -+ break; -+ case 'a': -+ case 'b': -+ case 'c': -+ case 'd': -+ case 'e': -+ case 'f': -+ { -+ alt8=2; -+ } -+ break; -+ case 'A': -+ case 'B': -+ case 'C': -+ case 'D': -+ case 'E': -+ case 'F': -+ { -+ alt8=3; -+ } -+ break; -+ default: -+ NoViableAltException nvae = -+ new NoViableAltException(""99:13: ( INT | 'a' .. 'f' | 'A' .. 
'F' )"", 8, 0, input); -+ -+ throw nvae; -+ } -+ -+ switch (alt8) { -+ case 1 : -+ // ReviewDsl.g:99:14: INT -+ { -+ mINT(); -+ -+ } -+ break; -+ case 2 : -+ // ReviewDsl.g:99:18: 'a' .. 'f' -+ { -+ matchRange('a','f'); -+ -+ } -+ break; -+ case 3 : -+ // ReviewDsl.g:99:27: 'A' .. 'F' -+ { -+ matchRange('A','F'); -+ -+ } -+ break; -+ -+ } -+ -+ -+ } -+ -+ } -+ finally { -+ } -+ } -+ // $ANTLR end HEX_DIGIT -+ -+ // $ANTLR start WS -+ public final void mWS() throws RecognitionException { -+ try { -+ // ReviewDsl.g:102:9: ( ( ' ' | '\\t' | '\\r' | '\\n' ) ) -+ // ReviewDsl.g:102:9: ( ' ' | '\\t' | '\\r' | '\\n' ) -+ { -+ if ( (input.LA(1)>='\t' && input.LA(1)<='\n')||input.LA(1)=='\r'||input.LA(1)==' ' ) { -+ input.consume(); -+ -+ } -+ else { -+ MismatchedSetException mse = -+ new MismatchedSetException(null,input); -+ recover(mse); throw mse; -+ } -+ -+ channel=HIDDEN; -+ -+ } -+ -+ } -+ finally { -+ } -+ } -+ // $ANTLR end WS -+ -+ public void mTokens() throws RecognitionException { -+ // ReviewDsl.g:1:10: ( T12 | T13 | T14 | T15 | T16 | T17 | T18 | T19 | T20 | T21 | T22 | T23 | T24 | T25 | T26 | T27 | T28 | T29 | T30 | T31 | T32 | TASK_ID | INT | STRING ) -+ int alt9=24; -+ switch ( input.LA(1) ) { -+ case 'R': -+ { -+ int LA9_1 = input.LA(2); -+ -+ if ( (LA9_1=='e') ) { -+ switch ( input.LA(3) ) { -+ case 'v': -+ { -+ int LA9_38 = input.LA(4); -+ -+ if ( (LA9_38=='i') ) { -+ int LA9_57 = input.LA(5); -+ -+ if ( (LA9_57=='e') ) { -+ int LA9_73 = input.LA(6); -+ -+ if ( (LA9_73=='w') ) { -+ int LA9_89 = input.LA(7); -+ -+ if ( (LA9_89=='-'||(LA9_89>='0' && LA9_89<='9')||(LA9_89>='A' && LA9_89<='Z')||LA9_89=='_'||(LA9_89>='a' && LA9_89<='z')) ) { -+ alt9=22; -+ } -+ else { -+ alt9=1;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ break; -+ case 's': -+ { -+ int LA9_39 = input.LA(4); -+ -+ if ( (LA9_39=='o') ) { -+ int LA9_58 = input.LA(5); -+ -+ if ( (LA9_58=='u') ) { -+ int LA9_74 = input.LA(6); -+ -+ if ( (LA9_74=='r') ) { -+ int LA9_90 = input.LA(7); -+ -+ if ( (LA9_90=='c') ) { -+ int LA9_101 = input.LA(8); -+ -+ if ( (LA9_101=='e') ) { -+ int LA9_109 = input.LA(9); -+ -+ if ( (LA9_109=='-'||(LA9_109>='0' && LA9_109<='9')||(LA9_109>='A' && LA9_109<='Z')||LA9_109=='_'||(LA9_109>='a' && LA9_109<='z')) ) { -+ alt9=22; -+ } -+ else { -+ alt9=13;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ break; -+ default: -+ alt9=22;} -+ -+ } -+ else if ( (LA9_1=='-'||(LA9_1>='0' && LA9_1<='9')||(LA9_1>='A' && LA9_1<='Z')||LA9_1=='_'||(LA9_1>='a' && LA9_1<='d')||(LA9_1>='f' && LA9_1<='z')) ) { -+ alt9=22; -+ } -+ else { -+ NoViableAltException nvae = -+ new NoViableAltException(""1:1: Tokens : ( T12 | T13 | T14 | T15 | T16 | T17 | T18 | T19 | T20 | T21 | T22 | T23 | T24 | T25 | T26 | T27 | T28 | T29 | T30 | T31 | T32 | TASK_ID | INT | STRING );"", 9, 1, input); -+ -+ throw nvae; -+ } -+ } -+ break; -+ case 'r': -+ { -+ int LA9_2 = input.LA(2); -+ -+ if ( (LA9_2=='e') ) { -+ int LA9_21 = input.LA(3); -+ -+ if ( (LA9_21=='s') ) { -+ int LA9_40 = input.LA(4); -+ -+ if ( (LA9_40=='u') ) { -+ int LA9_59 = input.LA(5); -+ -+ if ( (LA9_59=='l') ) { -+ int LA9_75 = input.LA(6); -+ -+ if ( (LA9_75=='t') ) { -+ int LA9_91 = input.LA(7); -+ -+ if ( (LA9_91==':') ) { -+ alt9=2; -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else if ( (LA9_2=='-'||(LA9_2>='0' && 
LA9_2<='9')||(LA9_2>='A' && LA9_2<='Z')||LA9_2=='_'||(LA9_2>='a' && LA9_2<='d')||(LA9_2>='f' && LA9_2<='z')) ) { -+ alt9=22; -+ } -+ else { -+ NoViableAltException nvae = -+ new NoViableAltException(""1:1: Tokens : ( T12 | T13 | T14 | T15 | T16 | T17 | T18 | T19 | T20 | T21 | T22 | T23 | T24 | T25 | T26 | T27 | T28 | T29 | T30 | T31 | T32 | TASK_ID | INT | STRING );"", 9, 2, input); -+ -+ throw nvae; -+ } -+ } -+ break; -+ case 'C': -+ { -+ switch ( input.LA(2) ) { -+ case 'h': -+ { -+ int LA9_22 = input.LA(3); -+ -+ if ( (LA9_22=='a') ) { -+ int LA9_41 = input.LA(4); -+ -+ if ( (LA9_41=='n') ) { -+ int LA9_60 = input.LA(5); -+ -+ if ( (LA9_60=='g') ) { -+ int LA9_76 = input.LA(6); -+ -+ if ( (LA9_76=='e') ) { -+ int LA9_92 = input.LA(7); -+ -+ if ( (LA9_92=='s') ) { -+ int LA9_103 = input.LA(8); -+ -+ if ( (LA9_103=='e') ) { -+ int LA9_110 = input.LA(9); -+ -+ if ( (LA9_110=='t') ) { -+ int LA9_115 = input.LA(10); -+ -+ if ( (LA9_115=='-'||(LA9_115>='0' && LA9_115<='9')||(LA9_115>='A' && LA9_115<='Z')||LA9_115=='_'||(LA9_115>='a' && LA9_115<='z')) ) { -+ alt9=22; -+ } -+ else { -+ alt9=14;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ break; -+ case 'o': -+ { -+ int LA9_23 = input.LA(3); -+ -+ if ( (LA9_23=='m') ) { -+ int LA9_42 = input.LA(4); -+ -+ if ( (LA9_42=='m') ) { -+ int LA9_61 = input.LA(5); -+ -+ if ( (LA9_61=='e') ) { -+ int LA9_77 = input.LA(6); -+ -+ if ( (LA9_77=='n') ) { -+ int LA9_93 = input.LA(7); -+ -+ if ( (LA9_93=='t') ) { -+ int LA9_104 = input.LA(8); -+ -+ if ( (LA9_104==':') ) { -+ alt9=3; -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ break; -+ case '-': -+ case '0': -+ case '1': -+ case '2': -+ case '3': -+ case '4': -+ case '5': -+ case '6': -+ case '7': -+ case '8': -+ case '9': -+ case 'A': -+ case 'B': -+ case 'C': -+ case 'D': -+ case 'E': -+ case 'F': -+ case 'G': -+ case 'H': -+ case 'I': -+ case 'J': -+ case 'K': -+ case 'L': -+ case 'M': -+ case 'N': -+ case 'O': -+ case 'P': -+ case 'Q': -+ case 'R': -+ case 'S': -+ case 'T': -+ case 'U': -+ case 'V': -+ case 'W': -+ case 'X': -+ case 'Y': -+ case 'Z': -+ case '_': -+ case 'a': -+ case 'b': -+ case 'c': -+ case 'd': -+ case 'e': -+ case 'f': -+ case 'g': -+ case 'i': -+ case 'j': -+ case 'k': -+ case 'l': -+ case 'm': -+ case 'n': -+ case 'p': -+ case 'q': -+ case 'r': -+ case 's': -+ case 't': -+ case 'u': -+ case 'v': -+ case 'w': -+ case 'x': -+ case 'y': -+ case 'z': -+ { -+ alt9=22; -+ } -+ break; -+ default: -+ NoViableAltException nvae = -+ new NoViableAltException(""1:1: Tokens : ( T12 | T13 | T14 | T15 | T16 | T17 | T18 | T19 | T20 | T21 | T22 | T23 | T24 | T25 | T26 | T27 | T28 | T29 | T30 | T31 | T32 | TASK_ID | INT | STRING );"", 9, 3, input); -+ -+ throw nvae; -+ } -+ -+ } -+ break; -+ case 'P': -+ { -+ switch ( input.LA(2) ) { -+ case 'a': -+ { -+ int LA9_24 = input.LA(3); -+ -+ if ( (LA9_24=='t') ) { -+ int LA9_43 = input.LA(4); -+ -+ if ( (LA9_43=='c') ) { -+ int LA9_62 = input.LA(5); -+ -+ if ( (LA9_62=='h') ) { -+ int LA9_78 = input.LA(6); -+ -+ if ( (LA9_78=='-'||(LA9_78>='0' && LA9_78<='9')||(LA9_78>='A' && LA9_78<='Z')||LA9_78=='_'||(LA9_78>='a' && LA9_78<='z')) ) { -+ alt9=22; -+ } -+ else { -+ alt9=16;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ 
break; -+ case 'A': -+ { -+ int LA9_25 = input.LA(3); -+ -+ if ( (LA9_25=='S') ) { -+ int LA9_44 = input.LA(4); -+ -+ if ( (LA9_44=='S') ) { -+ int LA9_63 = input.LA(5); -+ -+ if ( (LA9_63=='E') ) { -+ int LA9_79 = input.LA(6); -+ -+ if ( (LA9_79=='D') ) { -+ int LA9_95 = input.LA(7); -+ -+ if ( (LA9_95=='-'||(LA9_95>='0' && LA9_95<='9')||(LA9_95>='A' && LA9_95<='Z')||LA9_95=='_'||(LA9_95>='a' && LA9_95<='z')) ) { -+ alt9=22; -+ } -+ else { -+ alt9=4;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ break; -+ case '-': -+ case '0': -+ case '1': -+ case '2': -+ case '3': -+ case '4': -+ case '5': -+ case '6': -+ case '7': -+ case '8': -+ case '9': -+ case 'B': -+ case 'C': -+ case 'D': -+ case 'E': -+ case 'F': -+ case 'G': -+ case 'H': -+ case 'I': -+ case 'J': -+ case 'K': -+ case 'L': -+ case 'M': -+ case 'N': -+ case 'O': -+ case 'P': -+ case 'Q': -+ case 'R': -+ case 'S': -+ case 'T': -+ case 'U': -+ case 'V': -+ case 'W': -+ case 'X': -+ case 'Y': -+ case 'Z': -+ case '_': -+ case 'b': -+ case 'c': -+ case 'd': -+ case 'e': -+ case 'f': -+ case 'g': -+ case 'h': -+ case 'i': -+ case 'j': -+ case 'k': -+ case 'l': -+ case 'm': -+ case 'n': -+ case 'o': -+ case 'p': -+ case 'q': -+ case 'r': -+ case 's': -+ case 't': -+ case 'u': -+ case 'v': -+ case 'w': -+ case 'x': -+ case 'y': -+ case 'z': -+ { -+ alt9=22; -+ } -+ break; -+ default: -+ NoViableAltException nvae = -+ new NoViableAltException(""1:1: Tokens : ( T12 | T13 | T14 | T15 | T16 | T17 | T18 | T19 | T20 | T21 | T22 | T23 | T24 | T25 | T26 | T27 | T28 | T29 | T30 | T31 | T32 | TASK_ID | INT | STRING );"", 9, 4, input); -+ -+ throw nvae; -+ } -+ -+ } -+ break; -+ case 'W': -+ { -+ int LA9_5 = input.LA(2); -+ -+ if ( (LA9_5=='A') ) { -+ int LA9_26 = input.LA(3); -+ -+ if ( (LA9_26=='R') ) { -+ int LA9_45 = input.LA(4); -+ -+ if ( (LA9_45=='N') ) { -+ int LA9_64 = input.LA(5); -+ -+ if ( (LA9_64=='I') ) { -+ int LA9_80 = input.LA(6); -+ -+ if ( (LA9_80=='N') ) { -+ int LA9_96 = input.LA(7); -+ -+ if ( (LA9_96=='G') ) { -+ int LA9_106 = input.LA(8); -+ -+ if ( (LA9_106=='-'||(LA9_106>='0' && LA9_106<='9')||(LA9_106>='A' && LA9_106<='Z')||LA9_106=='_'||(LA9_106>='a' && LA9_106<='z')) ) { -+ alt9=22; -+ } -+ else { -+ alt9=5;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else if ( (LA9_5=='-'||(LA9_5>='0' && LA9_5<='9')||(LA9_5>='B' && LA9_5<='Z')||LA9_5=='_'||(LA9_5>='a' && LA9_5<='z')) ) { -+ alt9=22; -+ } -+ else { -+ NoViableAltException nvae = -+ new NoViableAltException(""1:1: Tokens : ( T12 | T13 | T14 | T15 | T16 | T17 | T18 | T19 | T20 | T21 | T22 | T23 | T24 | T25 | T26 | T27 | T28 | T29 | T30 | T31 | T32 | TASK_ID | INT | STRING );"", 9, 5, input); -+ -+ throw nvae; -+ } -+ } -+ break; -+ case 'F': -+ { -+ switch ( input.LA(2) ) { -+ case 'A': -+ { -+ int LA9_27 = input.LA(3); -+ -+ if ( (LA9_27=='I') ) { -+ int LA9_46 = input.LA(4); -+ -+ if ( (LA9_46=='L') ) { -+ int LA9_65 = input.LA(5); -+ -+ if ( (LA9_65=='E') ) { -+ int LA9_81 = input.LA(6); -+ -+ if ( (LA9_81=='D') ) { -+ int LA9_97 = input.LA(7); -+ -+ if ( (LA9_97=='-'||(LA9_97>='0' && LA9_97<='9')||(LA9_97>='A' && LA9_97<='Z')||LA9_97=='_'||(LA9_97>='a' && LA9_97<='z')) ) { -+ alt9=22; -+ } -+ else { -+ alt9=6;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ break; -+ case 'i': -+ { -+ int LA9_28 = input.LA(3); -+ -+ if 
( (LA9_28=='l') ) { -+ int LA9_47 = input.LA(4); -+ -+ if ( (LA9_47=='e') ) { -+ int LA9_66 = input.LA(5); -+ -+ if ( (LA9_66=='-'||(LA9_66>='0' && LA9_66<='9')||(LA9_66>='A' && LA9_66<='Z')||LA9_66=='_'||(LA9_66>='a' && LA9_66<='z')) ) { -+ alt9=22; -+ } -+ else { -+ alt9=8;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ break; -+ case '-': -+ case '0': -+ case '1': -+ case '2': -+ case '3': -+ case '4': -+ case '5': -+ case '6': -+ case '7': -+ case '8': -+ case '9': -+ case 'B': -+ case 'C': -+ case 'D': -+ case 'E': -+ case 'F': -+ case 'G': -+ case 'H': -+ case 'I': -+ case 'J': -+ case 'K': -+ case 'L': -+ case 'M': -+ case 'N': -+ case 'O': -+ case 'P': -+ case 'Q': -+ case 'R': -+ case 'S': -+ case 'T': -+ case 'U': -+ case 'V': -+ case 'W': -+ case 'X': -+ case 'Y': -+ case 'Z': -+ case '_': -+ case 'a': -+ case 'b': -+ case 'c': -+ case 'd': -+ case 'e': -+ case 'f': -+ case 'g': -+ case 'h': -+ case 'j': -+ case 'k': -+ case 'l': -+ case 'm': -+ case 'n': -+ case 'o': -+ case 'p': -+ case 'q': -+ case 'r': -+ case 's': -+ case 't': -+ case 'u': -+ case 'v': -+ case 'w': -+ case 'x': -+ case 'y': -+ case 'z': -+ { -+ alt9=22; -+ } -+ break; -+ default: -+ NoViableAltException nvae = -+ new NoViableAltException(""1:1: Tokens : ( T12 | T13 | T14 | T15 | T16 | T17 | T18 | T19 | T20 | T21 | T22 | T23 | T24 | T25 | T26 | T27 | T28 | T29 | T30 | T31 | T32 | TASK_ID | INT | STRING );"", 9, 6, input); -+ -+ throw nvae; -+ } -+ -+ } -+ break; -+ case 'T': -+ { -+ int LA9_7 = input.LA(2); -+ -+ if ( (LA9_7=='O') ) { -+ int LA9_29 = input.LA(3); -+ -+ if ( (LA9_29=='D') ) { -+ int LA9_48 = input.LA(4); -+ -+ if ( (LA9_48=='O') ) { -+ int LA9_67 = input.LA(5); -+ -+ if ( (LA9_67=='-'||(LA9_67>='0' && LA9_67<='9')||(LA9_67>='A' && LA9_67<='Z')||LA9_67=='_'||(LA9_67>='a' && LA9_67<='z')) ) { -+ alt9=22; -+ } -+ else { -+ alt9=7;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else if ( (LA9_7=='-'||(LA9_7>='0' && LA9_7<='9')||(LA9_7>='A' && LA9_7<='N')||(LA9_7>='P' && LA9_7<='Z')||LA9_7=='_'||(LA9_7>='a' && LA9_7<='z')) ) { -+ alt9=22; -+ } -+ else { -+ NoViableAltException nvae = -+ new NoViableAltException(""1:1: Tokens : ( T12 | T13 | T14 | T15 | T16 | T17 | T18 | T19 | T20 | T21 | T22 | T23 | T24 | T25 | T26 | T27 | T28 | T29 | T30 | T31 | T32 | TASK_ID | INT | STRING );"", 9, 7, input); -+ -+ throw nvae; -+ } -+ } -+ break; -+ case ':': -+ { -+ alt9=9; -+ } -+ break; -+ case 'L': -+ { -+ int LA9_9 = input.LA(2); -+ -+ if ( (LA9_9=='i') ) { -+ int LA9_30 = input.LA(3); -+ -+ if ( (LA9_30=='n') ) { -+ int LA9_49 = input.LA(4); -+ -+ if ( (LA9_49=='e') ) { -+ int LA9_68 = input.LA(5); -+ -+ if ( (LA9_68=='-'||(LA9_68>='0' && LA9_68<='9')||(LA9_68>='A' && LA9_68<='Z')||LA9_68=='_'||(LA9_68>='a' && LA9_68<='z')) ) { -+ alt9=22; -+ } -+ else { -+ alt9=10;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else if ( (LA9_9=='-'||(LA9_9>='0' && LA9_9<='9')||(LA9_9>='A' && LA9_9<='Z')||LA9_9=='_'||(LA9_9>='a' && LA9_9<='h')||(LA9_9>='j' && LA9_9<='z')) ) { -+ alt9=22; -+ } -+ else { -+ NoViableAltException nvae = -+ new NoViableAltException(""1:1: Tokens : ( T12 | T13 | T14 | T15 | T16 | T17 | T18 | T19 | T20 | T21 | T22 | T23 | T24 | T25 | T26 | T27 | T28 | T29 | T30 | T31 | T32 | TASK_ID | INT | STRING );"", 9, 9, input); -+ -+ throw nvae; -+ } -+ } -+ break; -+ case '-': -+ { -+ alt9=11; -+ } -+ break; -+ case 's': -+ { -+ int LA9_11 = input.LA(2); -+ -+ if ( (LA9_11=='c') ) { -+ int LA9_31 = input.LA(3); -+ -+ if ( (LA9_31=='o') ) { -+ int 
LA9_50 = input.LA(4); -+ -+ if ( (LA9_50=='p') ) { -+ int LA9_69 = input.LA(5); -+ -+ if ( (LA9_69=='e') ) { -+ int LA9_85 = input.LA(6); -+ -+ if ( (LA9_85==':') ) { -+ alt9=12; -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else if ( (LA9_11=='-'||(LA9_11>='0' && LA9_11<='9')||(LA9_11>='A' && LA9_11<='Z')||LA9_11=='_'||(LA9_11>='a' && LA9_11<='b')||(LA9_11>='d' && LA9_11<='z')) ) { -+ alt9=22; -+ } -+ else { -+ NoViableAltException nvae = -+ new NoViableAltException(""1:1: Tokens : ( T12 | T13 | T14 | T15 | T16 | T17 | T18 | T19 | T20 | T21 | T22 | T23 | T24 | T25 | T26 | T27 | T28 | T29 | T30 | T31 | T32 | TASK_ID | INT | STRING );"", 9, 11, input); -+ -+ throw nvae; -+ } -+ } -+ break; -+ case 'f': -+ { -+ int LA9_12 = input.LA(2); -+ -+ if ( (LA9_12=='r') ) { -+ int LA9_32 = input.LA(3); -+ -+ if ( (LA9_32=='o') ) { -+ int LA9_51 = input.LA(4); -+ -+ if ( (LA9_51=='m') ) { -+ int LA9_70 = input.LA(5); -+ -+ if ( (LA9_70=='-'||(LA9_70>='0' && LA9_70<='9')||(LA9_70>='A' && LA9_70<='Z')||LA9_70=='_'||(LA9_70>='a' && LA9_70<='z')) ) { -+ alt9=22; -+ } -+ else { -+ alt9=15;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else if ( (LA9_12=='-'||(LA9_12>='0' && LA9_12<='9')||(LA9_12>='A' && LA9_12<='Z')||LA9_12=='_'||(LA9_12>='a' && LA9_12<='q')||(LA9_12>='s' && LA9_12<='z')) ) { -+ alt9=22; -+ } -+ else { -+ NoViableAltException nvae = -+ new NoViableAltException(""1:1: Tokens : ( T12 | T13 | T14 | T15 | T16 | T17 | T18 | T19 | T20 | T21 | T22 | T23 | T24 | T25 | T26 | T27 | T28 | T29 | T30 | T31 | T32 | TASK_ID | INT | STRING );"", 9, 12, input); -+ -+ throw nvae; -+ } -+ } -+ break; -+ case 'A': -+ { -+ int LA9_13 = input.LA(2); -+ -+ if ( (LA9_13=='t') ) { -+ int LA9_33 = input.LA(3); -+ -+ if ( (LA9_33=='t') ) { -+ int LA9_52 = input.LA(4); -+ -+ if ( (LA9_52=='a') ) { -+ int LA9_71 = input.LA(5); -+ -+ if ( (LA9_71=='c') ) { -+ int LA9_87 = input.LA(6); -+ -+ if ( (LA9_87=='h') ) { -+ int LA9_99 = input.LA(7); -+ -+ if ( (LA9_99=='m') ) { -+ int LA9_108 = input.LA(8); -+ -+ if ( (LA9_108=='e') ) { -+ int LA9_113 = input.LA(9); -+ -+ if ( (LA9_113=='n') ) { -+ int LA9_116 = input.LA(10); -+ -+ if ( (LA9_116=='t') ) { -+ int LA9_118 = input.LA(11); -+ -+ if ( (LA9_118=='-'||(LA9_118>='0' && LA9_118<='9')||(LA9_118>='A' && LA9_118<='Z')||LA9_118=='_'||(LA9_118>='a' && LA9_118<='z')) ) { -+ alt9=22; -+ } -+ else { -+ alt9=17;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else if ( (LA9_13=='-'||(LA9_13>='0' && LA9_13<='9')||(LA9_13>='A' && LA9_13<='Z')||LA9_13=='_'||(LA9_13>='a' && LA9_13<='s')||(LA9_13>='u' && LA9_13<='z')) ) { -+ alt9=22; -+ } -+ else { -+ NoViableAltException nvae = -+ new NoViableAltException(""1:1: Tokens : ( T12 | T13 | T14 | T15 | T16 | T17 | T18 | T19 | T20 | T21 | T22 | T23 | T24 | T25 | T26 | T27 | T28 | T29 | T30 | T31 | T32 | TASK_ID | INT | STRING );"", 9, 13, input); -+ -+ throw nvae; -+ } -+ } -+ break; -+ case 'b': -+ { -+ int LA9_14 = input.LA(2); -+ -+ if ( (LA9_14=='y') ) { -+ int LA9_34 = input.LA(3); -+ -+ if ( (LA9_34=='-'||(LA9_34>='0' && LA9_34<='9')||(LA9_34>='A' && LA9_34<='Z')||LA9_34=='_'||(LA9_34>='a' && LA9_34<='z')) ) { -+ alt9=22; -+ } -+ else { -+ alt9=18;} -+ } -+ else if ( (LA9_14=='-'||(LA9_14>='0' && LA9_14<='9')||(LA9_14>='A' && 
LA9_14<='Z')||LA9_14=='_'||(LA9_14>='a' && LA9_14<='x')||LA9_14=='z') ) { -+ alt9=22; -+ } -+ else { -+ NoViableAltException nvae = -+ new NoViableAltException(""1:1: Tokens : ( T12 | T13 | T14 | T15 | T16 | T17 | T18 | T19 | T20 | T21 | T22 | T23 | T24 | T25 | T26 | T27 | T28 | T29 | T30 | T31 | T32 | TASK_ID | INT | STRING );"", 9, 14, input); -+ -+ throw nvae; -+ } -+ } -+ break; -+ case 'o': -+ { -+ switch ( input.LA(2) ) { -+ case 'n': -+ { -+ int LA9_35 = input.LA(3); -+ -+ if ( (LA9_35=='-'||(LA9_35>='0' && LA9_35<='9')||(LA9_35>='A' && LA9_35<='Z')||LA9_35=='_'||(LA9_35>='a' && LA9_35<='z')) ) { -+ alt9=22; -+ } -+ else { -+ alt9=19;} -+ } -+ break; -+ case 'f': -+ { -+ int LA9_36 = input.LA(3); -+ -+ if ( (LA9_36=='-'||(LA9_36>='0' && LA9_36<='9')||(LA9_36>='A' && LA9_36<='Z')||LA9_36=='_'||(LA9_36>='a' && LA9_36<='z')) ) { -+ alt9=22; -+ } -+ else { -+ alt9=20;} -+ } -+ break; -+ case '-': -+ case '0': -+ case '1': -+ case '2': -+ case '3': -+ case '4': -+ case '5': -+ case '6': -+ case '7': -+ case '8': -+ case '9': -+ case 'A': -+ case 'B': -+ case 'C': -+ case 'D': -+ case 'E': -+ case 'F': -+ case 'G': -+ case 'H': -+ case 'I': -+ case 'J': -+ case 'K': -+ case 'L': -+ case 'M': -+ case 'N': -+ case 'O': -+ case 'P': -+ case 'Q': -+ case 'R': -+ case 'S': -+ case 'T': -+ case 'U': -+ case 'V': -+ case 'W': -+ case 'X': -+ case 'Y': -+ case 'Z': -+ case '_': -+ case 'a': -+ case 'b': -+ case 'c': -+ case 'd': -+ case 'e': -+ case 'g': -+ case 'h': -+ case 'i': -+ case 'j': -+ case 'k': -+ case 'l': -+ case 'm': -+ case 'o': -+ case 'p': -+ case 'q': -+ case 'r': -+ case 's': -+ case 't': -+ case 'u': -+ case 'v': -+ case 'w': -+ case 'x': -+ case 'y': -+ case 'z': -+ { -+ alt9=22; -+ } -+ break; -+ default: -+ NoViableAltException nvae = -+ new NoViableAltException(""1:1: Tokens : ( T12 | T13 | T14 | T15 | T16 | T17 | T18 | T19 | T20 | T21 | T22 | T23 | T24 | T25 | T26 | T27 | T28 | T29 | T30 | T31 | T32 | TASK_ID | INT | STRING );"", 9, 15, input); -+ -+ throw nvae; -+ } -+ -+ } -+ break; -+ case 't': -+ { -+ int LA9_16 = input.LA(2); -+ -+ if ( (LA9_16=='a') ) { -+ int LA9_37 = input.LA(3); -+ -+ if ( (LA9_37=='s') ) { -+ int LA9_56 = input.LA(4); -+ -+ if ( (LA9_56=='k') ) { -+ int LA9_72 = input.LA(5); -+ -+ if ( (LA9_72=='-'||(LA9_72>='0' && LA9_72<='9')||(LA9_72>='A' && LA9_72<='Z')||LA9_72=='_'||(LA9_72>='a' && LA9_72<='z')) ) { -+ alt9=22; -+ } -+ else { -+ alt9=21;} -+ } -+ else { -+ alt9=22;} -+ } -+ else { -+ alt9=22;} -+ } -+ else if ( (LA9_16=='-'||(LA9_16>='0' && LA9_16<='9')||(LA9_16>='A' && LA9_16<='Z')||LA9_16=='_'||(LA9_16>='b' && LA9_16<='z')) ) { -+ alt9=22; -+ } -+ else { -+ NoViableAltException nvae = -+ new NoViableAltException(""1:1: Tokens : ( T12 | T13 | T14 | T15 | T16 | T17 | T18 | T19 | T20 | T21 | T22 | T23 | T24 | T25 | T26 | T27 | T28 | T29 | T30 | T31 | T32 | TASK_ID | INT | STRING );"", 9, 16, input); -+ -+ throw nvae; -+ } -+ } -+ break; -+ case 'B': -+ case 'D': -+ case 'E': -+ case 'G': -+ case 'H': -+ case 'I': -+ case 'J': -+ case 'K': -+ case 'M': -+ case 'N': -+ case 'O': -+ case 'Q': -+ case 'S': -+ case 'U': -+ case 'V': -+ case 'X': -+ case 'Y': -+ case 'Z': -+ case 'a': -+ case 'c': -+ case 'd': -+ case 'e': -+ case 'g': -+ case 'h': -+ case 'i': -+ case 'j': -+ case 'k': -+ case 'l': -+ case 'm': -+ case 'n': -+ case 'p': -+ case 'q': -+ case 'u': -+ case 'v': -+ case 'w': -+ case 'x': -+ case 'y': -+ case 'z': -+ { -+ alt9=22; -+ } -+ break; -+ case '0': -+ case '1': -+ case '2': -+ case '3': -+ case '4': -+ case '5': -+ case '6': 
-+ case '7': -+ case '8': -+ case '9': -+ { -+ alt9=23; -+ } -+ break; -+ case '\""': -+ { -+ alt9=24; -+ } -+ break; -+ default: -+ NoViableAltException nvae = -+ new NoViableAltException(""1:1: Tokens : ( T12 | T13 | T14 | T15 | T16 | T17 | T18 | T19 | T20 | T21 | T22 | T23 | T24 | T25 | T26 | T27 | T28 | T29 | T30 | T31 | T32 | TASK_ID | INT | STRING );"", 9, 0, input); -+ -+ throw nvae; -+ } -+ -+ switch (alt9) { -+ case 1 : -+ // ReviewDsl.g:1:10: T12 -+ { -+ mT12(); -+ -+ } -+ break; -+ case 2 : -+ // ReviewDsl.g:1:14: T13 -+ { -+ mT13(); -+ -+ } -+ break; -+ case 3 : -+ // ReviewDsl.g:1:18: T14 -+ { -+ mT14(); -+ -+ } -+ break; -+ case 4 : -+ // ReviewDsl.g:1:22: T15 -+ { -+ mT15(); -+ -+ } -+ break; -+ case 5 : -+ // ReviewDsl.g:1:26: T16 -+ { -+ mT16(); -+ -+ } -+ break; -+ case 6 : -+ // ReviewDsl.g:1:30: T17 -+ { -+ mT17(); -+ -+ } -+ break; -+ case 7 : -+ // ReviewDsl.g:1:34: T18 -+ { -+ mT18(); -+ -+ } -+ break; -+ case 8 : -+ // ReviewDsl.g:1:38: T19 -+ { -+ mT19(); -+ -+ } -+ break; -+ case 9 : -+ // ReviewDsl.g:1:42: T20 -+ { -+ mT20(); -+ -+ } -+ break; -+ case 10 : -+ // ReviewDsl.g:1:46: T21 -+ { -+ mT21(); -+ -+ } -+ break; -+ case 11 : -+ // ReviewDsl.g:1:50: T22 -+ { -+ mT22(); -+ -+ } -+ break; -+ case 12 : -+ // ReviewDsl.g:1:54: T23 -+ { -+ mT23(); -+ -+ } -+ break; -+ case 13 : -+ // ReviewDsl.g:1:58: T24 -+ { -+ mT24(); -+ -+ } -+ break; -+ case 14 : -+ // ReviewDsl.g:1:62: T25 -+ { -+ mT25(); -+ -+ } -+ break; -+ case 15 : -+ // ReviewDsl.g:1:66: T26 -+ { -+ mT26(); -+ -+ } -+ break; -+ case 16 : -+ // ReviewDsl.g:1:70: T27 -+ { -+ mT27(); -+ -+ } -+ break; -+ case 17 : -+ // ReviewDsl.g:1:74: T28 -+ { -+ mT28(); -+ -+ } -+ break; -+ case 18 : -+ // ReviewDsl.g:1:78: T29 -+ { -+ mT29(); -+ -+ } -+ break; -+ case 19 : -+ // ReviewDsl.g:1:82: T30 -+ { -+ mT30(); -+ -+ } -+ break; -+ case 20 : -+ // ReviewDsl.g:1:86: T31 -+ { -+ mT31(); -+ -+ } -+ break; -+ case 21 : -+ // ReviewDsl.g:1:90: T32 -+ { -+ mT32(); -+ -+ } -+ break; -+ case 22 : -+ // ReviewDsl.g:1:94: TASK_ID -+ { -+ mTASK_ID(); -+ -+ } -+ break; -+ case 23 : -+ // ReviewDsl.g:1:102: INT -+ { -+ mINT(); -+ -+ } -+ break; -+ case 24 : -+ // ReviewDsl.g:1:106: STRING -+ { -+ mSTRING(); -+ -+ } -+ break; -+ -+ } -+ -+ } -+ -+ -+ -+ -+} -\ No newline at end of file -diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/internal/ReviewDslMapper.java b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/internal/ReviewDslMapper.java -new file mode 100644 -index 00000000..93fed735 ---- /dev/null -+++ b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/internal/ReviewDslMapper.java -@@ -0,0 +1,215 @@ -+/******************************************************************************* -+ * Copyright (c) 2011 Research Group for Industrial Software (INSO), Vienna University of Technology. -+ * All rights reserved. 
This program and the accompanying materials -+ * are made available under the terms of the Eclipse Public License v1.0 -+ * which accompanies this distribution, and is available at -+ * http://www.eclipse.org/legal/epl-v10.html -+ * -+ * Contributors: -+ * Research Group for Industrial Software (INSO), Vienna University of Technology - initial API and implementation -+ *******************************************************************************/ -+ -+package org.eclipse.mylyn.reviews.tasks.dsl.internal; -+ -+import org.antlr.runtime.ANTLRStringStream; -+import org.antlr.runtime.CommonTokenStream; -+import org.antlr.runtime.RecognitionException; -+import org.antlr.runtime.TokenStream; -+import org.antlr.runtime.tree.TreeAdaptor; -+import org.eclipse.mylyn.reviews.tasks.dsl.IReviewDslMapper; -+import org.eclipse.mylyn.reviews.tasks.dsl.ParseException; -+import org.eclipse.mylyn.reviews.tasks.dsl.ReviewDslAttachmentScopeItem; -+import org.eclipse.mylyn.reviews.tasks.dsl.ReviewDslChangesetScopeItem; -+import org.eclipse.mylyn.reviews.tasks.dsl.ReviewDslResult; -+import org.eclipse.mylyn.reviews.tasks.dsl.ReviewDslAttachmentScopeItem.Type; -+import org.eclipse.mylyn.reviews.tasks.dsl.ReviewDslResult.FileComment; -+import org.eclipse.mylyn.reviews.tasks.dsl.ReviewDslResult.LineComment; -+import org.eclipse.mylyn.reviews.tasks.dsl.ReviewDslResult.Rating; -+import org.eclipse.mylyn.reviews.tasks.dsl.ReviewDslScope; -+import org.eclipse.mylyn.reviews.tasks.dsl.ReviewDslScopeItem; -+import org.eclipse.mylyn.reviews.tasks.dsl.internal.ReviewDslParser.attachmentSource_return; -+import org.eclipse.mylyn.reviews.tasks.dsl.internal.ReviewDslParser.changesetDef_return; -+import org.eclipse.mylyn.reviews.tasks.dsl.internal.ReviewDslParser.fileComment_return; -+import org.eclipse.mylyn.reviews.tasks.dsl.internal.ReviewDslParser.lineComment_return; -+import org.eclipse.mylyn.reviews.tasks.dsl.internal.ReviewDslParser.patchDef_return; -+import org.eclipse.mylyn.reviews.tasks.dsl.internal.ReviewDslParser.resourceDef_return; -+import org.eclipse.mylyn.reviews.tasks.dsl.internal.ReviewDslParser.reviewResult_return; -+import org.eclipse.mylyn.reviews.tasks.dsl.internal.ReviewDslParser.reviewScope_return; -+ -+/** -+ * -+ * @author mattk -+ * -+ */ -+public class ReviewDslMapper implements IReviewDslMapper { -+ -+ /* (non-Javadoc) -+ * @see org.eclipse.mylyn.reviews.tasks.dsl.internal.IReviewDslMapper#parseReviewResult(java.lang.String) -+ */ -+ @Override -+ public ReviewDslResult parseReviewResult(String text) throws ParseException { -+ ReviewDslLexer lexer = new ReviewDslLexer(new ANTLRStringStream(text)); -+ TokenStream input = new CommonTokenStream(lexer); -+ ReviewDslParser parser = new ReviewDslParser(input); -+ try { -+ return mapResult(parser.reviewResult(), parser.getTreeAdaptor()); -+ } catch (RecognitionException e) { -+ throw new ParseException(e.getMessage()); -+ } -+ -+ } -+ -+ private ReviewDslResult mapResult(reviewResult_return reviewResult, -+ TreeAdaptor treeAdaptor) { -+ ReviewDslResult result = new ReviewDslResult(); -+ -+ result.setRating(Rating.valueOf(reviewResult.result)); -+ result.setComment(convertStr(reviewResult.comment)); -+ if (reviewResult.fileComments != null) { -+ for (int i = 0; i < reviewResult.fileComments.size(); i++) { -+ fileComment_return fc = (fileComment_return) reviewResult.fileComments -+ .get(i); -+ result.getFileComments().add(map(fc)); -+ } -+ } -+ return result; -+ } -+ -+ private FileComment map(fileComment_return fc) { -+ FileComment fileComment = new 
FileComment(); -+ fileComment.setFileName(convertStr(fc.path)); -+ fileComment.setComment(convertStr(fc.comment)); -+ if (fc.lineComments != null) { -+ for (int i = 0; i < fc.lineComments.size(); i++) { -+ lineComment_return lc = (lineComment_return) fc.lineComments -+ .get(i); -+ fileComment.getLineComments().add(map(lc)); -+ } -+ } -+ return fileComment; -+ } -+ -+ private LineComment map(lineComment_return lc) { -+ LineComment lineComment = new LineComment(); -+ lineComment.setBegin(lc.begin); -+ if (lc.end != null) { -+ lineComment.setEnd(Integer.parseInt(lc.end)); -+ } else { -+ lineComment.setEnd(lineComment.getBegin()); -+ } -+ lineComment.setComment(convertStr(lc.comment)); -+ -+ return lineComment; -+ } -+ -+ private String convertStr(String string) { -+ if (string == null) -+ return string; -+ int startIdx = 0; -+ int endIdx = string.length(); -+ if (string.startsWith(""\"""")) -+ startIdx = 1; -+ if (string.endsWith(""\"""")) -+ endIdx--; -+ return string.substring(startIdx, endIdx); -+ } -+ -+ /* (non-Javadoc) -+ * @see org.eclipse.mylyn.reviews.tasks.dsl.internal.IReviewDslMapper#parseReviewScope(java.lang.String) -+ */ -+ @Override -+ public ReviewDslScope parseReviewScope(String text) throws ParseException { -+ ReviewDslLexer lexer = new ReviewDslLexer(new ANTLRStringStream(text)); -+ TokenStream input = new CommonTokenStream(lexer); -+ ReviewDslParser parser = new ReviewDslParser(input); -+ try { -+ return mapScope(parser.reviewScope(), parser.getTreeAdaptor()); -+ } catch (RecognitionException e) { -+ throw new ParseException(e.getMessage()); -+ } -+ } -+ -+ private ReviewDslScope mapScope(reviewScope_return reviewScope, -+ TreeAdaptor treeAdaptor) { -+ if (reviewScope == null) -+ return null; -+ ReviewDslScope scope = new ReviewDslScope(); -+ -+ if (reviewScope.scopeItems != null) { -+ for (int i = 0; i < reviewScope.scopeItems.size(); i++) { -+ Object child = reviewScope.scopeItems.get(i); -+ if (patchDef_return.class.equals(child.getClass())) { -+ scope.addItem(parsePatch((patchDef_return) child, -+ treeAdaptor)); -+ } else if (resourceDef_return.class.equals(child.getClass())) { -+ scope.addItem(parseResource((resourceDef_return) child, -+ treeAdaptor)); -+ } else if (changesetDef_return.class.equals(child.getClass())) { -+ scope.addItem(parseChangeSet((changesetDef_return) child)); -+ } -+ } -+ } -+ return scope; -+ } -+ -+ private ReviewDslChangesetScopeItem parseChangeSet(changesetDef_return child) { -+ ReviewDslChangesetScopeItem item = new ReviewDslChangesetScopeItem(); -+ item.setRevision(convertStr(child.revision)); -+ item.setRepoUrl(convertStr(child.repoUrl)); -+ return item; -+ } -+ -+ private ReviewDslAttachmentScopeItem parseResource( -+ resourceDef_return child, TreeAdaptor treeAdaptor) { -+ AttachmentSource source = parseAttachmentSource( -+ (attachmentSource_return) child.source); -+ return new ReviewDslAttachmentScopeItem(Type.RESOURCE, source.fileName, -+ source.author, source.createdDate, source.taskId); -+ } -+ -+ private ReviewDslAttachmentScopeItem parsePatch(patchDef_return child, -+ TreeAdaptor treeAdaptor) { -+ -+ AttachmentSource source = parseAttachmentSource( -+ (attachmentSource_return) child.source); -+ return new ReviewDslAttachmentScopeItem(Type.PATCH, source.fileName, -+ source.author, source.createdDate, source.taskId); -+ } -+ -+ private AttachmentSource parseAttachmentSource( -+ attachmentSource_return child) { -+ AttachmentSource source = new AttachmentSource(); -+ source.fileName = convertStr(child.filename); -+ source.author = 
convertStr(child.author); -+ source.createdDate =convertStr(child.createdDate); -+ source.taskId = convertStr(child.taskId); -+ return source; -+ } -+ -+ private static class AttachmentSource { -+ public String fileName; -+ public String author; -+ public String createdDate; -+ public String taskId; -+ } -+ -+ public static void main(String[] args) throws Exception { -+ // String text = ""Review result: TODO Comment: \""test\""""; -+ String text = ""Review scope: Patch from Attachment \""0001-Extension-point-for-scm-connector-defined-jaxb-model.patch\"" by \""Jane@inso.tuwien.ac.at\"" on \""2010-06-25 17:42:00\"" of task 85""; -+ IReviewDslMapper reviewDslMapper = new ReviewDslMapper(); -+ ReviewDslScope parseReviewScope = reviewDslMapper -+ .parseReviewScope(text); -+ -+ if (parseReviewScope != null) { -+ for (ReviewDslScopeItem item : parseReviewScope.getItems()) { -+ System.err.println(item); -+ } -+ } -+ } -+ -+ @Override -+ public ReviewDslResult parseChangedReviewScope(String text) { -+ // TODO Auto-generated method stub -+ return null; -+ } -+} -diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/internal/ReviewDslParser.java b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/internal/ReviewDslParser.java -new file mode 100644 -index 00000000..5291ed39 ---- /dev/null -+++ b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/internal/ReviewDslParser.java -@@ -0,0 +1,1154 @@ -+/******************************************************************************* -+ * Copyright (c) 2011 Research Group for Industrial Software (INSO), Vienna University of Technology. -+ * All rights reserved. This program and the accompanying materials -+ * are made available under the terms of the Eclipse Public License v1.0 -+ * which accompanies this distribution, and is available at -+ * http://www.eclipse.org/legal/epl-v10.html -+ * -+ * Contributors: -+ * Research Group for Industrial Software (INSO), Vienna University of Technology - initial API and implementation -+ *******************************************************************************/ -+ -+// $ANTLR 3.0 ReviewDsl.g 2011-03-06 18:30:25 -+ -+package org.eclipse.mylyn.reviews.tasks.dsl.internal; -+ -+import java.util.ArrayList; -+import java.util.List; -+ -+import org.antlr.runtime.BitSet; -+import org.antlr.runtime.EarlyExitException; -+import org.antlr.runtime.MismatchedSetException; -+import org.antlr.runtime.NoViableAltException; -+import org.antlr.runtime.Parser; -+import org.antlr.runtime.ParserRuleReturnScope; -+import org.antlr.runtime.RecognitionException; -+import org.antlr.runtime.RuleReturnScope; -+import org.antlr.runtime.Token; -+import org.antlr.runtime.TokenStream; -+import org.antlr.runtime.tree.CommonTree; -+import org.antlr.runtime.tree.CommonTreeAdaptor; -+import org.antlr.runtime.tree.TreeAdaptor; -+ -+/** -+ * -+ * @author mattk -+ * -+ */ -+public class ReviewDslParser extends Parser { -+ public static final String[] tokenNames = new String[] { """", -+ """", """", """", ""STRING"", ""INT"", ""TASK_ID"", ""ESC_SEQ"", -+ ""UNICODE_ESC"", ""OCTAL_ESC"", ""HEX_DIGIT"", ""WS"", ""'Review'"", -+ ""'result:'"", ""'Comment:'"", ""'PASSED'"", ""'WARNING'"", ""'FAILED'"", -+ ""'TODO'"", ""'File'"", ""':'"", ""'Line'"", ""'-'"", ""'scope:'"", -+ ""'Resource'"", ""'Changeset'"", ""'from'"", ""'Patch'"", ""'Attachment'"", -+ ""'by'"", ""'on'"", ""'of'"", ""'task'"" }; -+ public static final int WS = 11; -+ public static final int 
ESC_SEQ = 7; -+ public static final int TASK_ID = 6; -+ public static final int UNICODE_ESC = 8; -+ public static final int OCTAL_ESC = 9; -+ public static final int HEX_DIGIT = 10; -+ public static final int INT = 5; -+ public static final int EOF = -1; -+ public static final int STRING = 4; -+ -+ public ReviewDslParser(TokenStream input) { -+ super(input); -+ } -+ -+ protected TreeAdaptor adaptor = new CommonTreeAdaptor(); -+ -+ public void setTreeAdaptor(TreeAdaptor adaptor) { -+ this.adaptor = adaptor; -+ } -+ -+ public TreeAdaptor getTreeAdaptor() { -+ return adaptor; -+ } -+ -+ public String[] getTokenNames() { -+ return tokenNames; -+ } -+ -+ public String getGrammarFileName() { -+ return ""ReviewDsl.g""; -+ } -+ -+ public static class reviewResult_return extends ParserRuleReturnScope { -+ public String result; -+ public String comment; -+ public List fileComments; -+ CommonTree tree; -+ -+ public Object getTree() { -+ return tree; -+ } -+ }; -+ -+ // $ANTLR start reviewResult -+ // ReviewDsl.g:14:1: reviewResult returns [String result, String comment, -+ // List fileComments] : 'Review' 'result:' res= resultEnum ( 'Comment:' c= -+ // STRING )? ( (fc+= fileComment )+ )? ; -+ public final reviewResult_return reviewResult() throws RecognitionException { -+ reviewResult_return retval = new reviewResult_return(); -+ retval.start = input.LT(1); -+ -+ CommonTree root_0 = null; -+ -+ Token c = null; -+ Token string_literal1 = null; -+ Token string_literal2 = null; -+ Token string_literal3 = null; -+ List list_fc = null; -+ resultEnum_return res = null; -+ -+ RuleReturnScope fc = null; -+ CommonTree c_tree = null; -+ CommonTree string_literal1_tree = null; -+ CommonTree string_literal2_tree = null; -+ CommonTree string_literal3_tree = null; -+ -+ try { -+ // ReviewDsl.g:15:3: ( 'Review' 'result:' res= resultEnum ( -+ // 'Comment:' c= STRING )? ( (fc+= fileComment )+ )? ) -+ // ReviewDsl.g:15:3: 'Review' 'result:' res= resultEnum ( 'Comment:' -+ // c= STRING )? ( (fc+= fileComment )+ )? -+ { -+ root_0 = (CommonTree) adaptor.nil(); -+ -+ string_literal1 = (Token) input.LT(1); -+ match(input, 12, FOLLOW_12_in_reviewResult39); -+ string_literal1_tree = (CommonTree) adaptor -+ .create(string_literal1); -+ adaptor.addChild(root_0, string_literal1_tree); -+ -+ string_literal2 = (Token) input.LT(1); -+ match(input, 13, FOLLOW_13_in_reviewResult41); -+ string_literal2_tree = (CommonTree) adaptor -+ .create(string_literal2); -+ adaptor.addChild(root_0, string_literal2_tree); -+ -+ pushFollow(FOLLOW_resultEnum_in_reviewResult46); -+ res = resultEnum(); -+ _fsp--; -+ -+ adaptor.addChild(root_0, res.getTree()); -+ // ReviewDsl.g:16:3: ( 'Comment:' c= STRING )? -+ int alt1 = 2; -+ int LA1_0 = input.LA(1); -+ -+ if ((LA1_0 == 14)) { -+ alt1 = 1; -+ } -+ switch (alt1) { -+ case 1: -+ // ReviewDsl.g:16:4: 'Comment:' c= STRING -+ { -+ string_literal3 = (Token) input.LT(1); -+ match(input, 14, FOLLOW_14_in_reviewResult52); -+ string_literal3_tree = (CommonTree) adaptor -+ .create(string_literal3); -+ adaptor.addChild(root_0, string_literal3_tree); -+ -+ c = (Token) input.LT(1); -+ match(input, STRING, FOLLOW_STRING_in_reviewResult56); -+ c_tree = (CommonTree) adaptor.create(c); -+ adaptor.addChild(root_0, c_tree); -+ -+ } -+ break; -+ -+ } -+ -+ // ReviewDsl.g:17:2: ( (fc+= fileComment )+ )? 
-+ int alt3 = 2; -+ int LA3_0 = input.LA(1); -+ -+ if ((LA3_0 == 19)) { -+ alt3 = 1; -+ } -+ switch (alt3) { -+ case 1: -+ // ReviewDsl.g:17:3: (fc+= fileComment )+ -+ { -+ // ReviewDsl.g:17:5: (fc+= fileComment )+ -+ int cnt2 = 0; -+ loop2: do { -+ int alt2 = 2; -+ int LA2_0 = input.LA(1); -+ -+ if ((LA2_0 == 19)) { -+ alt2 = 1; -+ } -+ -+ switch (alt2) { -+ case 1: -+ // ReviewDsl.g:17:5: fc+= fileComment -+ { -+ pushFollow(FOLLOW_fileComment_in_reviewResult64); -+ fc = fileComment(); -+ _fsp--; -+ -+ adaptor.addChild(root_0, fc.getTree()); -+ if (list_fc == null) -+ list_fc = new ArrayList(); -+ list_fc.add(fc); -+ -+ } -+ break; -+ -+ default: -+ if (cnt2 >= 1) -+ break loop2; -+ EarlyExitException eee = new EarlyExitException(2, -+ input); -+ throw eee; -+ } -+ cnt2++; -+ } while (true); -+ -+ } -+ break; -+ -+ } -+ -+ retval.result = input.toString(res.start, res.stop); -+ retval.comment = c != null ? c.getText() : null; -+ retval.fileComments = list_fc; -+ -+ } -+ -+ retval.stop = input.LT(-1); -+ -+ retval.tree = (CommonTree) adaptor.rulePostProcessing(root_0); -+ adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); -+ -+ } catch (RecognitionException re) { -+ reportError(re); -+ recover(input, re); -+ } finally { -+ } -+ return retval; -+ } -+ -+ // $ANTLR end reviewResult -+ -+ public static class resultEnum_return extends ParserRuleReturnScope { -+ CommonTree tree; -+ -+ public Object getTree() { -+ return tree; -+ } -+ }; -+ -+ // $ANTLR start resultEnum -+ // ReviewDsl.g:21:1: resultEnum : ( 'PASSED' | 'WARNING' | 'FAILED' | 'TODO' -+ // ); -+ public final resultEnum_return resultEnum() throws RecognitionException { -+ resultEnum_return retval = new resultEnum_return(); -+ retval.start = input.LT(1); -+ -+ CommonTree root_0 = null; -+ -+ Token set4 = null; -+ -+ CommonTree set4_tree = null; -+ -+ try { -+ // ReviewDsl.g:22:3: ( 'PASSED' | 'WARNING' | 'FAILED' | 'TODO' ) -+ // ReviewDsl.g: -+ { -+ root_0 = (CommonTree) adaptor.nil(); -+ -+ set4 = (Token) input.LT(1); -+ if ((input.LA(1) >= 15 && input.LA(1) <= 18)) { -+ input.consume(); -+ adaptor.addChild(root_0, adaptor.create(set4)); -+ errorRecovery = false; -+ } else { -+ MismatchedSetException mse = new MismatchedSetException( -+ null, input); -+ recoverFromMismatchedSet(input, mse, -+ FOLLOW_set_in_resultEnum0); -+ throw mse; -+ } -+ -+ } -+ -+ retval.stop = input.LT(-1); -+ -+ retval.tree = (CommonTree) adaptor.rulePostProcessing(root_0); -+ adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); -+ -+ } catch (RecognitionException re) { -+ reportError(re); -+ recover(input, re); -+ } finally { -+ } -+ return retval; -+ } -+ -+ // $ANTLR end resultEnum -+ -+ public static class fileComment_return extends ParserRuleReturnScope { -+ public String path; -+ public String comment; -+ public List lineComments; -+ CommonTree tree; -+ -+ public Object getTree() { -+ return tree; -+ } -+ }; -+ -+ // $ANTLR start fileComment -+ // ReviewDsl.g:24:1: fileComment returns [String path, String comment, List -+ // lineComments] : 'File' p= STRING ':' (c= STRING )? 
(lc+= lineComment )* ; -+ public final fileComment_return fileComment() throws RecognitionException { -+ fileComment_return retval = new fileComment_return(); -+ retval.start = input.LT(1); -+ -+ CommonTree root_0 = null; -+ -+ Token p = null; -+ Token c = null; -+ Token string_literal5 = null; -+ Token char_literal6 = null; -+ List list_lc = null; -+ RuleReturnScope lc = null; -+ CommonTree p_tree = null; -+ CommonTree c_tree = null; -+ CommonTree string_literal5_tree = null; -+ CommonTree char_literal6_tree = null; -+ -+ try { -+ // ReviewDsl.g:25:2: ( 'File' p= STRING ':' (c= STRING )? (lc+= -+ // lineComment )* ) -+ // ReviewDsl.g:25:2: 'File' p= STRING ':' (c= STRING )? (lc+= -+ // lineComment )* -+ { -+ root_0 = (CommonTree) adaptor.nil(); -+ -+ string_literal5 = (Token) input.LT(1); -+ match(input, 19, FOLLOW_19_in_fileComment98); -+ string_literal5_tree = (CommonTree) adaptor -+ .create(string_literal5); -+ adaptor.addChild(root_0, string_literal5_tree); -+ -+ p = (Token) input.LT(1); -+ match(input, STRING, FOLLOW_STRING_in_fileComment102); -+ p_tree = (CommonTree) adaptor.create(p); -+ adaptor.addChild(root_0, p_tree); -+ -+ char_literal6 = (Token) input.LT(1); -+ match(input, 20, FOLLOW_20_in_fileComment104); -+ char_literal6_tree = (CommonTree) adaptor.create(char_literal6); -+ adaptor.addChild(root_0, char_literal6_tree); -+ -+ // ReviewDsl.g:25:22: (c= STRING )? -+ int alt4 = 2; -+ int LA4_0 = input.LA(1); -+ -+ if ((LA4_0 == STRING)) { -+ alt4 = 1; -+ } -+ switch (alt4) { -+ case 1: -+ // ReviewDsl.g:25:23: c= STRING -+ { -+ c = (Token) input.LT(1); -+ match(input, STRING, FOLLOW_STRING_in_fileComment109); -+ c_tree = (CommonTree) adaptor.create(c); -+ adaptor.addChild(root_0, c_tree); -+ -+ } -+ break; -+ -+ } -+ -+ // ReviewDsl.g:26:3: (lc+= lineComment )* -+ loop5: do { -+ int alt5 = 2; -+ int LA5_0 = input.LA(1); -+ -+ if ((LA5_0 == 21)) { -+ alt5 = 1; -+ } -+ -+ switch (alt5) { -+ case 1: -+ // ReviewDsl.g:26:3: lc+= lineComment -+ { -+ pushFollow(FOLLOW_lineComment_in_fileComment115); -+ lc = lineComment(); -+ _fsp--; -+ -+ adaptor.addChild(root_0, lc.getTree()); -+ if (list_lc == null) -+ list_lc = new ArrayList(); -+ list_lc.add(lc); -+ -+ } -+ break; -+ -+ default: -+ break loop5; -+ } -+ } while (true); -+ -+ retval.path = p.getText(); -+ retval.comment = c.getText(); -+ retval.lineComments = list_lc; -+ -+ } -+ -+ retval.stop = input.LT(-1); -+ -+ retval.tree = (CommonTree) adaptor.rulePostProcessing(root_0); -+ adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); -+ -+ } catch (RecognitionException re) { -+ reportError(re); -+ recover(input, re); -+ } finally { -+ } -+ return retval; -+ } -+ -+ // $ANTLR end fileComment -+ -+ public static class lineComment_return extends ParserRuleReturnScope { -+ public int begin; -+ public String end; -+ public String comment; -+ CommonTree tree; -+ -+ public Object getTree() { -+ return tree; -+ } -+ }; -+ -+ // $ANTLR start lineComment -+ // ReviewDsl.g:29:1: lineComment returns [int begin, String end, String -+ // comment] : 'Line' s= INT ( '-' e= INT )? 
':' c= STRING ; -+ public final lineComment_return lineComment() throws RecognitionException { -+ lineComment_return retval = new lineComment_return(); -+ retval.start = input.LT(1); -+ -+ CommonTree root_0 = null; -+ -+ Token s = null; -+ Token e = null; -+ Token c = null; -+ Token string_literal7 = null; -+ Token char_literal8 = null; -+ Token char_literal9 = null; -+ -+ CommonTree s_tree = null; -+ CommonTree e_tree = null; -+ CommonTree c_tree = null; -+ CommonTree string_literal7_tree = null; -+ CommonTree char_literal8_tree = null; -+ CommonTree char_literal9_tree = null; -+ -+ try { -+ // ReviewDsl.g:30:3: ( 'Line' s= INT ( '-' e= INT )? ':' c= STRING ) -+ // ReviewDsl.g:30:3: 'Line' s= INT ( '-' e= INT )? ':' c= STRING -+ { -+ root_0 = (CommonTree) adaptor.nil(); -+ -+ string_literal7 = (Token) input.LT(1); -+ match(input, 21, FOLLOW_21_in_lineComment130); -+ string_literal7_tree = (CommonTree) adaptor -+ .create(string_literal7); -+ adaptor.addChild(root_0, string_literal7_tree); -+ -+ s = (Token) input.LT(1); -+ match(input, INT, FOLLOW_INT_in_lineComment134); -+ s_tree = (CommonTree) adaptor.create(s); -+ adaptor.addChild(root_0, s_tree); -+ -+ // ReviewDsl.g:30:16: ( '-' e= INT )? -+ int alt6 = 2; -+ int LA6_0 = input.LA(1); -+ -+ if ((LA6_0 == 22)) { -+ alt6 = 1; -+ } -+ switch (alt6) { -+ case 1: -+ // ReviewDsl.g:30:17: '-' e= INT -+ { -+ char_literal8 = (Token) input.LT(1); -+ match(input, 22, FOLLOW_22_in_lineComment137); -+ char_literal8_tree = (CommonTree) adaptor -+ .create(char_literal8); -+ adaptor.addChild(root_0, char_literal8_tree); -+ -+ e = (Token) input.LT(1); -+ match(input, INT, FOLLOW_INT_in_lineComment141); -+ e_tree = (CommonTree) adaptor.create(e); -+ adaptor.addChild(root_0, e_tree); -+ -+ } -+ break; -+ -+ } -+ -+ char_literal9 = (Token) input.LT(1); -+ match(input, 20, FOLLOW_20_in_lineComment145); -+ char_literal9_tree = (CommonTree) adaptor.create(char_literal9); -+ adaptor.addChild(root_0, char_literal9_tree); -+ -+ c = (Token) input.LT(1); -+ match(input, STRING, FOLLOW_STRING_in_lineComment150); -+ c_tree = (CommonTree) adaptor.create(c); -+ adaptor.addChild(root_0, c_tree); -+ -+ retval.begin = Integer.parseInt(s.getText()); -+ retval.end = e != null ? 
e.getText() : null; -+ retval.comment = c.getText(); -+ -+ } -+ -+ retval.stop = input.LT(-1); -+ -+ retval.tree = (CommonTree) adaptor.rulePostProcessing(root_0); -+ adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); -+ -+ } catch (RecognitionException re) { -+ reportError(re); -+ recover(input, re); -+ } finally { -+ } -+ return retval; -+ } -+ -+ // $ANTLR end lineComment -+ -+ public static class reviewScope_return extends ParserRuleReturnScope { -+ public List scopeItems; -+ CommonTree tree; -+ -+ public Object getTree() { -+ return tree; -+ } -+ }; -+ -+ // $ANTLR start reviewScope -+ // ReviewDsl.g:35:1: reviewScope returns [List scopeItems] : 'Review' -+ // 'scope:' (s+= ( resourceDef | patchDef | changesetDef ) )* ; -+ public final reviewScope_return reviewScope() throws RecognitionException { -+ reviewScope_return retval = new reviewScope_return(); -+ retval.start = input.LT(1); -+ -+ CommonTree root_0 = null; -+ -+ Token string_literal10 = null; -+ Token string_literal11 = null; -+ Token s = null; -+ List list_s = new ArrayList(); -+ resourceDef_return resourceDef12 = null; -+ -+ patchDef_return patchDef13 = null; -+ -+ changesetDef_return changesetDef14 = null; -+ -+ CommonTree string_literal10_tree = null; -+ CommonTree string_literal11_tree = null; -+ CommonTree s_tree = null; -+ -+ try { -+ // ReviewDsl.g:36:4: ( 'Review' 'scope:' (s+= ( resourceDef | -+ // patchDef | changesetDef ) )* ) -+ // ReviewDsl.g:36:4: 'Review' 'scope:' (s+= ( resourceDef | patchDef -+ // | changesetDef ) )* -+ { -+ root_0 = (CommonTree) adaptor.nil(); -+ -+ string_literal10 = (Token) input.LT(1); -+ match(input, 12, FOLLOW_12_in_reviewScope167); -+ string_literal10_tree = (CommonTree) adaptor -+ .create(string_literal10); -+ adaptor.addChild(root_0, string_literal10_tree); -+ -+ string_literal11 = (Token) input.LT(1); -+ match(input, 23, FOLLOW_23_in_reviewScope169); -+ string_literal11_tree = (CommonTree) adaptor -+ .create(string_literal11); -+ adaptor.addChild(root_0, string_literal11_tree); -+ -+ // ReviewDsl.g:36:22: (s+= ( resourceDef | patchDef | -+ // changesetDef ) )* -+ loop8: do { -+ int alt8 = 2; -+ int LA8_0 = input.LA(1); -+ -+ if (((LA8_0 >= 24 && LA8_0 <= 25) || LA8_0 == 27)) { -+ alt8 = 1; -+ } -+ -+ switch (alt8) { -+ case 1: -+ // ReviewDsl.g:36:23: s+= ( resourceDef | patchDef | -+ // changesetDef ) -+ { -+ // ReviewDsl.g:36:26: ( resourceDef | patchDef | -+ // changesetDef ) -+ int alt7 = 3; -+ switch (input.LA(1)) { -+ case 24: { -+ alt7 = 1; -+ } -+ break; -+ case 27: { -+ alt7 = 2; -+ } -+ break; -+ case 25: { -+ alt7 = 3; -+ } -+ break; -+ default: -+ NoViableAltException nvae = new NoViableAltException( -+ ""36:26: ( resourceDef | patchDef | changesetDef )"", -+ 7, 0, input); -+ -+ throw nvae; -+ } -+ -+ switch (alt7) { -+ case 1: -+ // ReviewDsl.g:36:27: resourceDef -+ { -+ pushFollow(FOLLOW_resourceDef_in_reviewScope175); -+ resourceDef12 = resourceDef(); -+ list_s.add(resourceDef12); -+ _fsp--; -+ -+ adaptor.addChild(root_0, resourceDef12.getTree()); -+ -+ } -+ break; -+ case 2: -+ // ReviewDsl.g:36:40: patchDef -+ { -+ pushFollow(FOLLOW_patchDef_in_reviewScope178); -+ patchDef13 = patchDef(); -+ list_s.add(patchDef13); -+ _fsp--; -+ -+ adaptor.addChild(root_0, patchDef13.getTree()); -+ -+ } -+ break; -+ case 3: -+ // ReviewDsl.g:36:52: changesetDef -+ { -+ pushFollow(FOLLOW_changesetDef_in_reviewScope183); -+ changesetDef14 = changesetDef(); -+ list_s.add(changesetDef14); -+ _fsp--; -+ -+ adaptor.addChild(root_0, changesetDef14.getTree()); -+ -+ } -+ 
break; -+ -+ } -+ -+ } -+ break; -+ -+ default: -+ break loop8; -+ } -+ } while (true); -+ -+ retval.scopeItems = list_s; -+ -+ } -+ -+ retval.stop = input.LT(-1); -+ -+ retval.tree = (CommonTree) adaptor.rulePostProcessing(root_0); -+ adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); -+ -+ } catch (RecognitionException re) { -+ reportError(re); -+ recover(input, re); -+ } finally { -+ } -+ return retval; -+ } -+ -+ // $ANTLR end reviewScope -+ -+ public static class resourceDef_return extends ParserRuleReturnScope { -+ public Object source; -+ CommonTree tree; -+ -+ public Object getTree() { -+ return tree; -+ } -+ }; -+ -+ // $ANTLR start resourceDef -+ // ReviewDsl.g:40:1: resourceDef returns [Object source] : 'Resource' s= -+ // attachmentSource ; -+ public final resourceDef_return resourceDef() throws RecognitionException { -+ resourceDef_return retval = new resourceDef_return(); -+ retval.start = input.LT(1); -+ -+ CommonTree root_0 = null; -+ -+ Token string_literal15 = null; -+ attachmentSource_return s = null; -+ -+ CommonTree string_literal15_tree = null; -+ -+ try { -+ // ReviewDsl.g:41:4: ( 'Resource' s= attachmentSource ) -+ // ReviewDsl.g:41:4: 'Resource' s= attachmentSource -+ { -+ root_0 = (CommonTree) adaptor.nil(); -+ -+ string_literal15 = (Token) input.LT(1); -+ match(input, 24, FOLLOW_24_in_resourceDef205); -+ string_literal15_tree = (CommonTree) adaptor -+ .create(string_literal15); -+ adaptor.addChild(root_0, string_literal15_tree); -+ -+ pushFollow(FOLLOW_attachmentSource_in_resourceDef210); -+ s = attachmentSource(); -+ _fsp--; -+ -+ adaptor.addChild(root_0, s.getTree()); -+ retval.source = s; -+ -+ } -+ -+ retval.stop = input.LT(-1); -+ -+ retval.tree = (CommonTree) adaptor.rulePostProcessing(root_0); -+ adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); -+ -+ } catch (RecognitionException re) { -+ reportError(re); -+ recover(input, re); -+ } finally { -+ } -+ return retval; -+ } -+ -+ // $ANTLR end resourceDef -+ -+ public static class changesetDef_return extends ParserRuleReturnScope { -+ public String revision; -+ public String repoUrl; -+ CommonTree tree; -+ -+ public Object getTree() { -+ return tree; -+ } -+ }; -+ -+ // $ANTLR start changesetDef -+ // ReviewDsl.g:44:1: changesetDef returns [String revision, String repoUrl] -+ // : 'Changeset' rev= STRING 'from' url= STRING ; -+ public final changesetDef_return changesetDef() throws RecognitionException { -+ changesetDef_return retval = new changesetDef_return(); -+ retval.start = input.LT(1); -+ -+ CommonTree root_0 = null; -+ -+ Token rev = null; -+ Token url = null; -+ Token string_literal16 = null; -+ Token string_literal17 = null; -+ -+ CommonTree rev_tree = null; -+ CommonTree url_tree = null; -+ CommonTree string_literal16_tree = null; -+ CommonTree string_literal17_tree = null; -+ -+ try { -+ // ReviewDsl.g:45:4: ( 'Changeset' rev= STRING 'from' url= STRING ) -+ // ReviewDsl.g:45:4: 'Changeset' rev= STRING 'from' url= STRING -+ { -+ root_0 = (CommonTree) adaptor.nil(); -+ -+ string_literal16 = (Token) input.LT(1); -+ match(input, 25, FOLLOW_25_in_changesetDef226); -+ string_literal16_tree = (CommonTree) adaptor -+ .create(string_literal16); -+ adaptor.addChild(root_0, string_literal16_tree); -+ -+ rev = (Token) input.LT(1); -+ match(input, STRING, FOLLOW_STRING_in_changesetDef230); -+ rev_tree = (CommonTree) adaptor.create(rev); -+ adaptor.addChild(root_0, rev_tree); -+ -+ string_literal17 = (Token) input.LT(1); -+ match(input, 26, FOLLOW_26_in_changesetDef233); -+ 
string_literal17_tree = (CommonTree) adaptor -+ .create(string_literal17); -+ adaptor.addChild(root_0, string_literal17_tree); -+ -+ url = (Token) input.LT(1); -+ match(input, STRING, FOLLOW_STRING_in_changesetDef237); -+ url_tree = (CommonTree) adaptor.create(url); -+ adaptor.addChild(root_0, url_tree); -+ -+ retval.revision = rev.getText(); -+ retval.repoUrl = url.getText(); -+ -+ } -+ -+ retval.stop = input.LT(-1); -+ -+ retval.tree = (CommonTree) adaptor.rulePostProcessing(root_0); -+ adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); -+ -+ } catch (RecognitionException re) { -+ reportError(re); -+ recover(input, re); -+ } finally { -+ } -+ return retval; -+ } -+ -+ // $ANTLR end changesetDef -+ -+ public static class patchDef_return extends ParserRuleReturnScope { -+ public Object source; -+ CommonTree tree; -+ -+ public Object getTree() { -+ return tree; -+ } -+ }; -+ -+ // $ANTLR start patchDef -+ // ReviewDsl.g:48:1: patchDef returns [Object source] : 'Patch' s= -+ // attachmentSource ; -+ public final patchDef_return patchDef() throws RecognitionException { -+ patchDef_return retval = new patchDef_return(); -+ retval.start = input.LT(1); -+ -+ CommonTree root_0 = null; -+ -+ Token string_literal18 = null; -+ attachmentSource_return s = null; -+ -+ CommonTree string_literal18_tree = null; -+ -+ try { -+ // ReviewDsl.g:49:2: ( 'Patch' s= attachmentSource ) -+ // ReviewDsl.g:49:2: 'Patch' s= attachmentSource -+ { -+ root_0 = (CommonTree) adaptor.nil(); -+ -+ string_literal18 = (Token) input.LT(1); -+ match(input, 27, FOLLOW_27_in_patchDef252); -+ string_literal18_tree = (CommonTree) adaptor -+ .create(string_literal18); -+ adaptor.addChild(root_0, string_literal18_tree); -+ -+ pushFollow(FOLLOW_attachmentSource_in_patchDef256); -+ s = attachmentSource(); -+ _fsp--; -+ -+ adaptor.addChild(root_0, s.getTree()); -+ retval.source = s; -+ -+ } -+ -+ retval.stop = input.LT(-1); -+ -+ retval.tree = (CommonTree) adaptor.rulePostProcessing(root_0); -+ adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); -+ -+ } catch (RecognitionException re) { -+ reportError(re); -+ recover(input, re); -+ } finally { -+ } -+ return retval; -+ } -+ -+ // $ANTLR end patchDef -+ -+ public static class attachmentSource_return extends ParserRuleReturnScope { -+ public String filename; -+ public String author; -+ String createdDate; -+ public String taskId; -+ CommonTree tree; -+ -+ public Object getTree() { -+ return tree; -+ } -+ }; -+ -+ // $ANTLR start attachmentSource -+ // ReviewDsl.g:52:1: attachmentSource returns [String filename, String -+ // author; String createdDate, String taskId] : 'from' 'Attachment' fn= -+ // STRING 'by' a= STRING 'on' d= STRING 'of' 'task' t= taskIdDef ; -+ public final attachmentSource_return attachmentSource() -+ throws RecognitionException { -+ attachmentSource_return retval = new attachmentSource_return(); -+ retval.start = input.LT(1); -+ -+ CommonTree root_0 = null; -+ -+ Token fn = null; -+ Token a = null; -+ Token d = null; -+ Token string_literal19 = null; -+ Token string_literal20 = null; -+ Token string_literal21 = null; -+ Token string_literal22 = null; -+ Token string_literal23 = null; -+ Token string_literal24 = null; -+ taskIdDef_return t = null; -+ -+ CommonTree fn_tree = null; -+ CommonTree a_tree = null; -+ CommonTree d_tree = null; -+ CommonTree string_literal19_tree = null; -+ CommonTree string_literal20_tree = null; -+ CommonTree string_literal21_tree = null; -+ CommonTree string_literal22_tree = null; -+ CommonTree 
string_literal23_tree = null; -+ CommonTree string_literal24_tree = null; -+ -+ try { -+ // ReviewDsl.g:53:2: ( 'from' 'Attachment' fn= STRING 'by' a= STRING -+ // 'on' d= STRING 'of' 'task' t= taskIdDef ) -+ // ReviewDsl.g:53:2: 'from' 'Attachment' fn= STRING 'by' a= STRING -+ // 'on' d= STRING 'of' 'task' t= taskIdDef -+ { -+ root_0 = (CommonTree) adaptor.nil(); -+ -+ string_literal19 = (Token) input.LT(1); -+ match(input, 26, FOLLOW_26_in_attachmentSource274); -+ string_literal19_tree = (CommonTree) adaptor -+ .create(string_literal19); -+ adaptor.addChild(root_0, string_literal19_tree); -+ -+ string_literal20 = (Token) input.LT(1); -+ match(input, 28, FOLLOW_28_in_attachmentSource276); -+ string_literal20_tree = (CommonTree) adaptor -+ .create(string_literal20); -+ adaptor.addChild(root_0, string_literal20_tree); -+ -+ fn = (Token) input.LT(1); -+ match(input, STRING, FOLLOW_STRING_in_attachmentSource280); -+ fn_tree = (CommonTree) adaptor.create(fn); -+ adaptor.addChild(root_0, fn_tree); -+ -+ string_literal21 = (Token) input.LT(1); -+ match(input, 29, FOLLOW_29_in_attachmentSource284); -+ string_literal21_tree = (CommonTree) adaptor -+ .create(string_literal21); -+ adaptor.addChild(root_0, string_literal21_tree); -+ -+ a = (Token) input.LT(1); -+ match(input, STRING, FOLLOW_STRING_in_attachmentSource288); -+ a_tree = (CommonTree) adaptor.create(a); -+ adaptor.addChild(root_0, a_tree); -+ -+ string_literal22 = (Token) input.LT(1); -+ match(input, 30, FOLLOW_30_in_attachmentSource292); -+ string_literal22_tree = (CommonTree) adaptor -+ .create(string_literal22); -+ adaptor.addChild(root_0, string_literal22_tree); -+ -+ d = (Token) input.LT(1); -+ match(input, STRING, FOLLOW_STRING_in_attachmentSource296); -+ d_tree = (CommonTree) adaptor.create(d); -+ adaptor.addChild(root_0, d_tree); -+ -+ string_literal23 = (Token) input.LT(1); -+ match(input, 31, FOLLOW_31_in_attachmentSource300); -+ string_literal23_tree = (CommonTree) adaptor -+ .create(string_literal23); -+ adaptor.addChild(root_0, string_literal23_tree); -+ -+ string_literal24 = (Token) input.LT(1); -+ match(input, 32, FOLLOW_32_in_attachmentSource302); -+ string_literal24_tree = (CommonTree) adaptor -+ .create(string_literal24); -+ adaptor.addChild(root_0, string_literal24_tree); -+ -+ pushFollow(FOLLOW_taskIdDef_in_attachmentSource306); -+ t = taskIdDef(); -+ _fsp--; -+ -+ adaptor.addChild(root_0, t.getTree()); -+ retval.filename = fn.getText(); -+ retval.author = a.getText(); -+ retval.createdDate = d.getText(); -+ retval.taskId = input.toString(t.start, t.stop); -+ -+ } -+ -+ retval.stop = input.LT(-1); -+ -+ retval.tree = (CommonTree) adaptor.rulePostProcessing(root_0); -+ adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); -+ -+ } catch (RecognitionException re) { -+ reportError(re); -+ recover(input, re); -+ } finally { -+ } -+ return retval; -+ } -+ -+ // $ANTLR end attachmentSource -+ -+ public static class taskIdDef_return extends ParserRuleReturnScope { -+ CommonTree tree; -+ -+ public Object getTree() { -+ return tree; -+ } -+ }; -+ -+ // $ANTLR start taskIdDef -+ // ReviewDsl.g:60:1: taskIdDef : ( TASK_ID | INT ); -+ public final taskIdDef_return taskIdDef() throws RecognitionException { -+ taskIdDef_return retval = new taskIdDef_return(); -+ retval.start = input.LT(1); -+ -+ CommonTree root_0 = null; -+ -+ Token set25 = null; -+ -+ CommonTree set25_tree = null; -+ -+ try { -+ // ReviewDsl.g:61:4: ( TASK_ID | INT ) -+ // ReviewDsl.g: -+ { -+ root_0 = (CommonTree) adaptor.nil(); -+ -+ set25 = (Token) 
input.LT(1); -+ if ((input.LA(1) >= INT && input.LA(1) <= TASK_ID)) { -+ input.consume(); -+ adaptor.addChild(root_0, adaptor.create(set25)); -+ errorRecovery = false; -+ } else { -+ MismatchedSetException mse = new MismatchedSetException( -+ null, input); -+ recoverFromMismatchedSet(input, mse, -+ FOLLOW_set_in_taskIdDef0); -+ throw mse; -+ } -+ -+ } -+ -+ retval.stop = input.LT(-1); -+ -+ retval.tree = (CommonTree) adaptor.rulePostProcessing(root_0); -+ adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); -+ -+ } catch (RecognitionException re) { -+ reportError(re); -+ recover(input, re); -+ } finally { -+ } -+ return retval; -+ } -+ -+ // $ANTLR end taskIdDef -+ -+ public static final BitSet FOLLOW_12_in_reviewResult39 = new BitSet( -+ new long[] { 0x0000000000002000L }); -+ public static final BitSet FOLLOW_13_in_reviewResult41 = new BitSet( -+ new long[] { 0x0000000000078000L }); -+ public static final BitSet FOLLOW_resultEnum_in_reviewResult46 = new BitSet( -+ new long[] { 0x0000000000084002L }); -+ public static final BitSet FOLLOW_14_in_reviewResult52 = new BitSet( -+ new long[] { 0x0000000000000010L }); -+ public static final BitSet FOLLOW_STRING_in_reviewResult56 = new BitSet( -+ new long[] { 0x0000000000080002L }); -+ public static final BitSet FOLLOW_fileComment_in_reviewResult64 = new BitSet( -+ new long[] { 0x0000000000080002L }); -+ public static final BitSet FOLLOW_set_in_resultEnum0 = new BitSet( -+ new long[] { 0x0000000000000002L }); -+ public static final BitSet FOLLOW_19_in_fileComment98 = new BitSet( -+ new long[] { 0x0000000000000010L }); -+ public static final BitSet FOLLOW_STRING_in_fileComment102 = new BitSet( -+ new long[] { 0x0000000000100000L }); -+ public static final BitSet FOLLOW_20_in_fileComment104 = new BitSet( -+ new long[] { 0x0000000000200012L }); -+ public static final BitSet FOLLOW_STRING_in_fileComment109 = new BitSet( -+ new long[] { 0x0000000000200002L }); -+ public static final BitSet FOLLOW_lineComment_in_fileComment115 = new BitSet( -+ new long[] { 0x0000000000200002L }); -+ public static final BitSet FOLLOW_21_in_lineComment130 = new BitSet( -+ new long[] { 0x0000000000000020L }); -+ public static final BitSet FOLLOW_INT_in_lineComment134 = new BitSet( -+ new long[] { 0x0000000000500000L }); -+ public static final BitSet FOLLOW_22_in_lineComment137 = new BitSet( -+ new long[] { 0x0000000000000020L }); -+ public static final BitSet FOLLOW_INT_in_lineComment141 = new BitSet( -+ new long[] { 0x0000000000100000L }); -+ public static final BitSet FOLLOW_20_in_lineComment145 = new BitSet( -+ new long[] { 0x0000000000000010L }); -+ public static final BitSet FOLLOW_STRING_in_lineComment150 = new BitSet( -+ new long[] { 0x0000000000000002L }); -+ public static final BitSet FOLLOW_12_in_reviewScope167 = new BitSet( -+ new long[] { 0x0000000000800000L }); -+ public static final BitSet FOLLOW_23_in_reviewScope169 = new BitSet( -+ new long[] { 0x000000000B000002L }); -+ public static final BitSet FOLLOW_resourceDef_in_reviewScope175 = new BitSet( -+ new long[] { 0x000000000B000002L }); -+ public static final BitSet FOLLOW_patchDef_in_reviewScope178 = new BitSet( -+ new long[] { 0x000000000B000002L }); -+ public static final BitSet FOLLOW_changesetDef_in_reviewScope183 = new BitSet( -+ new long[] { 0x000000000B000002L }); -+ public static final BitSet FOLLOW_24_in_resourceDef205 = new BitSet( -+ new long[] { 0x0000000004000000L }); -+ public static final BitSet FOLLOW_attachmentSource_in_resourceDef210 = new BitSet( -+ new long[] { 
0x0000000000000002L }); -+ public static final BitSet FOLLOW_25_in_changesetDef226 = new BitSet( -+ new long[] { 0x0000000000000010L }); -+ public static final BitSet FOLLOW_STRING_in_changesetDef230 = new BitSet( -+ new long[] { 0x0000000004000000L }); -+ public static final BitSet FOLLOW_26_in_changesetDef233 = new BitSet( -+ new long[] { 0x0000000000000010L }); -+ public static final BitSet FOLLOW_STRING_in_changesetDef237 = new BitSet( -+ new long[] { 0x0000000000000002L }); -+ public static final BitSet FOLLOW_27_in_patchDef252 = new BitSet( -+ new long[] { 0x0000000004000000L }); -+ public static final BitSet FOLLOW_attachmentSource_in_patchDef256 = new BitSet( -+ new long[] { 0x0000000000000002L }); -+ public static final BitSet FOLLOW_26_in_attachmentSource274 = new BitSet( -+ new long[] { 0x0000000010000000L }); -+ public static final BitSet FOLLOW_28_in_attachmentSource276 = new BitSet( -+ new long[] { 0x0000000000000010L }); -+ public static final BitSet FOLLOW_STRING_in_attachmentSource280 = new BitSet( -+ new long[] { 0x0000000020000000L }); -+ public static final BitSet FOLLOW_29_in_attachmentSource284 = new BitSet( -+ new long[] { 0x0000000000000010L }); -+ public static final BitSet FOLLOW_STRING_in_attachmentSource288 = new BitSet( -+ new long[] { 0x0000000040000000L }); -+ public static final BitSet FOLLOW_30_in_attachmentSource292 = new BitSet( -+ new long[] { 0x0000000000000010L }); -+ public static final BitSet FOLLOW_STRING_in_attachmentSource296 = new BitSet( -+ new long[] { 0x0000000080000000L }); -+ public static final BitSet FOLLOW_31_in_attachmentSource300 = new BitSet( -+ new long[] { 0x0000000100000000L }); -+ public static final BitSet FOLLOW_32_in_attachmentSource302 = new BitSet( -+ new long[] { 0x0000000000000060L }); -+ public static final BitSet FOLLOW_taskIdDef_in_attachmentSource306 = new BitSet( -+ new long[] { 0x0000000000000002L }); -+ public static final BitSet FOLLOW_set_in_taskIdDef0 = new BitSet( -+ new long[] { 0x0000000000000002L }); -+ -+} -\ No newline at end of file -diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/internal/ReviewDslSerializer.java b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/internal/ReviewDslSerializer.java -new file mode 100644 -index 00000000..567758f3 ---- /dev/null -+++ b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/internal/ReviewDslSerializer.java -@@ -0,0 +1,39 @@ -+/******************************************************************************* -+ * Copyright (c) 2011 Research Group for Industrial Software (INSO), Vienna University of Technology. -+ * All rights reserved. 
This program and the accompanying materials -+ * are made available under the terms of the Eclipse Public License v1.0 -+ * which accompanies this distribution, and is available at -+ * http://www.eclipse.org/legal/epl-v10.html -+ * -+ * Contributors: -+ * Research Group for Industrial Software (INSO), Vienna University of Technology - initial API and implementation -+ *******************************************************************************/ -+ -+package org.eclipse.mylyn.reviews.tasks.dsl.internal; -+ -+import org.eclipse.mylyn.reviews.tasks.dsl.IReviewDslSerializer; -+import org.eclipse.mylyn.reviews.tasks.dsl.ReviewDslResult; -+import org.eclipse.mylyn.reviews.tasks.dsl.ReviewDslScope; -+ -+/** -+ * -+ * @author mattk -+ * -+ */ -+public class ReviewDslSerializer implements IReviewDslSerializer { -+ -+ /* (non-Javadoc) -+ * @see org.eclipse.mylyn.reviews.tasks.dsl.internal.IReviewDslSerializer#serialize(org.eclipse.mylyn.reviews.tasks.dsl.ReviewDslScope) -+ */ -+ @Override -+ public String serialize(ReviewDslScope scope) { -+ return scope.serialize(new StringBuilder()).toString(); -+ } -+ /* (non-Javadoc) -+ * @see org.eclipse.mylyn.reviews.tasks.dsl.internal.IReviewDslSerializer#serialize(org.eclipse.mylyn.reviews.tasks.dsl.ReviewDslResult) -+ */ -+ @Override -+ public String serialize(ReviewDslResult result) { -+ return result.serialize(new StringBuilder()).toString(); -+ } -+} -diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/scoping/ReviewDslScopeProvider.java b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/scoping/ReviewDslScopeProvider.java -deleted file mode 100644 -index b763d9b4..00000000 ---- a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/scoping/ReviewDslScopeProvider.java -+++ /dev/null -@@ -1,17 +0,0 @@ --/* -- * generated by Xtext -- */ --package org.eclipse.mylyn.reviews.tasks.dsl.scoping; -- --import org.eclipse.xtext.scoping.impl.AbstractDeclarativeScopeProvider; -- --/** -- * This class contains custom scoping description. 
-- * -- * see : http://www.eclipse.org/Xtext/documentation/latest/xtext.html#scoping -- * on how and when to use it -- * -- */ --public class ReviewDslScopeProvider extends AbstractDeclarativeScopeProvider { -- --} -diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/validation/ReviewDslJavaValidator.java b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/validation/ReviewDslJavaValidator.java -deleted file mode 100644 -index 1ebad0a8..00000000 ---- a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/src/org/eclipse/mylyn/reviews/tasks/dsl/validation/ReviewDslJavaValidator.java -+++ /dev/null -@@ -1,13 +0,0 @@ --package org.eclipse.mylyn.reviews.tasks.dsl.validation; -- -- --public class ReviewDslJavaValidator extends AbstractReviewDslJavaValidator { -- --// @Check --// public void checkGreetingStartsWithCapital(Greeting greeting) { --// if (!Character.isUpperCase(greeting.getName().charAt(0))) { --// warning(""Name should start with a capital"", MyDslPackage.GREETING__NAME); --// } --// } -- --} -diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.ui/src/org/eclipse/mylyn/reviews/tasks/ui/internal/ReviewsUiPlugin.java b/tbr/org.eclipse.mylyn.reviews.tasks.ui/src/org/eclipse/mylyn/reviews/tasks/ui/internal/ReviewsUiPlugin.java -index 1d0602c9..a54bb134 100644 ---- a/tbr/org.eclipse.mylyn.reviews.tasks.ui/src/org/eclipse/mylyn/reviews/tasks/ui/internal/ReviewsUiPlugin.java -+++ b/tbr/org.eclipse.mylyn.reviews.tasks.ui/src/org/eclipse/mylyn/reviews/tasks/ui/internal/ReviewsUiPlugin.java -@@ -29,7 +29,7 @@ public class ReviewsUiPlugin extends AbstractUIPlugin { - // The shared instance - private static ReviewsUiPlugin plugin; - -- private static ReviewTaskMapper mapper; -+ private static IReviewMapper mapper; - - /** - * The constructor" -8de9caa985fa3e79348c1c88b9313a4bce4d92e6,Valadoc,"libvaladoc: Add support for SourceCode attributes -",a,https://github.com/GNOME/vala/,⚠️ Could not parse repo info -08dbf2ed3696c16a8c2f67e436ace7f6a7622386,cloudname$cloudname,"Brave new world -Rewrite into a core library with simple methods and an abstraction layer on -top of the backend system. -The following backends are implemented -* Memory (for testing; only works within a single JVM) -* ZooKeeper (proof-of-concept implementation of the backend) -The following libraries are created: -* Service discovery library -This brings the overall functionality on par with 2.x with a few exceptions: -* Locking isn't implemented. That did not work for the old library so -there's no real change in functionality -* It isn't possible to query *everything* from a client. This will be -addressed in another commit (or just ignored completely since the -backends offers this in some shape or form) -* It isn't possible to resolve coordinates partially, f.e. finding -""logserver"" when your own coordinate is ""service.tag.region""; -""logserver"" should resolve to ""logserver.tag.region"". This will -be solved in a later commit by making a separate resolver class that -creates service coordinates based on existing coordinates. -",p,https://github.com/cloudname/cloudname,"diff --git a/README.md b/README.md -index 4a1fbe9b..90ef64d5 100644 ---- a/README.md -+++ b/README.md -@@ -1,3 +1,31 @@ - # Brave new world: Cloudname 3.0 - --Forget everything we said earlier. This is going to be even greater. -+## cn-core -+The core Cloudname library for resource management -+ -+## cn-service -+Service discovery built on top of the core library. 
-+ -+--- -+# The yet-to-be-updated section -+ -+## a3 -- Authentication, Authorization and Access library -+Mostly the first, some of the second. Still unchanged from 2.x -+ -+## Idgen - Generating IDs -+Generate bucketloads of unique IDs spread across multiple hosts, services and -+regions. -+ -+## Flags - Command line Flags -+Simple command line flag handling via annotations on properties and accessors. -+ -+## Log - Core logging library -+Core entities for logging. -+ -+## Timber - A log server and client -+A networked high-performance log server that uses protobuf entities. Server and -+client code. -+ -+## Testtools - Testing tools and utilities -+ -+Mostly for internal use by the various modules. -diff --git a/cn-core/README.md b/cn-core/README.md -new file mode 100644 -index 00000000..9df86e77 ---- /dev/null -+++ b/cn-core/README.md -@@ -0,0 +1,30 @@ -+# Cloudname Core -+ -+The core libraries are mostly for internal use and are the basic building block for the other libraries. Clients won't use or access this library directly but through the libraries build on the core library. -+ -+The core library supports various backends. The build-in backend is memory-based and is *not* something you want to use in a production service. Its sole purpose is to provide a fast single-JVM backend used when testing other modules built on top of the core library. -+ -+## Key concepts -+### Leases -+The backends expose **leases** to clients. Each lease is represented by a **path**. Clients belong to a **region**. A region is typically a cluster of servers that are coordinate through a single backend. -+ -+ -+ -+#### Client leases -+Ordinary leases exists only as long as the client is running and is connected to the backend. When the client terminates the connection the lease expires and anyone listening on changes will be notified. -+ -+#### Permanent leases -+Permanent leases persist between client connections. If a client connects to the backend, creates a permanent lease and then disconnects the lease will still be in place. The permanent leases does not expire and will only be removed if done so explicitly by the clients. -+ -+### Paths -+A **path** is nothing more than an ordered set of strings that represents a (real or virtual) tree structure. The backend itself does not need to use a hierarchical storage mechanism since the paths can be used directly as identifiers. -+ -+Elements in the paths follows the DNS naming conventions in RFC 952 and RFC 1123: Strings between 1-63 characters long, a-z characters (case insensitive) and hyphens. A string cannot start or end with a hyphen. -+ -+ -+## Backend requirements -+* Paths are guaranteed unique for all clients in the same cluster. There is no guarantee that a lease will be unique for other regions. -+* The backend ensures there are no duplicate leases for the current region. -+* The backend will create notifications in the same order as they occur. -+* Past leases given to disconnected clients are not guaranteed to be unique -+* The backend is responsible for cleanups of leases; if all clients disconnect the only leases that should be left is the permanent leases. 
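-+
-+## Example: temporary leases with the memory backend
-+A minimal sketch of how a library built on top of the core interacts with a backend, using the
-+in-memory backend from `cn-memory` (intended for testing only). The types and method signatures
-+(`CloudnameBackend`, `CloudnamePath`, `LeaseHandle`, `LeaseListener`, `MemoryBackend`) are the ones
-+declared in this repository; the path elements and lease data below are placeholder values.
-+
-+```java
-+import org.cloudname.backends.memory.MemoryBackend;
-+import org.cloudname.core.CloudnameBackend;
-+import org.cloudname.core.CloudnamePath;
-+import org.cloudname.core.LeaseHandle;
-+import org.cloudname.core.LeaseListener;
-+
-+public class LeaseExample {
-+    public static void main(final String[] args) throws Exception {
-+        try (CloudnameBackend backend = new MemoryBackend()) {
-+            final CloudnamePath path = new CloudnamePath(new String[] { ""region"", ""tag"", ""service"" });
-+
-+            // Observe all leases under the path; listeners are also notified of already existing leases.
-+            backend.addTemporaryLeaseListener(path, new LeaseListener() {
-+                @Override
-+                public void leaseCreated(final CloudnamePath p, final String data) {
-+                    System.out.println(""created "" + p + "" -> "" + data);
-+                }
-+
-+                @Override
-+                public void leaseRemoved(final CloudnamePath p) {
-+                    System.out.println(""removed "" + p);
-+                }
-+
-+                @Override
-+                public void dataChanged(final CloudnamePath p, final String data) {
-+                    System.out.println(""changed "" + p + "" -> "" + data);
-+                }
-+            });
-+
-+            // Acquire a temporary lease; the backend appends a unique instance element to the path.
-+            try (LeaseHandle lease = backend.createTemporaryLease(path, ""initial-data"")) {
-+                lease.writeLeaseData(""updated-data"");
-+            } // closing the handle releases the lease and triggers leaseRemoved
-+        }
-+    }
-+}
-+```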
-diff --git a/cn-core/pom.xml b/cn-core/pom.xml -new file mode 100644 -index 00000000..1c8fe5f9 ---- /dev/null -+++ b/cn-core/pom.xml -@@ -0,0 +1,46 @@ -+ -+ 4.0.0 -+ -+ -+ org.cloudname -+ cloudname-parent -+ 3.0-SNAPSHOT -+ -+ -+ cn-core -+ jar -+ -+ Cloudname Library -+ Managing distributed resources -+ https://github.com/Cloudname/cloudname -+ -+ -+ -+ junit -+ junit -+ test -+ -+ -+ -+ org.hamcrest -+ hamcrest-all -+ 1.3 -+ -+ -+ -+ -+ -+ -+ -+ org.apache.maven.plugins -+ maven-surefire-plugin -+ -+ -+ org.apache.maven.plugins -+ maven-compiler-plugin -+ -+ -+ -+ -+ -diff --git a/cn-core/src/main/java/org/cloudname/core/CloudnameBackend.java b/cn-core/src/main/java/org/cloudname/core/CloudnameBackend.java -new file mode 100644 -index 00000000..ed7f2270 ---- /dev/null -+++ b/cn-core/src/main/java/org/cloudname/core/CloudnameBackend.java -@@ -0,0 +1,133 @@ -+package org.cloudname.core; -+ -+/** -+ * Backends implement this interface. Clients won't use this interface; the logic is handled by the -+ * libaries built on top of the backend. Each backend provides a few basic primitives that must be -+ * implemented. One caveat: The backend is responsible for cleaning up unused paths. The clients won't -+ * remote unused elements. -+ * -+ * There are two kinds of leases - permanent and temporary. The permanent leases persist in the -+ * backend and aren't removed when clients disconnect, even if *all* clients disconnects. -+ * The temporary leases are removed by the backend when the client closes. Note that clients might -+ * not be well-behaved and may terminate without calling close(). The backend should remove -+ * these leases automatically. -+ * -+ * Clients listen on both kinds of leases and get notifications through listeners whenever something -+ * is changed. Notifications to the clients are sent in the same order they are received. -+ * -+ * Each lease have a data string attached to the lease and clients may update this freely. -+ * -+ * @author stalehd@gmail.com -+ */ -+public interface CloudnameBackend extends AutoCloseable { -+ /** -+ * Create a temporary lease. The temporary lease is limited by the client's connection and will -+ * be available for as long as the client is connected to the backend. Once the client -+ * disconnects (either through the LeaseHandle instance that is returned or just vanishing -+ * from the face of the earth) the lease is removed by the backend. The backend should support -+ * an unlimited number of leases (FSVO ""unlimited"") -+ * -+ * @param path Path to temporary lease. This value cannot be null. The path supplied by the -+ * client is just the stem of the full lease, i.e. if a client supplies foo:bar the backend -+ * will return an unique path to the client which represent the lease (for ""foo:bar"" the -+ * backend might return ""foo:bar:uniqueid0"", ""foo:bar:uniqueid1""... to clients acquiring -+ * the lease. -+ * -+ * @param data Temporary lease data. This is an arbitrary string supplied by the client. It -+ * carries no particular semantics for the backend and the backend only have to return the -+ * same string to the client. This value cannot be null. -+ * -+ * @return A LeaseHandle instance that the client can use to manipulate its data or release -+ * the lease (ie closing it). -+ */ -+ LeaseHandle createTemporaryLease(final CloudnamePath path, final String data); -+ -+ /** -+ * Update a client's lease. 
Normally this is something the client does itself but libraries -+ * built on top of the backends might use it to set additional properties. -+ * @param path Path to the temporary lease. -+ * @param data The updated lease data. -+ * @return True if successful, false otherwise -+ */ -+ boolean writeTemporaryLeaseData(final CloudnamePath path, final String data); -+ -+ /** -+ * Read temporary lease data. Clients won't use this in regular use but rather monitor changes -+ * through the listeners but libraries built on top of the backend might read the data. -+ * -+ * @param path Path to the client lease. -+ * @return The data stored in the client lease. -+ */ -+ String readTemporaryLeaseData(final CloudnamePath path); -+ -+ /** -+ * Add a listener to a set of temporary leases identified by a path. The temporary leases -+ * doesn't have to exist but as soon as someone creates a lease matching the given path a -+ * notification must be sent by the backend implementation. -+ * -+ * @param pathToObserve The path to observe for changes. -+ * @param listener Client's listener. Callbacks on this listener will be invoked by the backend. -+ */ -+ void addTemporaryLeaseListener(final CloudnamePath pathToObserve, final LeaseListener listener); -+ -+ /** -+ * Remove a previously attached listener. The backend will ignore leases that doesn't exist. -+ * -+ * @param listener The listener to remove -+ */ -+ void removeTemporaryLeaseListener(final LeaseListener listener); -+ -+ /** -+ * Create a permanent lease. A permanent lease persists even if the client that created it -+ * terminates or closes the connection. Other clients will still see the lease. Permanent leases -+ * must persist until they are explicitly removed. -+ * -+ * All permanent leases must be unique. Duplicate permanent leases yields an error. -+ * -+ * @param path Path to the permanent lease. -+ * @param data Data to store in the permanent lease when it is created. -+ * @return true if successful -+ */ -+ boolean createPermanantLease(final CloudnamePath path, final String data); -+ -+ /** -+ * Remove a permanent lease. The lease will be removed and clients listening on the lease -+ * will be notified. -+ * -+ * @param path The path to the lease -+ * @return true if lease is removed. -+ */ -+ boolean removePermanentLease(final CloudnamePath path); -+ -+ /** -+ * Update data on permanent lease. -+ * -+ * @param path path to the permanent lease -+ * @param data data to write to the lease -+ * @return true if successful -+ */ -+ boolean writePermanentLeaseData(final CloudnamePath path, final String data); -+ -+ /** -+ * Read data from permanent lease. -+ * -+ * @param path path to permanent lease -+ * @return data stored in lease or null if the lease doesn't exist -+ */ -+ String readPermanentLeaseData(final CloudnamePath path); -+ -+ /** -+ * Add a listener to a permanent lease. The listener is attached to just one lease, as opposed -+ * to the termporary lease listener. -+ * -+ * @param pathToObserver Path to lease -+ * @param listener Listener. Callbacks on this listener is invoked by the backend. -+ */ -+ void addPermanentLeaseListener(final CloudnamePath pathToObserver, final LeaseListener listener); -+ -+ /** -+ * Remove listener on permanent lease. Unknown listeners are ignored by the backend. 
-+ * @param listener The listener to remove -+ */ -+ void removePermanentLeaseListener(final LeaseListener listener); -+} -diff --git a/cn-core/src/main/java/org/cloudname/core/CloudnamePath.java b/cn-core/src/main/java/org/cloudname/core/CloudnamePath.java -new file mode 100644 -index 00000000..269d57cd ---- /dev/null -+++ b/cn-core/src/main/java/org/cloudname/core/CloudnamePath.java -@@ -0,0 +1,195 @@ -+package org.cloudname.core; -+ -+import java.util.Arrays; -+import java.util.regex.Matcher; -+import java.util.regex.Pattern; -+ -+/** -+ * A generic representation of a path. A ""path"" might be a bit of a misnomer in the actual -+ * backend implementation but it can be represented as an uniquely identifying string for the -+ * leases handed out. A path can be split into elements which can be accessed individually. -+ * -+ * Paths are an ordered set of strings consisting of the characters according to RFC 952 and -+ * RFC 1123, ie [a-z,0-9,-]. The names cannot start or end with an hyphen and can be between -+ * 1 and 63 characters long. -+ * -+ * @author stalehd@gmail.com -+ */ -+public class CloudnamePath { -+ private final String[] pathElements; -+ private static final Pattern NAME_PATTERN = Pattern.compile(""[a-z0-9-]*""); -+ -+ /** -+ * Check if path element is a valid name according to RFC 953/RCC 1123 -+ * -+ * @param name The element to check -+ * @return true if element is a valid stirng -+ */ -+ public static boolean isValidPathElementName(final String name) { -+ if (name == null || name.isEmpty()) { -+ return false; -+ } -+ -+ final Matcher matcher = NAME_PATTERN.matcher(name); -+ if (!matcher.matches()) { -+ return false; -+ } -+ if (name.length() > 64) { -+ return false; -+ } -+ if (name.charAt(0) == '-' || name.charAt(name.length() - 1) == '-') { -+ return false; -+ } -+ return true; -+ } -+ -+ /** -+ * @param pathElements the string array to create the path from. Order is preserved so -+ * pathElements[0] corresponds to the first element in the path. -+ * @throws AssertionError if the pathElements parameter is null. 
-+ */ -+ public CloudnamePath(final String[] pathElements) { -+ if (pathElements == null) { -+ throw new IllegalArgumentException(""Path elements can not be null""); -+ } -+ this.pathElements = new String[pathElements.length]; -+ for (int i = 0; i < pathElements.length; i++) { -+ if (pathElements[i] == null) { -+ throw new IllegalArgumentException(""Path element at index "" + i + "" is null""); -+ } -+ final String element = pathElements[i].toLowerCase(); -+ if (!isValidPathElementName(element)) { -+ throw new IllegalArgumentException(""Name element "" + element + "" isn't a valid name""); -+ } -+ this.pathElements[i] = element; -+ } -+ } -+ -+ /** -+ * Create a new path based on an existing one by appending a new element -+ * -+ * @param path The original CloudnamePath instance -+ * @param additionalElement Element to append to the end of the original path -+ * @throws AssertionError if one or more of the parameters are null -+ */ -+ public CloudnamePath(final CloudnamePath path, final String additionalElement) { -+ if (path == null) { -+ throw new IllegalArgumentException(""Path can not be null""); -+ } -+ if (additionalElement == null) { -+ throw new IllegalArgumentException(""additionalElement can not be null""); -+ } -+ -+ if (!isValidPathElementName(additionalElement)) { -+ throw new IllegalArgumentException(additionalElement + "" isn't a valid path name""); -+ } -+ this.pathElements = Arrays.copyOf(path.pathElements, path.pathElements.length + 1); -+ this.pathElements[this.pathElements.length - 1] = additionalElement; -+ -+ } -+ -+ /** -+ * @return the number of elements in the path -+ */ -+ public int length() { -+ return pathElements.length; -+ } -+ -+ /** -+ * Join the path elements into a string, f.e. join ""foo"", ""bar"" into ""foo:bar"" -+ * -+ * @param separator separator character between elements -+ * @return joined elements -+ */ -+ public String join(final char separator) { -+ final StringBuilder sb = new StringBuilder(); -+ boolean first = true; -+ for (final String element : pathElements) { -+ if (!first) { -+ sb.append(separator); -+ } -+ sb.append(element); -+ first = false; -+ } -+ return sb.toString(); -+ } -+ -+ /** -+ * @param index index of element -+ * @return element at index -+ * @throws IndexOutOfBoundsException if the index is out of range -+ */ -+ public String get(final int index) { -+ return pathElements[index]; -+ } -+ -+ /** -+ * Check if this path is a subpath. A path is a subpath whenever it starts with the -+ * same elements as the other path (""foo/bar/baz"" would be a subpath of ""foo/bar/baz/baz"" -+ * but not of ""bar/foo"") -+ * -+ * @param other Path to check -+ * @return true if this path is a subpath of the specified path -+ */ -+ public boolean isSubpathOf(final CloudnamePath other) { -+ if (other == null) { -+ return false; -+ } -+ if (this.pathElements.length > other.pathElements.length) { -+ return false; -+ } -+ -+ if (this.pathElements.length == 0) { -+ // This is an empty path. It is the subpath of any other path. -+ return true; -+ } -+ -+ for (int i = 0; i < this.pathElements.length; i++) { -+ if (!other.pathElements[i].equals(this.pathElements[i])) { -+ return false; -+ } -+ } -+ -+ return true; -+ } -+ -+ /** -+ * @return parent path of current. 
If this is the root path (ie it is empty), return the -+ * current path -+ */ -+ public CloudnamePath getParent() { -+ if (this.pathElements.length == 0) { -+ return this; -+ } -+ return new CloudnamePath(Arrays.copyOf(pathElements, this.pathElements.length - 1)); -+ } -+ -+ @Override -+ public boolean equals(final Object other) { -+ if (other == null || !(other instanceof CloudnamePath)) { -+ return false; -+ } -+ final CloudnamePath otherPath = (CloudnamePath) other; -+ if (otherPath.pathElements.length != pathElements.length) { -+ return false; -+ } -+ for (int i = 0; i < pathElements.length; i++) { -+ if (!pathElements[i].equals(otherPath.pathElements[i])) { -+ return false; -+ } -+ } -+ return true; -+ } -+ -+ @Override -+ public int hashCode() { -+ return Arrays.hashCode(pathElements); -+ } -+ -+ @Override -+ public String toString() { -+ return ""[ CloudnamePath ("" + Arrays.toString(pathElements) + "") ]""; -+ } -+ -+ -+} -diff --git a/cn-core/src/main/java/org/cloudname/core/LeaseHandle.java b/cn-core/src/main/java/org/cloudname/core/LeaseHandle.java -new file mode 100644 -index 00000000..45b79740 ---- /dev/null -+++ b/cn-core/src/main/java/org/cloudname/core/LeaseHandle.java -@@ -0,0 +1,21 @@ -+package org.cloudname.core; -+ -+/** -+ * Handle returned by the backend when a temporary lease is created. -+ * -+ * @author stalehd@gmail.com -+ */ -+public interface LeaseHandle extends AutoCloseable { -+ /** -+ * Write data to the lease. -+ * -+ * @param data data to write. Cannot be null. -+ * @return true if data is written -+ */ -+ boolean writeLeaseData(final String data); -+ -+ /** -+ * @return The full path of the lease -+ */ -+ CloudnamePath getLeasePath(); -+} -\ No newline at end of file -diff --git a/cn-core/src/main/java/org/cloudname/core/LeaseListener.java b/cn-core/src/main/java/org/cloudname/core/LeaseListener.java -new file mode 100644 -index 00000000..1586b100 ---- /dev/null -+++ b/cn-core/src/main/java/org/cloudname/core/LeaseListener.java -@@ -0,0 +1,31 @@ -+package org.cloudname.core; -+ -+/** -+ * Lease notifications to clients. -+ * -+ * @author stalehd@gmail.com -+ */ -+public interface LeaseListener { -+ /** -+ * A new lease is created. The lease is created at this point in time. -+ * -+ * @param path The full path of the lease -+ * @param data The data stored on the lease -+ */ -+ void leaseCreated(final CloudnamePath path, final String data); -+ -+ /** -+ * A lease is removed. The lease might not exist anymore at this point in time. -+ * -+ * @param path The path of the lease. -+ */ -+ void leaseRemoved(final CloudnamePath path); -+ -+ /** -+ * Lease data have changed in one of the leases the client is listening on. 
-+ * -+ * @param path Full path to the lease that have changed -+ * @param data The new data element stored in the lease -+ */ -+ void dataChanged(final CloudnamePath path, final String data); -+} -diff --git a/cn-core/src/test/java/org/cloudname/core/CloudnamePathTest.java b/cn-core/src/test/java/org/cloudname/core/CloudnamePathTest.java -new file mode 100644 -index 00000000..0e6bff9b ---- /dev/null -+++ b/cn-core/src/test/java/org/cloudname/core/CloudnamePathTest.java -@@ -0,0 +1,204 @@ -+package org.cloudname.core; -+ -+import org.junit.Test; -+ -+import static org.hamcrest.CoreMatchers.equalTo; -+import static org.hamcrest.CoreMatchers.is; -+import static org.hamcrest.CoreMatchers.notNullValue; -+import static org.junit.Assert.assertThat; -+import static org.junit.Assert.assertTrue; -+import static org.junit.Assert.fail; -+ -+/** -+ * Test the CloudnamePath class. -+ */ -+public class CloudnamePathTest { -+ private final String[] emptyElements = new String[] {}; -+ private final String[] oneElement = new String[] { ""foo"" }; -+ private final String[] twoElements = new String[] { ""foo"", ""bar"" }; -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void elementsCantBeNull() { -+ new CloudnamePath(null); -+ fail(""No exception, no pass for you!""); -+ } -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void pathCantBeNull() { -+ new CloudnamePath(null, ""foof""); -+ } -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void additionalElementCantBeNull() { -+ new CloudnamePath(new CloudnamePath(new String[] { ""foo"" }), null); -+ } -+ -+ @Test -+ public void appendPath() { -+ final CloudnamePath singleElement = new CloudnamePath(new String[] { ""one"" }); -+ final CloudnamePath twoElements = new CloudnamePath(new String[] { ""one"", ""two"" }); -+ assertThat(""Elements aren't equal"", singleElement.equals(twoElements), is(false)); -+ final CloudnamePath appendedElement = new CloudnamePath(singleElement, ""two""); -+ assertThat(""Appended are equal"", appendedElement.equals(twoElements), is(true)); -+ } -+ -+ @Test -+ public void elementAccess() { -+ final CloudnamePath path = new CloudnamePath(twoElements); -+ assertThat(path.get(0), is(twoElements[0])); -+ assertThat(path.get(1), is(twoElements[1])); -+ } -+ -+ @Test (expected = IndexOutOfBoundsException.class) -+ public void elementAccessMustBeWithinBounds() { -+ final CloudnamePath path = new CloudnamePath(twoElements); -+ path.get(2); -+ } -+ -+ @Test -+ public void joinPaths() { -+ final CloudnamePath empty = new CloudnamePath(emptyElements); -+ assertThat(""The empty path is length = 0"", empty.length(), is(0)); -+ assertThat(""String representation of emmpty path is empty string"", empty.join('.'), is("""")); -+ -+ final CloudnamePath one = new CloudnamePath(oneElement); -+ assertThat(""A single element path has length 1"", one.length(), is(1)); -+ assertThat(""String representation of a single element path is the element"", -+ one.join('.'), is(oneElement[0])); -+ -+ final CloudnamePath two = new CloudnamePath(twoElements); -+ assertThat(""Two element paths have length 2"", two.length(), is(2)); -+ assertThat(""String representation of two element paths includes both elements"", -+ two.join('.'), is(twoElements[0] + '.' 
+ twoElements[1])); -+ } -+ -+ @Test -+ public void equalsTest() { -+ final CloudnamePath twoA = new CloudnamePath(twoElements); -+ final CloudnamePath twoB = new CloudnamePath(twoElements); -+ final CloudnamePath none = new CloudnamePath(emptyElements); -+ final CloudnamePath entirelyDifferent = new CloudnamePath(new String[] { ""foo"", ""2"" }); -+ -+ assertThat(""Identical paths are equal"", twoA.equals(twoB), is(true)); -+ assertThat(""Hash codes for equal objects are the same"", -+ twoA.hashCode(), is(twoB.hashCode())); -+ assertThat(""Identical paths are equal, ignore order"", twoB.equals(twoA), is(true)); -+ assertThat(""Paths aren't equal to strings"", twoA.equals(""""), is(false)); -+ assertThat(""Empty path does not equal actual path"", twoA.equals(none), is(false)); -+ assertThat(""Null elements aren't equal"", twoA.equals(null), is(false)); -+ assertThat(""Differen is just different"", twoA.equals(entirelyDifferent), is(false)); -+ } -+ -+ @Test -+ public void subpaths() { -+ final String[] e1 = new String[] { ""1"", ""2"", ""3"", ""4"" }; -+ final String[] e2 = new String[] { ""1"", ""2"" }; -+ -+ final CloudnamePath first = new CloudnamePath(e1); -+ final CloudnamePath second = new CloudnamePath(e2); -+ final CloudnamePath last = new CloudnamePath(twoElements); -+ -+ -+ assertThat(""More specific paths can't be subpaths"", first.isSubpathOf(second), is(false)); -+ assertThat(""More generic paths are subpaths"", second.isSubpathOf(first), is(true)); -+ assertThat(""A path can be subpath of itself"", first.isSubpathOf(first), is(true)); -+ -+ assertThat(""Paths must match at root levels"", last.isSubpathOf(second), is(false)); -+ -+ assertThat(""Null paths are not subpaths of anything"", first.isSubpathOf(null), is(false)); -+ -+ final CloudnamePath empty = new CloudnamePath(emptyElements); -+ assertThat(""An empty path is a subpath of everything"", empty.isSubpathOf(first), is(true)); -+ assertThat(""Empty paths can't have subpaths"", first.isSubpathOf(empty), is(false)); -+ } -+ -+ @Test -+ public void parentPaths() { -+ final CloudnamePath originalPath = new CloudnamePath(new String[] { ""foo"", ""bar"", ""baz"" }); -+ -+ assertTrue(originalPath.getParent().isSubpathOf(originalPath)); -+ -+ assertThat(originalPath.getParent(), is(equalTo( -+ new CloudnamePath(new String[] { ""foo"", ""bar"" })))); -+ -+ assertThat(originalPath.getParent().getParent(), -+ is(equalTo(new CloudnamePath(new String[] { ""foo"" })))); -+ -+ final CloudnamePath emptyPath = new CloudnamePath(new String[] { }); -+ -+ assertThat(originalPath.getParent().getParent().getParent(), -+ is(equalTo(emptyPath))); -+ -+ assertThat(originalPath.getParent().getParent().getParent().getParent(), -+ is(equalTo(emptyPath))); -+ -+ assertThat(emptyPath.getParent(), is(equalTo(emptyPath))); -+ } -+ @Test -+ public void testToString() { -+ final CloudnamePath one = new CloudnamePath(oneElement); -+ final CloudnamePath two = new CloudnamePath(twoElements); -+ final CloudnamePath three = new CloudnamePath(emptyElements); -+ -+ assertThat(one.toString(), is(notNullValue())); -+ assertThat(two.toString(), is(notNullValue())); -+ assertThat(three.toString(), is(notNullValue())); -+ } -+ -+ @Test -+ public void invalidPathNameWithHyphenFirst() { -+ assertThat(CloudnamePath.isValidPathElementName(""-invalid""), is(false)); -+ } -+ -+ @Test -+ public void invalidPathNameIsNull() { -+ assertThat(CloudnamePath.isValidPathElementName(null), is(false)); -+ } -+ @Test -+ public void invalidPathNameWithHyphenLast() { -+ 
assertThat(CloudnamePath.isValidPathElementName(""invalid-""), is(false)); -+ } -+ -+ @Test -+ public void invalidPathNameWithEmptyString() { -+ assertThat(CloudnamePath.isValidPathElementName(""""), is(false)); -+ } -+ -+ @Test -+ public void invalidPathNameWithIllegalChars() { -+ assertThat(CloudnamePath.isValidPathElementName(""__""), is(false)); -+ } -+ -+ @Test -+ public void invalidPathNameWithTooLongLabel() { -+ assertThat(CloudnamePath.isValidPathElementName( -+ ""rindfleischetikettierungsueberwachungsaufgabenuebertragungsgesetz""), is(false)); -+ } -+ -+ @Test -+ public void labelNamesAreCaseInsensitive() { -+ final CloudnamePath one = new CloudnamePath(new String[] { ""FirstSecond"" }); -+ final CloudnamePath two = new CloudnamePath(new String[] { ""fIRSTsECOND"" }); -+ assertTrue(""Label names aren't case sensitive"", one.equals(two)); -+ } -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void pathCanNotBeNull() { -+ new CloudnamePath(null); -+ } -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void pathElementsCanNotBeNull() { -+ new CloudnamePath(new String[] { null, null }); -+ } -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void pathElementNamesCanNotBeInvalid() { -+ new CloudnamePath(new String[] { ""__"", ""foo"", ""bar""}); -+ } -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void additionalElementsMustBeValid() { -+ new CloudnamePath(new CloudnamePath(new String[] { ""foo"" }), ""__""); -+ } -+} -diff --git a/cn-memory/README.md b/cn-memory/README.md -new file mode 100644 -index 00000000..bea9e848 ---- /dev/null -+++ b/cn-memory/README.md -@@ -0,0 +1,4 @@ -+# Memory-based backend -+ -+This backend is only suitable for testing. It will only work in a single -+VM. -diff --git a/cn-memory/pom.xml b/cn-memory/pom.xml -new file mode 100644 -index 00000000..ea56d0c7 ---- /dev/null -+++ b/cn-memory/pom.xml -@@ -0,0 +1,57 @@ -+ -+ 4.0.0 -+ -+ -+ org.cloudname -+ cloudname-parent -+ 3.0-SNAPSHOT -+ -+ -+ cn-memory -+ jar -+ -+ Cloudname Memory backend -+ Memory backend for Cloudname -+ https://github.com/Cloudname/cloudname -+ -+ -+ -+ org.cloudname -+ cn-core -+ -+ -+ -+ junit -+ junit -+ test -+ -+ -+ -+ org.hamcrest -+ hamcrest-all -+ 1.3 -+ -+ -+ -+ org.cloudname -+ testtools -+ test -+ -+ -+ -+ -+ -+ -+ -+ org.apache.maven.plugins -+ maven-surefire-plugin -+ -+ -+ org.apache.maven.plugins -+ maven-compiler-plugin -+ -+ -+ -+ -+ -diff --git a/cn-memory/src/main/java/org/cloudname/backends/memory/MemoryBackend.java b/cn-memory/src/main/java/org/cloudname/backends/memory/MemoryBackend.java -new file mode 100644 -index 00000000..0a869de6 ---- /dev/null -+++ b/cn-memory/src/main/java/org/cloudname/backends/memory/MemoryBackend.java -@@ -0,0 +1,270 @@ -+package org.cloudname.backends.memory; -+ -+import org.cloudname.core.CloudnameBackend; -+import org.cloudname.core.CloudnamePath; -+import org.cloudname.core.LeaseHandle; -+import org.cloudname.core.LeaseListener; -+ -+import java.util.HashMap; -+import java.util.HashSet; -+import java.util.Map; -+import java.util.Random; -+import java.util.Set; -+ -+/** -+ * Memory backend. This is the canonical implementation. The synchronization is probably not -+ * optimal but for testing this is OK. It defines the correct behaviour for backends, including -+ * calling listeners, return values and uniqueness. The actual timing of the various backends -+ * will of course vary. 
-+ * -+ * @author stalehd@gmail.com -+ */ -+public class MemoryBackend implements CloudnameBackend { -+ private enum LeaseEvent { -+ CREATED, -+ REMOVED, -+ DATA -+ } -+ -+ private final Map temporaryLeases = new HashMap<>(); -+ private final Map permanentLeases = new HashMap<>(); -+ private final Map> observedTemporaryPaths = new HashMap<>(); -+ private final Map> observedPermanentPaths = new HashMap<>(); -+ private final Object syncObject = new Object(); -+ -+ /* package-private */ void removeTemporaryLease(final CloudnamePath leasePath) { -+ synchronized (syncObject) { -+ if (temporaryLeases.containsKey(leasePath)) { -+ temporaryLeases.remove(leasePath); -+ notifyTemporaryObservers(leasePath, LeaseEvent.REMOVED, null); -+ } -+ } -+ } -+ private final Random random = new Random(); -+ -+ private String createRandomInstanceName() { -+ return Long.toHexString(random.nextLong()); -+ } -+ -+ /** -+ * @param path The path that has changed -+ * @param event The event -+ * @param data The data -+ */ -+ private void notifyTemporaryObservers( -+ final CloudnamePath path, final LeaseEvent event, final String data) { -+ for (final CloudnamePath observedPath : observedTemporaryPaths.keySet()) { -+ if (observedPath.isSubpathOf(path)) { -+ for (final LeaseListener listener : observedTemporaryPaths.get(observedPath)) { -+ switch (event) { -+ case CREATED: -+ listener.leaseCreated(path, data); -+ break; -+ case REMOVED: -+ listener.leaseRemoved(path); -+ break; -+ case DATA: -+ listener.dataChanged(path, data); -+ break; -+ default: -+ throw new RuntimeException(""Don't know how to handle "" + event); -+ } -+ } -+ } -+ } -+ } -+ -+ /** -+ * Notify observers of changes -+ */ -+ private void notifyPermanentObservers( -+ final CloudnamePath path, final LeaseEvent event, final String data) { -+ for (final CloudnamePath observedPath : observedPermanentPaths.keySet()) { -+ if (observedPath.isSubpathOf(path)) { -+ for (final LeaseListener listener : observedPermanentPaths.get(observedPath)) { -+ switch (event) { -+ case CREATED: -+ listener.leaseCreated(path, data); -+ break; -+ case REMOVED: -+ listener.leaseRemoved(path); -+ break; -+ case DATA: -+ listener.dataChanged(path, data); -+ break; -+ default: -+ throw new RuntimeException(""Don't know how to handle "" + event); -+ } -+ } -+ } -+ } -+ } -+ -+ @Override -+ public boolean createPermanantLease(final CloudnamePath path, final String data) { -+ assert path != null : ""Path to lease must be set!""; -+ assert data != null : ""Lease data is required""; -+ synchronized (syncObject) { -+ if (permanentLeases.containsKey(path)) { -+ return false; -+ } -+ permanentLeases.put(path, data); -+ notifyPermanentObservers(path, LeaseEvent.CREATED, data); -+ } -+ return true; -+ } -+ -+ @Override -+ public boolean removePermanentLease(final CloudnamePath path) { -+ synchronized (syncObject) { -+ if (!permanentLeases.containsKey(path)) { -+ return false; -+ } -+ permanentLeases.remove(path); -+ notifyPermanentObservers(path, LeaseEvent.REMOVED, null); -+ } -+ return true; -+ } -+ -+ @Override -+ public boolean writePermanentLeaseData(final CloudnamePath path, String data) { -+ synchronized (syncObject) { -+ if (!permanentLeases.containsKey(path)) { -+ return false; -+ } -+ permanentLeases.put(path, data); -+ notifyPermanentObservers(path, LeaseEvent.DATA, data); -+ } -+ return true; -+ } -+ -+ @Override -+ public String readPermanentLeaseData(final CloudnamePath path) { -+ synchronized (syncObject) { -+ if (!permanentLeases.containsKey(path)) { -+ return null; -+ } -+ 
return permanentLeases.get(path); -+ } -+ } -+ -+ @Override -+ public boolean writeTemporaryLeaseData(final CloudnamePath path, String data) { -+ synchronized (syncObject) { -+ if (!temporaryLeases.containsKey(path)) { -+ return false; -+ } -+ temporaryLeases.put(path, data); -+ notifyTemporaryObservers(path, LeaseEvent.DATA, data); -+ } -+ return true; -+ } -+ -+ @Override -+ public String readTemporaryLeaseData(final CloudnamePath path) { -+ synchronized (syncObject) { -+ if (!temporaryLeases.containsKey(path)) { -+ return null; -+ } -+ return temporaryLeases.get(path); -+ } -+ } -+ -+ @Override -+ public LeaseHandle createTemporaryLease(final CloudnamePath path, final String data) { -+ synchronized (syncObject) { -+ final String instanceName = createRandomInstanceName(); -+ CloudnamePath instancePath = new CloudnamePath(path, instanceName); -+ while (temporaryLeases.containsKey(instancePath)) { -+ instancePath = new CloudnamePath(path, instanceName); -+ } -+ temporaryLeases.put(instancePath, data); -+ notifyTemporaryObservers(instancePath, LeaseEvent.CREATED, data); -+ return new MemoryLeaseHandle(this, instancePath); -+ } -+ } -+ -+ /** -+ * Generate created events for temporary leases for newly attached listeners. -+ */ -+ private void regenerateEventsForTemporaryListener( -+ final CloudnamePath path, final LeaseListener listener) { -+ for (final CloudnamePath temporaryPath : temporaryLeases.keySet()) { -+ if (path.isSubpathOf(temporaryPath)) { -+ listener.leaseCreated(temporaryPath, temporaryLeases.get(temporaryPath)); -+ } -+ } -+ } -+ -+ /** -+ * Generate created events on permanent leases for newly attached listeners. -+ */ -+ private void regenerateEventsForPermanentListener( -+ final CloudnamePath path, final LeaseListener listener) { -+ for (final CloudnamePath permanentPath : permanentLeases.keySet()) { -+ if (path.isSubpathOf(permanentPath)) { -+ listener.leaseCreated(permanentPath, permanentLeases.get(permanentPath)); -+ } -+ } -+ } -+ -+ @Override -+ public void addTemporaryLeaseListener( -+ final CloudnamePath pathToObserve, final LeaseListener listener) { -+ synchronized (syncObject) { -+ Set listeners = observedTemporaryPaths.get(pathToObserve); -+ if (listeners == null) { -+ listeners = new HashSet<>(); -+ } -+ listeners.add(listener); -+ observedTemporaryPaths.put(pathToObserve, listeners); -+ regenerateEventsForTemporaryListener(pathToObserve, listener); -+ } -+ } -+ -+ @Override -+ public void removeTemporaryLeaseListener(final LeaseListener listener) { -+ synchronized (syncObject) { -+ for (final Set listeners : observedTemporaryPaths.values()) { -+ if (listeners.contains(listener)) { -+ listeners.remove(listener); -+ return; -+ } -+ } -+ } -+ } -+ -+ @Override -+ public void addPermanentLeaseListener( -+ final CloudnamePath pathToObserve, final LeaseListener listener) { -+ synchronized (syncObject) { -+ Set listeners = observedPermanentPaths.get(pathToObserve); -+ if (listeners == null) { -+ listeners = new HashSet<>(); -+ } -+ listeners.add(listener); -+ observedPermanentPaths.put(pathToObserve, listeners); -+ regenerateEventsForPermanentListener(pathToObserve, listener); -+ } -+ } -+ -+ @Override -+ public void removePermanentLeaseListener(final LeaseListener listener) { -+ synchronized (syncObject) { -+ for (final Set listeners : observedPermanentPaths.values()) { -+ if (listeners.contains(listener)) { -+ listeners.remove(listener); -+ return; -+ } -+ } -+ } -+ } -+ -+ @Override -+ public void close() { -+ synchronized (syncObject) { -+ 
observedTemporaryPaths.clear(); -+ observedPermanentPaths.clear(); -+ } -+ } -+} -diff --git a/cn-memory/src/main/java/org/cloudname/backends/memory/MemoryLeaseHandle.java b/cn-memory/src/main/java/org/cloudname/backends/memory/MemoryLeaseHandle.java -new file mode 100644 -index 00000000..53b1e277 ---- /dev/null -+++ b/cn-memory/src/main/java/org/cloudname/backends/memory/MemoryLeaseHandle.java -@@ -0,0 +1,47 @@ -+package org.cloudname.backends.memory; -+ -+import org.cloudname.core.CloudnamePath; -+import org.cloudname.core.LeaseHandle; -+ -+import java.io.IOException; -+import java.util.concurrent.atomic.AtomicBoolean; -+ -+/** -+ * A handle returned to clients acquiring temporary leases. -+ * -+ * @author stalehd@gmail.com -+ */ -+public class MemoryLeaseHandle implements LeaseHandle { -+ private final MemoryBackend backend; -+ private final CloudnamePath clientLeasePath; -+ private AtomicBoolean expired = new AtomicBoolean(false); -+ -+ /** -+ * @param backend The backend issuing the lease -+ * @param clientLeasePath The path to the lease -+ */ -+ public MemoryLeaseHandle(final MemoryBackend backend, final CloudnamePath clientLeasePath) { -+ this.backend = backend; -+ this.clientLeasePath = clientLeasePath; -+ expired.set(false); -+ } -+ -+ @Override -+ public boolean writeLeaseData(String data) { -+ return backend.writeTemporaryLeaseData(clientLeasePath, data); -+ } -+ -+ @Override -+ public CloudnamePath getLeasePath() { -+ if (expired.get()) { -+ return null; -+ } -+ return clientLeasePath; -+ } -+ -+ @Override -+ public void close() throws IOException { -+ backend.removeTemporaryLease(clientLeasePath); -+ expired.set(true); -+ } -+} -diff --git a/cn-memory/src/test/java/org/cloudname/backends/memory/MemoryBackendTest.java b/cn-memory/src/test/java/org/cloudname/backends/memory/MemoryBackendTest.java -new file mode 100644 -index 00000000..acc1ad8e ---- /dev/null -+++ b/cn-memory/src/test/java/org/cloudname/backends/memory/MemoryBackendTest.java -@@ -0,0 +1,18 @@ -+package org.cloudname.backends.memory; -+ -+import org.cloudname.core.CloudnameBackend; -+import org.cloudname.testtools.backend.CoreBackendTest; -+ -+/** -+ * Test the memory backend. Since the memory backend is the reference implementation this test -+ * shouldn't fail. Ever. -+ */ -+public class MemoryBackendTest extends CoreBackendTest { -+ private static final CloudnameBackend BACKEND = new MemoryBackend(); -+ -+ @Override -+ protected CloudnameBackend getBackend() { -+ return BACKEND; -+ } -+ -+} -diff --git a/cn-service/README.md b/cn-service/README.md -new file mode 100644 -index 00000000..8cf7df15 ---- /dev/null -+++ b/cn-service/README.md -@@ -0,0 +1,107 @@ -+# Cloudname service discovery -+ -+## Coordinates -+Each service that runs is represented by a **coordinate**. There are two kinds of coordinates: -+* **Service coordinates** which are generic coordinates that points to one or more services -+* **Instance coordinates** which points to a particular service -+ -+Coordinates are specified through **regions** and **tags**. A **region** is a separate (logical) cluster of services. One region is usually not connected to another region. The simplest comparison is either a *data center* or an AWS *region* or *availability zone* (like eu-west-1, us-east-1 and so on). -+ -+The **tag** is just that - a tag that you can assign to a cluster of different services. The tag doesn't contain any particular semantics. -+ -+A **service coordinate** looks like `..`, f.e. 
`geolocation.rel1501.dc1` or (if you are running in AWS and have decided that you'll assume regions are availability zones) `geolocation.rel1501.eu-west-1a`. -+ -+Instance coordinates points to a particular service instance and looks like this: `...`. For the examples above the instance coordinates might look like `ff08f0ah.geolocation.rel1501.dc1` or `ab08bed5.geolocation.rel1501.eu-west-1a`. -+ -+The instance identifier is an unique identifier for that instance. Note that the instance identifier isn't unique across all services, isn't sequential and does not carry any semantic information. -+ -+## Register a service -+A service is registered through the `CloudnameService` class: -+```java -+// Create the service class. Note that getBackend() returns a Cloudname backend -+// instance. There ar multiple types available. -+try (CloudnameService cloudnameService = new CloudnameService(getBackend())) { -+ // Create the coordinate and endpoint -+ ServiceCoordinate serviceCoordinate = ServiceCoordinate.parse(""myservice.demo.local""); -+ Endpoint httpEndpoint = new Endpoint(""http"", ""127.0.0.1"", 80); -+ -+ ServiceData serviceData = new ServiceData(Arrays.asList(httpEndpoint)); -+ -+ // This will register the service. The returned handle will expose the registration -+ // to other clients until it is closed. -+ try (ServiceHandle handle = cloudnameService.registerService(serviceCoordinate, serviceData)) { -+ -+ // ...Run your service here -+ -+ } -+} -+``` -+ -+## Looking up services -+Services can be located without registering a service; supply a listener to the CloudnameService instance to get notified of new services: -+```java -+CloudnameService cloudnameService = new CloudnameService(getBackend()); -+ServiceCoordinate serviceCoordinate = ServiceCoordinate.parse(""myservice.demo.local""); -+cloudnameService.addServiceListener(ServiceCoordinate, new ServiceListener() { -+ @Override -+ public void onServiceCreated(final InstanceCoordinate coordinate, final ServiceData data) { -+ // A new instance is launched. Retrieve the endpoints via the data parameter. -+ // Note that this method is also called when the listener is set so you'll -+ // get notifications on already existing services as well. -+ } -+ -+ @Override -+ public void onServiceDataChanged(final InstanceCoordinate coordinate, final ServiceData data) { -+ // There's a change in endpoints for the given instance. The updated endpoints -+ // are supplied in the data parameter -+ } -+ -+ @Override -+ public void onServiceRemoved(final InstanceCoordinate coordinate) { -+ // One of the instances is stopped. It might become unavailable shortly -+ // (or it might have terminated) -+ } -+}); -+``` -+ -+## Permanent services -+Some resources might not be suitable for service discovery, either because they are not under your control, they are pet services or not designed for cloud-like behavior (aka ""pet servers""). You can still use those in service discovery; just add them as *permanent services*. Permanent services behave a bit differently from ordinary services; they stay alive for long periods of time and on some rare occasions they change their endpoint. Registering permanent services are similar to ordinary services. The following snippet registers a permanent service, then terminates. 
The service registration will still be available to other clients when this client has terminated: -+ -+```java -+try (CloudnameService cloudnameService = new CloudnameService(getBackend())) { -+ ServiceCoordinate coordinate = ServiceCoordinate.parse(""mydb.demo.local""); -+ Endpoint endpoint = new Endpoint(""db"", ""127.0.0.1"", 5678); -+ -+ if (!cloudnameService.createPermanentService(coordinate, endpoint)) { -+ System.out.println(""Couldn't register permanent service!""); -+ } -+} -+``` -+Note that permanent services can not have more than one endpoint registered at any time. A permanent service registration applies only to *one* service at a time. -+ -+Looking up permanent service registrations is similar to ordinary services: -+ -+```java -+try (CloudnameService cloudnameService = new CloudnameService(getBackend())) { -+ ServiceCoordinate coordinate = ServiceCoordinate.parse(""mydb.demo.local""); -+ cloudnameService.addPermanentServiceListener(coordinate, -+ new PermanentServiceListener() { -+ @Override -+ public void onServiceCreated(Endpoint endpoint) { -+ // Service is created. Note that this is also called when the -+ // listener is set so you'll get notifications on already -+ // existing services as well. -+ } -+ -+ @Override -+ public void onServiceChanged(Endpoint endpoint) { -+ // The endpoint is updated -+ } -+ -+ @Override -+ public void onServiceRemoved() { -+ // The service has been removed -+ } -+ }); -+} -+``` -diff --git a/cn-service/pom.xml b/cn-service/pom.xml -new file mode 100644 -index 00000000..f3788ed2 ---- /dev/null -+++ b/cn-service/pom.xml -@@ -0,0 +1,63 @@ -+ -+ 4.0.0 -+ -+ -+ org.cloudname -+ cloudname-parent -+ 3.0-SNAPSHOT -+ -+ -+ cn-service -+ jar -+ -+ Cloudname Service Discovery -+ Simple library for service discovery (and notifications) -+ https://github.com/Cloudname/cloudname -+ -+ -+ -+ org.cloudname -+ cn-core -+ -+ -+ -+ junit -+ junit -+ test -+ -+ -+ -+ org.hamcrest -+ hamcrest-all -+ 1.3 -+ -+ -+ -+ org.json -+ json -+ 20140107 -+ -+ -+ -+ org.cloudname -+ cn-memory -+ test -+ -+ -+ -+ -+ -+ -+ -+ org.apache.maven.plugins -+ maven-surefire-plugin -+ -+ -+ org.apache.maven.plugins -+ maven-compiler-plugin -+ -+ -+ -+ -+ -diff --git a/cn-service/src/main/java/org/cloudname/service/CloudnameService.java b/cn-service/src/main/java/org/cloudname/service/CloudnameService.java -new file mode 100644 -index 00000000..9c4747f4 ---- /dev/null -+++ b/cn-service/src/main/java/org/cloudname/service/CloudnameService.java -@@ -0,0 +1,237 @@ -+package org.cloudname.service; -+import org.cloudname.core.CloudnameBackend; -+import org.cloudname.core.CloudnamePath; -+import org.cloudname.core.LeaseHandle; -+import org.cloudname.core.LeaseListener; -+ -+import java.util.ArrayList; -+import java.util.List; -+import java.util.Set; -+import java.util.concurrent.CopyOnWriteArraySet; -+import java.util.logging.Level; -+import java.util.logging.Logger; -+ -+/** -+ * Service discovery implementation. Use registerService() and addServiceListener() to register -+ * and locate services. -+ * -+ * TODO: Enable lookups based on partial coordinates. Create builder for service coordinates, -+ * use own coordinate to resolve complete coordinate. 
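-+ *
-+ * <p>A minimal usage sketch, mirroring the example in the module README ({@code backend} is any
-+ * {@link org.cloudname.core.CloudnameBackend} instance; coordinate and endpoint values are
-+ * placeholders):</p>
-+ * <pre>
-+ * try (CloudnameService cloudnameService = new CloudnameService(backend)) {
-+ *     ServiceCoordinate coordinate = ServiceCoordinate.parse(""myservice.demo.local"");
-+ *     ServiceData serviceData = new ServiceData(Arrays.asList(new Endpoint(""http"", ""127.0.0.1"", 80)));
-+ *     try (ServiceHandle handle = cloudnameService.registerService(coordinate, serviceData)) {
-+ *         // the instance stays registered until the handle (or the service) is closed
-+ *     }
-+ * }
-+ * </pre>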
-+ * -+ * @author stalehd@gmail.com -+ */ -+public class CloudnameService implements AutoCloseable { -+ private final Logger LOG = Logger.getLogger(CloudnameService.class.getName()); -+ -+ private final CloudnameBackend backend; -+ private final List handles = new ArrayList<>(); -+ private final List temporaryListeners = new ArrayList<>(); -+ private final List permanentListeners = new ArrayList<>(); -+ private final Set permanentUpdatesInProgress = new CopyOnWriteArraySet<>(); -+ private final Object syncObject = new Object(); -+ -+ /** -+ * @oaram backend backend implementation to use -+ * @throws IllegalArgumentException if parameter is invalid -+ */ -+ public CloudnameService(final CloudnameBackend backend) { -+ if (backend == null) { -+ throw new IllegalArgumentException(""Backend can not be null""); -+ } -+ this.backend = backend; -+ } -+ -+ /** -+ * Register an instance with the given service coordinate. The service will get its own -+ * instance coordinate under the given service coordinate. -+ * -+ * @param serviceCoordinate The service coordinate that the service (instance) will attach to -+ * @param serviceData Service data for the instance -+ * @return ServiceHandle a handle the client can use to manage the endpoints for the service. -+ * The most typical use case is to register all endpoints -+ * @throws IllegalArgumentException if the parameters are invalid -+ */ -+ public ServiceHandle registerService( -+ final ServiceCoordinate serviceCoordinate, final ServiceData serviceData) { -+ -+ if (serviceCoordinate == null) { -+ throw new IllegalArgumentException(""Coordinate cannot be null""); -+ } -+ if (serviceData == null) { -+ throw new IllegalArgumentException(""Service Data cannot be null""); -+ } -+ final LeaseHandle leaseHandle = backend.createTemporaryLease( -+ serviceCoordinate.toCloudnamePath(), serviceData.toJsonString()); -+ -+ final ServiceHandle serviceHandle = new ServiceHandle( -+ new InstanceCoordinate(leaseHandle.getLeasePath()), serviceData, leaseHandle); -+ -+ synchronized (syncObject) { -+ handles.add(serviceHandle); -+ } -+ return serviceHandle; -+ } -+ -+ /** -+ * Add listener for service events. This only applies to ordinary services. -+ * -+ * @param coordinate The coordinate to monitor. -+ * @param listener Listener getting notifications on changes. -+ * @throws IllegalArgumentException if parameters are invalid -+ */ -+ public void addServiceListener( -+ final ServiceCoordinate coordinate, final ServiceListener listener) { -+ if (coordinate == null) { -+ throw new IllegalArgumentException(""Coordinate can not be null""); -+ } -+ if (listener == null) { -+ throw new IllegalArgumentException(""Listener can not be null""); -+ } -+ // Just create the corresponding listener on the backend and translate the parameters -+ // from the listener. 
-+ final LeaseListener leaseListener = new LeaseListener() { -+ @Override -+ public void leaseCreated(final CloudnamePath path, final String data) { -+ final InstanceCoordinate instanceCoordinate = new InstanceCoordinate(path); -+ final ServiceData serviceData = ServiceData.fromJsonString(data); -+ listener.onServiceCreated(instanceCoordinate, serviceData); -+ } -+ -+ @Override -+ public void leaseRemoved(final CloudnamePath path) { -+ final InstanceCoordinate instanceCoordinate = new InstanceCoordinate(path); -+ listener.onServiceRemoved(instanceCoordinate); -+ } -+ -+ @Override -+ public void dataChanged(final CloudnamePath path, final String data) { -+ final InstanceCoordinate instanceCoordinate = new InstanceCoordinate(path); -+ final ServiceData serviceData = ServiceData.fromJsonString(data); -+ listener.onServiceDataChanged(instanceCoordinate, serviceData); -+ } -+ }; -+ synchronized (syncObject) { -+ temporaryListeners.add(leaseListener); -+ } -+ backend.addTemporaryLeaseListener(coordinate.toCloudnamePath(), leaseListener); -+ } -+ -+ /** -+ * Create a permanent service. The service registration will be kept when the client exits. The -+ * service will have a single endpoint. -+ */ -+ public boolean createPermanentService( -+ final ServiceCoordinate coordinate, final Endpoint endpoint) { -+ if (coordinate == null) { -+ throw new IllegalArgumentException(""Service coordinate can't be null""); -+ } -+ if (endpoint == null) { -+ throw new IllegalArgumentException(""Endpoint can't be null""); -+ } -+ -+ return backend.createPermanantLease(coordinate.toCloudnamePath(), endpoint.toJsonString()); -+ } -+ -+ /** -+ * Update permanent service coordinate. Note that this is a non-atomic operation with multiple -+ * trips to the backend system. The update is done in two operations; one delete and one -+ * create. If the delete operation fail and the create operation succeeds it might end up -+ * removing the permanent service coordinate. Clients will not be notified of the removal. -+ */ -+ public boolean updatePermanentService( -+ final ServiceCoordinate coordinate, final Endpoint endpoint) { -+ if (coordinate == null) { -+ throw new IllegalArgumentException(""Coordinate can't be null""); -+ } -+ if (endpoint == null) { -+ throw new IllegalArgumentException(""Endpoint can't be null""); -+ } -+ -+ if (permanentUpdatesInProgress.contains(coordinate)) { -+ LOG.log(Level.WARNING, ""Attempt to update a permanent service which is already"" -+ + "" updating. (coordinate: "" + coordinate + "", endpoint: "" + endpoint); -+ return false; -+ } -+ // Check if the endpoint name still matches. -+ final String data = backend.readPermanentLeaseData(coordinate.toCloudnamePath()); -+ if (data == null) { -+ return false; -+ } -+ final Endpoint oldEndpoint = Endpoint.fromJson(data); -+ if (!oldEndpoint.getName().equals(endpoint.getName())) { -+ LOG.log(Level.INFO, ""Rejecting attempt to update permanent service with a new endpoint"" -+ + "" that has a different name. Old name: "" + oldEndpoint + "" new: "" + endpoint); -+ return false; -+ } -+ permanentUpdatesInProgress.add(coordinate); -+ try { -+ return backend.writePermanentLeaseData( -+ coordinate.toCloudnamePath(), endpoint.toJsonString()); -+ } catch (final RuntimeException ex) { -+ LOG.log(Level.WARNING, ""Got exception updating permanent lease. 
The system might be in"" -+ + "" an indeterminate state"", ex); -+ return false; -+ } finally { -+ permanentUpdatesInProgress.remove(coordinate); -+ } -+ } -+ -+ /** -+ * Remove a perviously registered permanent service. Needless to say: Use with caution. -+ */ -+ public boolean removePermanentService(final ServiceCoordinate coordinate) { -+ if (coordinate == null) { -+ throw new IllegalArgumentException(""Coordinate can not be null""); -+ } -+ return backend.removePermanentLease(coordinate.toCloudnamePath()); -+ } -+ -+ /** -+ * Listen for changes in permanent services. The changes are usually of the earth-shattering -+ * variety so as a client you'd be interested in knowing about these as soon as possible. -+ */ -+ public void addPermanentServiceListener( -+ final ServiceCoordinate coordinate, final PermanentServiceListener listener) { -+ if (coordinate == null) { -+ throw new IllegalArgumentException(""Coordinate can not be null""); -+ } -+ if (listener == null) { -+ throw new IllegalArgumentException(""Listener can not be null""); -+ } -+ final LeaseListener leaseListener = new LeaseListener() { -+ @Override -+ public void leaseCreated(CloudnamePath path, String data) { -+ listener.onServiceCreated(Endpoint.fromJson(data)); -+ } -+ -+ @Override -+ public void leaseRemoved(CloudnamePath path) { -+ listener.onServiceRemoved(); -+ } -+ -+ @Override -+ public void dataChanged(CloudnamePath path, String data) { -+ listener.onServiceChanged(Endpoint.fromJson(data)); -+ } -+ }; -+ synchronized (syncObject) { -+ permanentListeners.add(leaseListener); -+ } -+ backend.addPermanentLeaseListener(coordinate.toCloudnamePath(), leaseListener); -+ } -+ -+ @Override -+ public void close() { -+ synchronized (syncObject) { -+ for (final ServiceHandle handle : handles) { -+ handle.close(); -+ } -+ for (final LeaseListener listener : temporaryListeners) { -+ backend.removeTemporaryLeaseListener(listener); -+ } -+ for (final LeaseListener listener : permanentListeners) { -+ backend.removePermanentLeaseListener(listener); -+ } -+ } -+ } -+} -diff --git a/cn-service/src/main/java/org/cloudname/service/Endpoint.java b/cn-service/src/main/java/org/cloudname/service/Endpoint.java -new file mode 100644 -index 00000000..d20371fd ---- /dev/null -+++ b/cn-service/src/main/java/org/cloudname/service/Endpoint.java -@@ -0,0 +1,114 @@ -+package org.cloudname.service; -+ -+import org.cloudname.core.CloudnamePath; -+import org.json.JSONObject; -+ -+/** -+ * Endpoints exposed by services. Endpoints contains host address and port number. -+ * -+ * @author stalehd@gmail.com -+ */ -+public class Endpoint { -+ private final String name; -+ private final String host; -+ private final int port; -+ -+ /** -+ * @param name Name of endpoint. 
Must conform to RFC 952 and RFC 1123, -+ * ie [a-z,0-9,-] -+ * @param host Host name or IP address -+ * @param port Port number (1- max port number) -+ * @throws IllegalArgumentException if one of the parameters are null (name/host) or zero (port) -+ */ -+ public Endpoint(final String name, final String host, final int port) { -+ if (name == null || name.isEmpty()) { -+ throw new IllegalArgumentException(""Name can not be null or empty""); -+ } -+ if (host == null || host.isEmpty()) { -+ throw new IllegalArgumentException(""Host can not be null or empty""); -+ } -+ if (port < 1) { -+ throw new IllegalArgumentException(""Port can not be < 1""); -+ } -+ if (!CloudnamePath.isValidPathElementName(name)) { -+ throw new IllegalArgumentException(""Name is not a valid identifier""); -+ } -+ -+ this.name = name; -+ this.host = host; -+ this.port = port; -+ } -+ -+ /** -+ * @return The endpoint's name -+ */ -+ public String getName() { -+ return name; -+ } -+ -+ /** -+ * @return The endpoint's host name or IP address -+ */ -+ public String getHost() { -+ return host; -+ } -+ -+ /** -+ * @return The endpoint's port number -+ */ -+ public int getPort() { -+ return port; -+ } -+ -+ /** -+ * @return JSON representation of isntance -+ */ -+ /* package-private */ String toJsonString() { -+ return new JSONObject() -+ .put(""name"", name) -+ .put(""host"", host) -+ .put(""port"", port) -+ .toString(); -+ } -+ -+ /** -+ * @param jsonString String with JSON representation of instance -+ * @return Endpoint instance -+ * @throws org.json.JSONException if the string is malformed. -+ */ -+ /* package-private */ static Endpoint fromJson(final String jsonString) { -+ final JSONObject json = new JSONObject(jsonString); -+ return new Endpoint( -+ json.getString(""name""), -+ json.getString(""host""), -+ json.getInt(""port"")); -+ } -+ -+ @Override -+ public boolean equals(final Object o) { -+ if (o == null || !(o instanceof Endpoint)) { -+ return false; -+ } -+ final Endpoint other = (Endpoint) o; -+ -+ if (!this.name.equals(other.name) -+ || !this.host.equals(other.host) -+ || this.port != other.port) { -+ return false; -+ } -+ return true; -+ } -+ -+ @Override -+ public int hashCode() { -+ return this.toString().hashCode(); -+ } -+ -+ @Override -+ public String toString() { -+ return ""[ name = "" + name -+ + "", host = "" + host -+ + "", port = "" + port -+ + ""]""; -+ } -+} -diff --git a/cn-service/src/main/java/org/cloudname/service/InstanceCoordinate.java b/cn-service/src/main/java/org/cloudname/service/InstanceCoordinate.java -new file mode 100644 -index 00000000..7d219357 ---- /dev/null -+++ b/cn-service/src/main/java/org/cloudname/service/InstanceCoordinate.java -@@ -0,0 +1,146 @@ -+package org.cloudname.service; -+ -+import org.cloudname.core.CloudnamePath; -+import org.json.JSONObject; -+ -+import java.util.regex.Matcher; -+import java.util.regex.Pattern; -+ -+/** -+ * A coordinate representing a running service. The coordinate consists of four parts; instance id, -+ * service name, tag and region. -+ * -+ * Note that the order of elements in the string representation is opposite of the CloudnamePath -+ * class; you can't create a canonical representation of the instance coordinate by calling join() -+ * on the CloudnamePath instance. 
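// A minimal usage sketch of the ordering described above. The coordinate value and the
// example class are illustrative only and are not part of this module.
import org.cloudname.service.InstanceCoordinate;

public class InstanceCoordinateOrderingSketch {
    public static void main(final String[] args) {
        // Canonical form is instance.service.tag.region, i.e. the reverse of the
        // CloudnamePath element order (region, tag, service, instance).
        final InstanceCoordinate coordinate = InstanceCoordinate.parse("1.webserver.prod.us-east");
        System.out.println(coordinate.getInstance());        // 1
        System.out.println(coordinate.getService());         // webserver
        System.out.println(coordinate.getTag());             // prod
        System.out.println(coordinate.getRegion());          // us-east
        System.out.println(coordinate.toCanonicalString());  // 1.webserver.prod.us-east
    }
}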
-+ * -+ * @author stalehd@gmail.com -+ */ -+public class InstanceCoordinate { -+ private static final Pattern COORDINATE_PATTERN = Pattern.compile(""(.*)\\.(.*)\\.(.*)\\.(.*)""); -+ private static final String REGION_NAME = ""region""; -+ private static final String TAG_NAME = ""tag""; -+ private static final String SERVICE_NAME = ""service""; -+ private static final String INSTANCE_NAME = ""instance""; -+ -+ -+ private final String region; -+ private final String tag; -+ private final String service; -+ private final String instance; -+ -+ /** -+ * @param path CloudnamePath instance to use as source -+ * @throws IllegalArgumentException if parameters are invalid -+ */ -+ /* package-private */ InstanceCoordinate(final CloudnamePath path) { -+ if (path == null) { -+ throw new IllegalArgumentException(""Path can not be null""); -+ } -+ if (path.length() != 4) { -+ throw new IllegalArgumentException(""Path must contain 4 elements""); -+ } -+ this.region = path.get(0); -+ this.tag = path.get(1); -+ this.service = path.get(2); -+ this.instance = path.get(3); -+ } -+ -+ /** -+ * @return The region of the coordinate -+ */ -+ public String getRegion() { -+ return region; -+ } -+ -+ /** -+ * @return The tag of the coordinate -+ */ -+ public String getTag() { -+ return tag; -+ } -+ -+ /** -+ * @return The service name -+ */ -+ public String getService() { -+ return service; -+ } -+ -+ /** -+ * @return The instance identifier -+ */ -+ public String getInstance() { -+ return instance; -+ } -+ -+ /** -+ * @return A CloudnamePath instance representing this coordinate -+ */ -+ /* package-private */ CloudnamePath toCloudnamePath() { -+ return new CloudnamePath( -+ new String[] { this.region, this.tag, this.service, this.instance }); -+ } -+ -+ /** -+ * @return Canonical string representation of coordinate -+ */ -+ public String toCanonicalString() { -+ return new StringBuffer() -+ .append(instance).append(""."") -+ .append(service).append(""."") -+ .append(tag).append(""."") -+ .append(region) -+ .toString(); -+ } -+ -+ /** -+ * @return Coordinate represented as a JSON-formatted string -+ */ -+ /* package-private */ String toJsonString() { -+ return new JSONObject() -+ .put(REGION_NAME, this.region) -+ .put(TAG_NAME, this.tag) -+ .put(SERVICE_NAME, this.service) -+ .put(INSTANCE_NAME, this.instance) -+ .toString(); -+ } -+ -+ /** -+ * @param jsonString A coordinate serialized as a JSON-formatted string -+ * @return InstanceCoordinate built from the string -+ */ -+ /* package-private */ static InstanceCoordinate fromJson(final String jsonString) { -+ final JSONObject object = new JSONObject(jsonString); -+ final String[] pathElements = new String[4]; -+ pathElements[0] = object.getString(REGION_NAME); -+ pathElements[1] = object.getString(TAG_NAME); -+ pathElements[2] = object.getString(SERVICE_NAME); -+ pathElements[3] = object.getString(INSTANCE_NAME); -+ -+ return new InstanceCoordinate(new CloudnamePath(pathElements)); -+ } -+ -+ /** -+ * @param string A canonical string representation of a coordinate -+ * @return InstanceCoordinate built from the string -+ */ -+ public static InstanceCoordinate parse(final String string) { -+ if (string == null) { -+ return null; -+ } -+ final Matcher matcher = COORDINATE_PATTERN.matcher(string); -+ if (!matcher.matches()) { -+ return null; -+ } -+ final String[] path = new String[] { -+ matcher.group(4), matcher.group(3), matcher.group(2), matcher.group(1) -+ }; -+ return new InstanceCoordinate(new CloudnamePath(path)); -+ } -+ -+ @Override -+ public String 
toString() { -+ return ""[ Coordinate "" + toCanonicalString() + ""]""; -+ } -+} -diff --git a/cn-service/src/main/java/org/cloudname/service/PermanentServiceListener.java b/cn-service/src/main/java/org/cloudname/service/PermanentServiceListener.java -new file mode 100644 -index 00000000..5d644b89 ---- /dev/null -+++ b/cn-service/src/main/java/org/cloudname/service/PermanentServiceListener.java -@@ -0,0 +1,25 @@ -+package org.cloudname.service; -+ -+/** -+ * Listener interface for permanent services. -+ * -+ * @author stalehd@gmail.com -+ */ -+public interface PermanentServiceListener { -+ /** -+ * A service is created. This method will be called on start-up for all existing services. -+ * @param endpoint The endpoint of the service -+ */ -+ void onServiceCreated(final Endpoint endpoint); -+ -+ /** -+ * Service endpoint has changed. -+ * @param endpoint The new value of the service endpoint -+ */ -+ void onServiceChanged(final Endpoint endpoint); -+ -+ /** -+ * Service has been removed. -+ */ -+ void onServiceRemoved(); -+} -diff --git a/cn-service/src/main/java/org/cloudname/service/ServiceCoordinate.java b/cn-service/src/main/java/org/cloudname/service/ServiceCoordinate.java -new file mode 100644 -index 00000000..02a74637 ---- /dev/null -+++ b/cn-service/src/main/java/org/cloudname/service/ServiceCoordinate.java -@@ -0,0 +1,107 @@ -+package org.cloudname.service; -+ -+import org.cloudname.core.CloudnamePath; -+ -+import java.util.regex.Matcher; -+import java.util.regex.Pattern; -+ -+/** -+ * A coordinate pointing to a set of services or a single permanent service. -+ * -+ * @author stalehd@gmail.com -+ */ -+public class ServiceCoordinate { -+ private final String region; -+ private final String tag; -+ private final String service; -+ -+ // Pattern for string parsing -+ private static final Pattern COORDINATE_PATTERN = Pattern.compile(""(.*)\\.(.*)\\.(.*)""); -+ -+ /** -+ * @param path The CloudnamePath instance to use when building the coordinate. The coordinate -+ * must consist of three elements and can not be null. -+ * @throws IllegalArgumentException if parameter is invalid -+ */ -+ /* package-private */ ServiceCoordinate(final CloudnamePath path) { -+ if (path == null) { -+ throw new IllegalArgumentException(""Path can not be null""); -+ } -+ if (path.length() != 3) { -+ throw new IllegalArgumentException(""Path must have three elements""); -+ } -+ region = path.get(0); -+ tag = path.get(1); -+ service = path.get(2); -+ } -+ -+ /** -+ * @return The coordinate's region -+ */ -+ public String getRegion() { -+ return region; -+ } -+ -+ /** -+ * @return The coordinate's tag -+ */ -+ public String getTag() { -+ return tag; -+ } -+ -+ /** -+ * @return The coordinate's service name -+ */ -+ public String getService() { -+ return service; -+ } -+ -+ /** -+ * @param serviceCoordinateString String representation of coordinate -+ * @return ServiceCoordinate instance built from the string. Null if the coordinate -+ * can't be parsed correctly. 
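// A short sketch of the null-on-failure contract described above; the coordinate string
// and the example class are illustrative only.
import org.cloudname.service.ServiceCoordinate;

public class ServiceCoordinateParseSketch {
    public static void main(final String[] args) {
        final ServiceCoordinate coordinate = ServiceCoordinate.parse("webserver.prod.us-east");
        if (coordinate == null) {
            System.err.println("Not a valid service coordinate");
            return;
        }
        System.out.println(coordinate.getService()); // webserver
        System.out.println(coordinate.getTag());     // prod
        System.out.println(coordinate.getRegion());  // us-east
    }
}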
-+ */ -+ public static ServiceCoordinate parse(final String serviceCoordinateString) { -+ final Matcher matcher = COORDINATE_PATTERN.matcher(serviceCoordinateString); -+ if (!matcher.matches()) { -+ return null; -+ } -+ final String[] path = new String[] { matcher.group(3), matcher.group(2), matcher.group(1) }; -+ return new ServiceCoordinate(new CloudnamePath(path)); -+ } -+ -+ /** -+ * @return CloudnamePath representing this coordinate -+ */ -+ /* package-private */ CloudnamePath toCloudnamePath() { -+ return new CloudnamePath(new String[] { this.region, this.tag, this.service }); -+ } -+ -+ @Override -+ public boolean equals(final Object o) { -+ if (this == o) { -+ return true; -+ } -+ if (o == null || getClass() != o.getClass()) { -+ return false; -+ } -+ -+ final ServiceCoordinate other = (ServiceCoordinate) o; -+ -+ if (!this.region.equals(other.region) -+ || !this.tag.equals(other.tag) -+ || !this.service.equals(other.service)) { -+ return false; -+ } -+ return true; -+ } -+ -+ @Override -+ public int hashCode() { -+ int result = region.hashCode(); -+ result = 31 * result + tag.hashCode(); -+ result = 31 * result + service.hashCode(); -+ return result; -+ } -+ -+} -diff --git a/cn-service/src/main/java/org/cloudname/service/ServiceData.java b/cn-service/src/main/java/org/cloudname/service/ServiceData.java -new file mode 100644 -index 00000000..dec36ea1 ---- /dev/null -+++ b/cn-service/src/main/java/org/cloudname/service/ServiceData.java -@@ -0,0 +1,123 @@ -+package org.cloudname.service; -+ -+import org.json.JSONArray; -+import org.json.JSONObject; -+ -+import java.util.ArrayList; -+import java.util.HashMap; -+import java.util.List; -+import java.util.Map; -+ -+/** -+ * Service data stored for each service. This data only contains endpoints at the moment. Endpoint -+ * names must be unique. -+ * -+ * @author stalehd@gmail.com -+ */ -+public class ServiceData { -+ private final Object syncObject = new Object(); -+ private final Map endpoints = new HashMap<>(); -+ -+ /** -+ * Create empty service data object with no endpoints. -+ */ -+ public ServiceData() { -+ -+ } -+ -+ /** -+ * Create a new instance with the given list of endpoints. If there's duplicates in the list -+ * the duplicates will be discarded. -+ * -+ * @param endpointList List of endpoints to add -+ */ -+ /* package-private */ ServiceData(final List endpointList) { -+ synchronized (syncObject) { -+ for (final Endpoint endpoint : endpointList) { -+ endpoints.put(endpoint.getName(), endpoint); -+ } -+ } -+ } -+ -+ /** -+ * @param name Name of endpoint -+ * @return The endpoint with the specified name. Null if the endpoint doesn't exist -+ */ -+ public Endpoint getEndpoint(final String name) { -+ synchronized (syncObject) { -+ for (final String epName : endpoints.keySet()) { -+ if (epName.equals(name)) { -+ return endpoints.get(name); -+ } -+ } -+ } -+ return null; -+ } -+ -+ /** -+ * @param endpoint Endpoint to add -+ * @return true if endpoint can be added. False if the endpoint already exists. 
-+ * @throws IllegalArgumentException if endpoint is invalid -+ */ -+ public boolean addEndpoint(final Endpoint endpoint) { -+ if (endpoint == null) { -+ throw new IllegalArgumentException(""Endpoint can not be null""); -+ } -+ synchronized (syncObject) { -+ if (endpoints.containsKey(endpoint.getName())) { -+ return false; -+ } -+ endpoints.put(endpoint.getName(), endpoint); -+ } -+ return true; -+ } -+ -+ /** -+ * @param endpoint endpoint to remove -+ * @return True if the endpoint has been removed, false if the endpoint can't be removed. Nulls -+ * @throws IllegalArgumentException if endpoint is invalid -+ */ -+ public boolean removeEndpoint(final Endpoint endpoint) { -+ if (endpoint == null) { -+ throw new IllegalArgumentException(""Endpoint can't be null""); -+ } -+ synchronized (syncObject) { -+ if (!endpoints.containsKey(endpoint.getName())) { -+ return false; -+ } -+ endpoints.remove(endpoint.getName()); -+ } -+ return true; -+ } -+ -+ /** -+ * @return Service data serialized as a JSON string -+ */ -+ /* package-private */ String toJsonString() { -+ final JSONArray epList = new JSONArray(); -+ int i = 0; -+ for (Map.Entry entry : endpoints.entrySet()) { -+ epList.put(i++, new JSONObject(entry.getValue().toJsonString())); -+ } -+ return new JSONObject().put(""endpoints"", epList).toString(); -+ } -+ -+ /** -+ * @param jsonString JSON string to create instance from -+ * @throws IllegalArgumentException if parameter is invalid -+ */ -+ /* package-private */ static ServiceData fromJsonString(final String jsonString) { -+ if (jsonString == null || jsonString.isEmpty()) { -+ throw new IllegalArgumentException(""json string can not be null or empty""); -+ } -+ -+ final List endpoints = new ArrayList<>(); -+ -+ final JSONObject json = new JSONObject(jsonString); -+ final JSONArray epList = json.getJSONArray(""endpoints""); -+ for (int i = 0; i < epList.length(); i++) { -+ endpoints.add(Endpoint.fromJson(epList.getJSONObject(i).toString())); -+ } -+ return new ServiceData(endpoints); -+ } -+} -diff --git a/cn-service/src/main/java/org/cloudname/service/ServiceHandle.java b/cn-service/src/main/java/org/cloudname/service/ServiceHandle.java -new file mode 100644 -index 00000000..a9305bb2 ---- /dev/null -+++ b/cn-service/src/main/java/org/cloudname/service/ServiceHandle.java -@@ -0,0 +1,75 @@ -+package org.cloudname.service; -+import org.cloudname.core.LeaseHandle; -+ -+import java.util.logging.Level; -+import java.util.logging.Logger; -+ -+/** -+ * A handle to a service registration. The handle is used to modify the registered endpoints. The -+ * state is kept in the ServiceData instance held by the handle. Note that endpoints in the -+ * ServiceData instance isn't registered automatically when the handle is created. 
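// A sketch of the registration flow described above: endpoints added to the ServiceData
// instance are only published once registerEndpoint is called on the handle. Assumes the
// code lives in the org.cloudname.service package (registerEndpoint is package-private)
// and reuses the in-memory backend from the tests; coordinate and endpoint values are
// illustrative only.
package org.cloudname.service;

import org.cloudname.backends.memory.MemoryBackend;

public class ServiceHandleSketch {
    public static void main(final String[] args) {
        try (final CloudnameService cloudnameService = new CloudnameService(new MemoryBackend());
             final ServiceHandle handle = cloudnameService.registerService(
                     ServiceCoordinate.parse("webserver.prod.us-east"), new ServiceData())) {
            final Endpoint httpEndpoint = new Endpoint("http", "127.0.0.1", 8080);
            // false means the endpoint name already exists or the lease data could not be written.
            final boolean published = handle.registerEndpoint(httpEndpoint);
            System.out.println("Endpoint published: " + published);
        }
    }
}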
-+ * -+ * @author stalehd@gmail.com -+ */ -+public class ServiceHandle implements AutoCloseable { -+ private static final Logger LOG = Logger.getLogger(ServiceHandle.class.getName()); -+ private final LeaseHandle leaseHandle; -+ private final InstanceCoordinate instanceCoordinate; -+ private final ServiceData serviceData; -+ -+ /** -+ * @param instanceCoordinate The instance coordinate this handle belongs to -+ * @param serviceData The service data object -+ * @param leaseHandle The Cloudname handle for the lease -+ * @throws IllegalArgumentException if parameters are invalid -+ */ -+ public ServiceHandle( -+ final InstanceCoordinate instanceCoordinate, -+ final ServiceData serviceData, -+ final LeaseHandle leaseHandle) { -+ if (instanceCoordinate == null) { -+ throw new IllegalArgumentException(""Instance coordinate cannot be null""); -+ } -+ if (serviceData == null) { -+ throw new IllegalArgumentException(""Service data must be set""); -+ } -+ if (leaseHandle == null) { -+ throw new IllegalArgumentException(""Lease handle cannot be null""); -+ } -+ this.leaseHandle = leaseHandle; -+ this.instanceCoordinate = instanceCoordinate; -+ this.serviceData = serviceData; -+ } -+ -+ /** -+ * @param endpoint The endpoint to register -+ * @return true if endpoint is registered -+ */ -+ boolean registerEndpoint(final Endpoint endpoint) { -+ if (!serviceData.addEndpoint(endpoint)) { -+ return false; -+ } -+ return this.leaseHandle.writeLeaseData(serviceData.toJsonString()); -+ } -+ -+ /** -+ * @param endpoint The endpoint to remove -+ * @return true if endpoint is removed -+ */ -+ boolean removeEndpoint(final Endpoint endpoint) { -+ if (!serviceData.removeEndpoint(endpoint)) { -+ return false; -+ } -+ return this.leaseHandle.writeLeaseData(serviceData.toJsonString()); -+ } -+ -+ @Override -+ public void close() { -+ try { -+ leaseHandle.close(); -+ } catch (final Exception ex) { -+ LOG.log(Level.WARNING, ""Got exception closing lease for instance "" -+ + instanceCoordinate.toCanonicalString(), ex); -+ } -+ } -+} -diff --git a/cn-service/src/main/java/org/cloudname/service/ServiceListener.java b/cn-service/src/main/java/org/cloudname/service/ServiceListener.java -new file mode 100644 -index 00000000..ac36e6c8 ---- /dev/null -+++ b/cn-service/src/main/java/org/cloudname/service/ServiceListener.java -@@ -0,0 +1,33 @@ -+package org.cloudname.service; -+ -+/** -+ * Listener interface for services. -+ * -+ * @author stalehd@gmail.com -+ */ -+public interface ServiceListener { -+ /** -+ * Service is created. Note that this method is called once for every service that already -+ * exists when the listener is attached. -+ * -+ * @param coordinate Coordinate of instance -+ * @param serviceData The instance's data, ie its endpoints -+ */ -+ void onServiceCreated(final InstanceCoordinate coordinate, final ServiceData serviceData); -+ -+ /** -+ * Service's data have changed. -+ * @param coordinate Coordinate of instance -+ * @param data The instance's data -+ */ -+ void onServiceDataChanged(final InstanceCoordinate coordinate, final ServiceData data); -+ -+ /** -+ * Instance is removed. This means that the service has either closed its connection to -+ * the Cloudname backend or it has become unavailable for some other reason (f.e. 
caused -+ * by a network partition) -+ * -+ * @param coordinate The instance's coordinate -+ */ -+ void onServiceRemoved(final InstanceCoordinate coordinate); -+} -diff --git a/cn-service/src/test/java/org/cloudname/service/CloudnameServicePermanentTest.java b/cn-service/src/test/java/org/cloudname/service/CloudnameServicePermanentTest.java -new file mode 100644 -index 00000000..d819931c ---- /dev/null -+++ b/cn-service/src/test/java/org/cloudname/service/CloudnameServicePermanentTest.java -@@ -0,0 +1,261 @@ -+package org.cloudname.service; -+ -+import org.cloudname.backends.memory.MemoryBackend; -+import org.cloudname.core.CloudnameBackend; -+import org.junit.AfterClass; -+import org.junit.BeforeClass; -+import org.junit.Test; -+ -+import java.util.concurrent.CountDownLatch; -+import java.util.concurrent.TimeUnit; -+import java.util.concurrent.atomic.AtomicInteger; -+ -+import static org.hamcrest.CoreMatchers.is; -+import static org.junit.Assert.assertThat; -+import static org.junit.Assert.assertTrue; -+import static org.junit.Assert.fail; -+ -+/** -+ * Test persistent services functions. -+ */ -+public class CloudnameServicePermanentTest { -+ private static final String SERVICE_COORDINATE = ""myoldskoolserver.test.local""; -+ private static final CloudnameBackend memoryBackend = new MemoryBackend(); -+ private static final Endpoint DEFAULT_ENDPOINT = new Endpoint(""serviceport"", ""localhost"", 80); -+ private final ServiceCoordinate serviceCoordinate = ServiceCoordinate.parse(SERVICE_COORDINATE); -+ -+ @BeforeClass -+ public static void createServiceRegistration() { -+ try (final CloudnameService cloudnameService = new CloudnameService(memoryBackend)) { -+ assertThat( -+ cloudnameService.createPermanentService( -+ ServiceCoordinate.parse(SERVICE_COORDINATE), DEFAULT_ENDPOINT), -+ is(true)); -+ } -+ } -+ -+ @Test -+ public void testPersistentServiceChanges() throws InterruptedException { -+ try (final CloudnameService cloudnameService = new CloudnameService(memoryBackend)) { -+ -+ final CountDownLatch callCounter = new CountDownLatch(2); -+ final int secondsToWait = 1; -+ -+ // ...a listener on the service will trigger when there's a change plus the initial -+ // onCreate call. 
-+ cloudnameService.addPermanentServiceListener(serviceCoordinate, -+ new PermanentServiceListener() { -+ private final AtomicInteger createCount = new AtomicInteger(0); -+ private final AtomicInteger changeCount = new AtomicInteger(0); -+ -+ @Override -+ public void onServiceCreated(Endpoint endpoint) { -+ // Expect this to be called once and only once, even on updates -+ assertThat(createCount.incrementAndGet(), is(1)); -+ callCounter.countDown(); -+ } -+ -+ @Override -+ public void onServiceChanged(Endpoint endpoint) { -+ // This will be called when the endpoint changes -+ assertThat(changeCount.incrementAndGet(), is(1)); -+ callCounter.countDown(); -+ } -+ -+ @Override -+ public void onServiceRemoved() { -+ // This won't be called -+ fail(""Did not expect onServiceRemoved to be called""); -+ } -+ }); -+ -+ // Updating with invalid endpoint name fails -+ assertThat(cloudnameService.updatePermanentService(serviceCoordinate, -+ new Endpoint(""wrongep"", DEFAULT_ENDPOINT.getHost(), 81)), -+ is(false)); -+ -+ // Using the right one, however, does work -+ assertThat(cloudnameService.updatePermanentService(serviceCoordinate, -+ new Endpoint( -+ DEFAULT_ENDPOINT.getName(), DEFAULT_ENDPOINT.getHost(), 81)), -+ is(true)); -+ // Wait for notifications -+ callCounter.await(secondsToWait, TimeUnit.SECONDS); -+ -+ } -+ -+ // At this point the service created above is closed; changes to the service won't -+ // trigger errors in the listener declared. Just do one change to make sure. -+ final CloudnameService cloudnameService = new CloudnameService(memoryBackend); -+ assertThat(cloudnameService.updatePermanentService( -+ ServiceCoordinate.parse(SERVICE_COORDINATE), DEFAULT_ENDPOINT), is(true)); -+ } -+ -+ @Test -+ public void testDuplicateRegistration() { -+ try (final CloudnameService cloudnameService = new CloudnameService(memoryBackend)) { -+ // Creating the same permanent service will fail -+ assertThat(""Can't create two identical permanent services"", -+ cloudnameService.createPermanentService(serviceCoordinate, DEFAULT_ENDPOINT), -+ is(false)); -+ } -+ } -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void testNullCoordinateRegistration() { -+ try (final CloudnameService cloudnameService = new CloudnameService(memoryBackend)) { -+ cloudnameService.createPermanentService(null, DEFAULT_ENDPOINT); -+ } -+ } -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void testInvalidEndpoint() { -+ try (final CloudnameService cloudnameService = new CloudnameService(memoryBackend)) { -+ cloudnameService.createPermanentService(serviceCoordinate, null); -+ } -+ } -+ -+ @Test -+ public void testListenerOnServiceThatDoesntExist() throws InterruptedException { -+ final String anotherServiceCoordinate = ""someother.service.coordinate""; -+ -+ // It should be possible to listen for a permanent service that doesn't exist yet. Once the -+ // service is created it must trigger a callback to the clients listening. 
-+ try (final CloudnameService cloudnameService = new CloudnameService(memoryBackend)) { -+ -+ final CountDownLatch createCalls = new CountDownLatch(1); -+ final CountDownLatch removeCalls = new CountDownLatch(1); -+ final CountDownLatch updateCalls = new CountDownLatch(1); -+ -+ cloudnameService.addPermanentServiceListener( -+ ServiceCoordinate.parse(anotherServiceCoordinate), -+ new PermanentServiceListener() { -+ final AtomicInteger order = new AtomicInteger(0); -+ @Override -+ public void onServiceCreated(Endpoint endpoint) { -+ createCalls.countDown(); -+ assertThat(order.incrementAndGet(), is(1)); -+ } -+ -+ @Override -+ public void onServiceChanged(Endpoint endpoint) { -+ updateCalls.countDown(); -+ assertThat(order.incrementAndGet(), is(2)); -+ } -+ -+ @Override -+ public void onServiceRemoved() { -+ removeCalls.countDown(); -+ assertThat(order.incrementAndGet(), is(3)); -+ } -+ }); -+ -+ // Create the new service registration, change the endpoint, then remove it. The -+ // count down latches should count down and the order should be create, change, remove -+ final ServiceCoordinate another = ServiceCoordinate.parse(anotherServiceCoordinate); -+ cloudnameService.createPermanentService(another, DEFAULT_ENDPOINT); -+ cloudnameService.updatePermanentService(another, -+ new Endpoint(DEFAULT_ENDPOINT.getName(), ""otherhost"", 4711)); -+ cloudnameService.removePermanentService(another); -+ -+ final int secondsToWait = 1; -+ assertTrue(""Expected callback for create to trigger but it didn't"", -+ createCalls.await(secondsToWait, TimeUnit.SECONDS)); -+ assertTrue(""Expected callback for update to trigger but it didn't"", -+ updateCalls.await(secondsToWait, TimeUnit.SECONDS)); -+ assertTrue(""Expected callback for remove to trigger but it didn't"", -+ removeCalls.await(secondsToWait, TimeUnit.SECONDS)); -+ } -+ } -+ -+ @Test -+ public void testLeaseUpdateOnLeaseThatDoesntExist() { -+ try (final CloudnameService cloudnameService = new CloudnameService(memoryBackend)) { -+ assertThat(""Can't update a service that doesn't exist"", -+ cloudnameService.updatePermanentService( -+ ServiceCoordinate.parse(""foo.bar.baz""), DEFAULT_ENDPOINT), -+ is(false)); -+ } -+ } -+ -+ @Test -+ public void testRemoveServiceThatDoesntExist() { -+ try (final CloudnameService cloudnameService = new CloudnameService(memoryBackend)) { -+ assertThat(""Can't remove a service that doesn't exist"", -+ cloudnameService.removePermanentService(ServiceCoordinate.parse(""foo.bar.baz"")), -+ is(false)); -+ } -+ } -+ -+ @AfterClass -+ public static void removeServiceRegistration() throws InterruptedException { -+ try (final CloudnameService cloudnameService = new CloudnameService(memoryBackend)) { -+ final ServiceCoordinate serviceCoordinate = ServiceCoordinate.parse(SERVICE_COORDINATE); -+ final CountDownLatch callCounter = new CountDownLatch(2); -+ final int secondsToWait = 1; -+ cloudnameService.addPermanentServiceListener(serviceCoordinate, -+ new PermanentServiceListener() { -+ private final AtomicInteger createCount = new AtomicInteger(0); -+ private final AtomicInteger removeCount = new AtomicInteger(0); -+ -+ @Override -+ public void onServiceCreated(final Endpoint endpoint) { -+ // This will be called once and only once -+ assertThat(""Did not onServiceCreated to be called multiple times"", -+ createCount.incrementAndGet(), is(1)); -+ callCounter.countDown(); -+ } -+ -+ @Override -+ public void onServiceChanged(final Endpoint endpoint) { -+ fail(""Did not expect any calls to onServiceChanged""); -+ } -+ -+ @Override -+ 
public void onServiceRemoved() { -+ assertThat(""Did not expect onServiceRemoved to be called multiple"" -+ + "" times"", removeCount.incrementAndGet(), is(1)); -+ callCounter.countDown(); -+ } -+ }); -+ -+ // Remove the service created in the setup. -+ assertThat(cloudnameService.removePermanentService(serviceCoordinate), is(true)); -+ -+ assertTrue(""Did not receive the expected number of calls to listener. "" -+ + callCounter.getCount() + "" calls remaining."", -+ callCounter.await(secondsToWait, TimeUnit.SECONDS)); -+ -+ // Removing it twice will fail. -+ assertThat(cloudnameService.removePermanentService(serviceCoordinate), is(false)); -+ } -+ } -+ -+ private final ServiceCoordinate coordinate = ServiceCoordinate.parse(""service.tag.region""); -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void coordinateCanNotBeNullWhenUpdatingService() { -+ new CloudnameService(memoryBackend).updatePermanentService(null, null); -+ } -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void endpointCanNotBeNullWhenUpdatingService() { -+ new CloudnameService(memoryBackend).updatePermanentService(coordinate, null); -+ } -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void coordinateCanNotBeNullWhenRemovingService() { -+ new CloudnameService(memoryBackend).removePermanentService(null); -+ } -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void coordinateCanNotBeNullWhenAddingListener() { -+ new CloudnameService(memoryBackend).addPermanentServiceListener(null, null); -+ } -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void listenerCanNotBeNullWhenAddingListener() { -+ new CloudnameService(memoryBackend).addPermanentServiceListener(coordinate, null); -+ } -+ -+} -diff --git a/cn-service/src/test/java/org/cloudname/service/CloudnameServiceTest.java b/cn-service/src/test/java/org/cloudname/service/CloudnameServiceTest.java -new file mode 100644 -index 00000000..6fe4a9a9 ---- /dev/null -+++ b/cn-service/src/test/java/org/cloudname/service/CloudnameServiceTest.java -@@ -0,0 +1,316 @@ -+package org.cloudname.service; -+ -+import org.cloudname.backends.memory.MemoryBackend; -+import org.cloudname.core.CloudnameBackend; -+import org.junit.Test; -+ -+import java.util.ArrayList; -+import java.util.Arrays; -+import java.util.List; -+import java.util.Random; -+import java.util.concurrent.CountDownLatch; -+import java.util.concurrent.Executor; -+import java.util.concurrent.Executors; -+import java.util.concurrent.Semaphore; -+import java.util.concurrent.TimeUnit; -+ -+import static org.junit.Assert.assertTrue; -+import static org.junit.Assert.fail; -+import static org.junit.Assert.assertThat; -+import static org.hamcrest.CoreMatchers.is; -+ -+/** -+ * Test service registration with memory-based backend. -+ */ -+public class CloudnameServiceTest { -+ private static final CloudnameBackend memoryBackend = new MemoryBackend(); -+ -+ private final ServiceCoordinate coordinate = ServiceCoordinate.parse(""service.tag.region""); -+ -+ /** -+ * Max time to wait for changes to propagate to clients. In seconds. 
-+ */ -+ private static final int MAX_WAIT_S = 1; -+ -+ private final Random random = new Random(); -+ private int getRandomPort() { -+ return Math.max(1, Math.abs(random.nextInt(4096))); -+ } -+ -+ private ServiceHandle registerService(final CloudnameService cloudnameService, final String serviceCoordinateString) { -+ final ServiceCoordinate serviceCoordinate = ServiceCoordinate.parse(serviceCoordinateString); -+ -+ final Endpoint httpEndpoint = new Endpoint(""http"", ""127.0.0.1"", getRandomPort()); -+ final Endpoint webconsoleEndpoint = new Endpoint(""webconsole"", ""127.0.0.2"", getRandomPort()); -+ -+ final ServiceData serviceData = new ServiceData(Arrays.asList(httpEndpoint, webconsoleEndpoint)); -+ return cloudnameService.registerService(serviceCoordinate, serviceData); -+ } -+ -+ /** -+ * Create two sets of services, register both and check that notifications are sent to the -+ * subscribers. -+ */ -+ @Test -+ public void testServiceNotifications() throws InterruptedException { -+ final String SOME_COORDINATE = ""someservice.test.local""; -+ final String ANOTHER_COORDINATE = ""anotherservice.test.local""; -+ -+ final CloudnameService mainCloudname = new CloudnameService(memoryBackend); -+ -+ final int numOtherServices = 10; -+ final List handles = new ArrayList<>(); -+ for (int i = 0; i < numOtherServices; i++) { -+ handles.add(registerService(mainCloudname, ANOTHER_COORDINATE)); -+ } -+ -+ final Executor executor = Executors.newCachedThreadPool(); -+ final int numServices = 5; -+ final CountDownLatch registrationLatch = new CountDownLatch(numServices); -+ final CountDownLatch instanceLatch = new CountDownLatch(numServices * numOtherServices); -+ final CountDownLatch httpEndpointLatch = new CountDownLatch(numServices * numOtherServices); -+ final CountDownLatch webconsoleEndpointLatch = new CountDownLatch(numServices * numOtherServices); -+ final CountDownLatch removeLatch = new CountDownLatch(numServices * numOtherServices); -+ final Semaphore terminateSemaphore = new Semaphore(1); -+ final CountDownLatch completedLatch = new CountDownLatch(numServices); -+ -+ final Runnable service = new Runnable() { -+ @Override -+ public void run() { -+ try (final CloudnameService cloudnameService = new CloudnameService(memoryBackend)) { -+ try (final ServiceHandle handle = registerService(cloudnameService, SOME_COORDINATE)) { -+ registrationLatch.countDown(); -+ -+ final ServiceCoordinate otherServiceCoordinate = ServiceCoordinate.parse(ANOTHER_COORDINATE); -+ -+ // Do a service lookup on the other service. This will yield N elements. 
-+ cloudnameService.addServiceListener(otherServiceCoordinate, new ServiceListener() { -+ @Override -+ public void onServiceCreated(final InstanceCoordinate coordinate, final ServiceData data) { -+ instanceLatch.countDown(); -+ if (data.getEndpoint(""http"") != null) { -+ httpEndpointLatch.countDown(); -+ } -+ if (data.getEndpoint(""webconsole"") != null) { -+ webconsoleEndpointLatch.countDown(); -+ } -+ } -+ -+ @Override -+ public void onServiceDataChanged(final InstanceCoordinate coordinate, final ServiceData data) { -+ if (data.getEndpoint(""http"") != null) { -+ httpEndpointLatch.countDown(); -+ } -+ if (data.getEndpoint(""webconsole"") != null) { -+ webconsoleEndpointLatch.countDown(); -+ } -+ } -+ -+ @Override -+ public void onServiceRemoved(final InstanceCoordinate coordinate) { -+ removeLatch.countDown(); -+ } -+ }); -+ -+ // Wait for the go ahead before terminating -+ try { -+ terminateSemaphore.acquire(); -+ terminateSemaphore.release(); -+ } catch (final InterruptedException ie) { -+ throw new RuntimeException(ie); -+ } -+ } -+ // The service handle will close and the instance will be removed at this point. -+ } -+ completedLatch.countDown(); -+ } -+ }; -+ -+ // Grab the semaphore. This wil stop the services from terminating -+ terminateSemaphore.acquire(); -+ -+ // Start two threads which will register a service and look up a set of another. -+ for (int i = 0; i < numServices; i++) { -+ executor.execute(service); -+ } -+ -+ // Wait for the registrations and endpoints to propagate -+ assertTrue(""Expected registrations to complete"", -+ registrationLatch.await(MAX_WAIT_S, TimeUnit.SECONDS)); -+ -+ assertTrue(""Expected http endpoints to be registered but missing "" -+ + httpEndpointLatch.getCount(), -+ httpEndpointLatch.await(MAX_WAIT_S, TimeUnit.SECONDS)); -+ -+ assertTrue(""Expected webconsole endpoints to be registered but missing "" -+ + webconsoleEndpointLatch.getCount(), -+ webconsoleEndpointLatch.await(MAX_WAIT_S, TimeUnit.SECONDS)); -+ -+ // Registrations are now completed; remove the existing services -+ for (final ServiceHandle handle : handles) { -+ handle.close(); -+ } -+ -+ // This will trigger remove events in the threads. -+ assertTrue(""Expected services to be removed but "" + removeLatch.getCount() -+ + "" still remains"", removeLatch.await(MAX_WAIT_S, TimeUnit.SECONDS)); -+ -+ // Let the threads terminate. This will remove the registrations -+ terminateSemaphore.release(); -+ -+ assertTrue(""Expected services to complete but "" + completedLatch.getCount() -+ + "" still remains"", completedLatch.await(MAX_WAIT_S, TimeUnit.SECONDS)); -+ -+ // Success! There shouldn't be any more services registered at this point. 
Check to make sure -+ mainCloudname.addServiceListener(ServiceCoordinate.parse(SOME_COORDINATE), new ServiceListener() { -+ @Override -+ public void onServiceCreated(final InstanceCoordinate coordinate, final ServiceData data) { -+ fail(""Should not have any services but "" + coordinate + "" is still there""); -+ } -+ -+ @Override -+ public void onServiceDataChanged(final InstanceCoordinate coordinate, final ServiceData data) { -+ fail(""Should not have any services but "" + coordinate + "" reports data""); -+ } -+ -+ @Override -+ public void onServiceRemoved(final InstanceCoordinate coordinate) { -+ -+ } -+ }); -+ mainCloudname.addServiceListener(ServiceCoordinate.parse(ANOTHER_COORDINATE), new ServiceListener() { -+ @Override -+ public void onServiceCreated(final InstanceCoordinate coordinate, final ServiceData data) { -+ fail(""Should not have any services but "" + coordinate + "" is still there""); -+ } -+ -+ @Override -+ public void onServiceDataChanged(final InstanceCoordinate coordinate, final ServiceData data) { -+ fail(""Should not have any services but "" + coordinate + "" is still there""); -+ } -+ -+ @Override -+ public void onServiceRemoved(InstanceCoordinate coordinate) { -+ -+ } -+ }); -+ } -+ -+ /** -+ * Ensure data notifications works as expecte. Update a lot of endpoints on a single -+ * service and check that the subscribers get notified of all changes in the correct order. -+ */ -+ @Test -+ public void testDataNotifications() throws InterruptedException { -+ final CloudnameService cs = new CloudnameService(memoryBackend); -+ -+ final String serviceCoordinate = ""some.service.name""; -+ final ServiceHandle serviceHandle = cs.registerService( -+ ServiceCoordinate.parse(serviceCoordinate), -+ new ServiceData(new ArrayList())); -+ -+ final int numClients = 10; -+ final int numDataChanges = 50; -+ final int maxSecondsForNotifications = 1; -+ final CountDownLatch dataChangeLatch = new CountDownLatch(numClients * numDataChanges); -+ final CountDownLatch readyLatch = new CountDownLatch(numClients); -+ final String EP_NAME = ""endpoint""; -+ final Semaphore terminateSemaphore = new Semaphore(1); -+ -+ // Grab the semaphore, prevent threads from completing -+ terminateSemaphore.acquire(); -+ -+ final Runnable clientServices = new Runnable() { -+ @Override -+ public void run() { -+ try (final CloudnameService cn = new CloudnameService(memoryBackend)) { -+ cn.addServiceListener(ServiceCoordinate.parse(serviceCoordinate), new ServiceListener() { -+ int portNum = 0; -+ @Override -+ public void onServiceCreated(InstanceCoordinate coordinate, ServiceData serviceData) { -+ // ignore this -+ } -+ -+ @Override -+ public void onServiceDataChanged(InstanceCoordinate coordinate, ServiceData data) { -+ final Endpoint ep = data.getEndpoint(EP_NAME); -+ if (ep != null) { -+ dataChangeLatch.countDown(); -+ assertThat(ep.getPort(), is(portNum + 1)); -+ portNum = portNum + 1; -+ } -+ } -+ -+ @Override -+ public void onServiceRemoved(InstanceCoordinate coordinate) { -+ // ignore this -+ } -+ }); -+ readyLatch.countDown(); -+ -+ // Wait for the test to finish before closing. The endpoints will be -+ // processed once every thread is ready. 
-+ try { -+ terminateSemaphore.acquire(); -+ terminateSemaphore.release(); -+ } catch (final InterruptedException ie) { -+ throw new RuntimeException(ie); -+ } -+ } -+ } -+ }; -+ -+ final Executor executor = Executors.newCachedThreadPool(); -+ for (int i = 0; i < numClients; i++) { -+ executor.execute(clientServices); -+ } -+ -+ // Wait for the threads to be ready -+ readyLatch.await(); -+ -+ // Publish changes to the same endpoint; the endpoint is updated with a new port -+ // number for each update. -+ Endpoint oldEndpoint = null; -+ for (int portNum = 1; portNum < numDataChanges + 1; portNum++) { -+ if (oldEndpoint != null) { -+ serviceHandle.removeEndpoint(oldEndpoint); -+ } -+ final Endpoint newEndpoint = new Endpoint(EP_NAME, ""localhost"", portNum); -+ serviceHandle.registerEndpoint(newEndpoint); -+ oldEndpoint = newEndpoint; -+ } -+ -+ // Check if the threads have been notified of all the changes -+ assertTrue(""Expected "" + (numDataChanges * numClients) + "" changes but "" -+ + dataChangeLatch.getCount() + "" remains"", -+ dataChangeLatch.await(maxSecondsForNotifications, TimeUnit.SECONDS)); -+ -+ // Let threads terminate -+ terminateSemaphore.release(); -+ } -+ -+ @Test(expected = IllegalArgumentException.class) -+ public void coordinateCanNotBeNullWhenAddingListener() { -+ new CloudnameService(memoryBackend).addServiceListener(null, null); -+ } -+ -+ @Test(expected = IllegalArgumentException.class) -+ public void listenerCanNotBeNullWhenAddingListener() { -+ new CloudnameService(memoryBackend).addServiceListener(coordinate, null); -+ } -+ -+ @Test(expected = IllegalArgumentException.class) -+ public void serviceCannotBeNullWhenRegister() { -+ new CloudnameService(memoryBackend).registerService(null, null); -+ } -+ -+ @Test(expected = IllegalArgumentException.class) -+ public void serviceDataCannotBeNullWhenRegister() { -+ new CloudnameService(memoryBackend).registerService(coordinate, null); -+ } -+ -+ @Test(expected = IllegalArgumentException.class) -+ public void backendMustBeValid() { -+ new CloudnameService(null); -+ } -+} -diff --git a/cn-service/src/test/java/org/cloudname/service/EndpointTest.java b/cn-service/src/test/java/org/cloudname/service/EndpointTest.java -new file mode 100644 -index 00000000..8d1b368e ---- /dev/null -+++ b/cn-service/src/test/java/org/cloudname/service/EndpointTest.java -@@ -0,0 +1,97 @@ -+package org.cloudname.service; -+import org.junit.Test; -+ -+import static org.junit.Assert.assertThat; -+import static org.junit.Assert.fail; -+import static org.hamcrest.CoreMatchers.is; -+ -+/** -+ * Test the Endpoint class. Relatively straightforward; test creation and that -+ * fields are set correctly, test conversion to and from JSON, test the equals() -+ * implementation and test assertions in constructor. 
-+ */ -+public class EndpointTest { -+ @Test -+ public void testCreation() { -+ final Endpoint endpoint = new Endpoint(""foo"", ""localhost"", 80); -+ assertThat(endpoint.getName(), is(""foo"")); -+ assertThat(endpoint.getHost(), is(""localhost"")); -+ assertThat(endpoint.getPort(), is(80)); -+ } -+ -+ @Test -+ public void testJsonConversion() { -+ final Endpoint endpoint = new Endpoint(""bar"", ""baz"", 8888); -+ final String jsonString = endpoint.toJsonString(); -+ -+ final Endpoint endpointCopy = Endpoint.fromJson(jsonString); -+ -+ assertThat(endpointCopy.getName(), is(endpoint.getName())); -+ assertThat(endpointCopy.getHost(), is(endpoint.getHost())); -+ assertThat(endpointCopy.getPort(), is(endpoint.getPort())); -+ } -+ -+ @Test -+ public void testEquals() { -+ final Endpoint a = new Endpoint(""foo"", ""bar"", 1); -+ final Endpoint b = new Endpoint(""foo"", ""bar"", 1); -+ assertThat(a.equals(b), is(true)); -+ assertThat(b.equals(a), is(true)); -+ assertThat(b.hashCode(), is(a.hashCode())); -+ -+ final Endpoint c = new Endpoint(""bar"", ""foo"", 1); -+ assertThat(a.equals(c), is(false)); -+ assertThat(b.equals(c), is(false)); -+ -+ final Endpoint d = new Endpoint(""foo"", ""bar"", 2); -+ assertThat(a.equals(d), is(false)); -+ -+ final Endpoint e = new Endpoint(""foo"", ""baz"", 1); -+ assertThat(a.equals(e), is(false)); -+ -+ assertThat(a.equals(null), is(false)); -+ assertThat(a.equals(""some string""), is(false)); -+ } -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void testNullName() { -+ new Endpoint(null, ""foo"", 0); -+ fail(""Constructor should have thrown exception for null name""); -+ } -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void testEmptyName() { -+ new Endpoint("""", ""foo"", 0); -+ fail(""Constructor should have thrown exception for null name""); -+ } -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void testNullHost() { -+ new Endpoint(""foo"", null, 0); -+ fail(""Constructor should have thrown exception for null host""); -+ } -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void testEmptyHost() { -+ new Endpoint(""foo"", """", 0); -+ fail(""Constructor should have thrown exception for null host""); -+ } -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void testZeroPort() { -+ new Endpoint(""foo"", ""bar"", 0); -+ fail(""Constructor should have thrown exception for 0 port""); -+ } -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void testNegativePort() { -+ new Endpoint(""foo"", ""bar"", -1); -+ fail(""Constructor should have thrown exception for 0 port""); -+ } -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void testInvalidName() { -+ new Endpoint(""æøå"", ""bar"", 80); -+ fail(""Constructor should have thrown exception for 0 port""); -+ } -+} -diff --git a/cn-service/src/test/java/org/cloudname/service/InstanceCoordinateTest.java b/cn-service/src/test/java/org/cloudname/service/InstanceCoordinateTest.java -new file mode 100644 -index 00000000..c7009f0b ---- /dev/null -+++ b/cn-service/src/test/java/org/cloudname/service/InstanceCoordinateTest.java -@@ -0,0 +1,92 @@ -+package org.cloudname.service; -+ -+import org.cloudname.core.CloudnamePath; -+import org.junit.Test; -+ -+import static org.hamcrest.CoreMatchers.equalTo; -+import static org.hamcrest.CoreMatchers.is; -+import static org.hamcrest.CoreMatchers.not; -+import static org.hamcrest.CoreMatchers.nullValue; -+import static org.junit.Assert.assertThat; -+ -+public class 
InstanceCoordinateTest { -+ @Test -+ public void testCreation() { -+ final String[] path = new String[] { ""region"", ""tag"", ""service"", ""instance"" }; -+ final InstanceCoordinate coordinate = new InstanceCoordinate(new CloudnamePath(path)); -+ -+ final String canonicalString = coordinate.toCanonicalString(); -+ assertThat(canonicalString, is(""instance.service.tag.region"")); -+ -+ final InstanceCoordinate fromCanonical = InstanceCoordinate.parse(canonicalString); -+ assertThat(fromCanonical.toCanonicalString(), is(canonicalString)); -+ assertThat(fromCanonical.getRegion(), is(coordinate.getRegion())); -+ assertThat(fromCanonical.getTag(), is(coordinate.getTag())); -+ assertThat(fromCanonical.getService(), is(coordinate.getService())); -+ assertThat(fromCanonical.getInstance(), is(coordinate.getInstance())); -+ -+ final String jsonString = coordinate.toJsonString(); -+ final InstanceCoordinate fromJson = InstanceCoordinate.fromJson(jsonString); -+ assertThat(fromJson.getRegion(), is(coordinate.getRegion())); -+ assertThat(fromJson.getTag(), is(coordinate.getTag())); -+ assertThat(fromJson.getService(), is(coordinate.getService())); -+ assertThat(fromJson.getInstance(), is(coordinate.getInstance())); -+ assertThat(fromJson.toCanonicalString(), is(coordinate.toCanonicalString())); -+ } -+ -+ @Test -+ public void testPathConversion() { -+ final CloudnamePath path = new CloudnamePath( -+ new String[] {""test"", ""local"", ""service"", ""instance"" }); -+ -+ final InstanceCoordinate coordinate = new InstanceCoordinate(path); -+ -+ final CloudnamePath cnPath = coordinate.toCloudnamePath(); -+ assertThat(cnPath.length(), is(path.length())); -+ assertThat(cnPath, is(equalTo(path))); -+ } -+ -+ /** -+ * Ensure toString() has a sensible representation ('ish) -+ */ -+ @Test -+ public void toStringMethod() { -+ final CloudnamePath pathA = new CloudnamePath( -+ new String[] {""test"", ""local"", ""service"", ""instance"" }); -+ final CloudnamePath pathB = new CloudnamePath( -+ new String[] {""test"", ""local"", ""service"", ""instance"" }); -+ final CloudnamePath pathC = new CloudnamePath( -+ new String[] {""test"", ""local"", ""service"", ""x"" }); -+ -+ final InstanceCoordinate a = new InstanceCoordinate(pathA); -+ final InstanceCoordinate b = new InstanceCoordinate(pathB); -+ final InstanceCoordinate c = new InstanceCoordinate(pathC); -+ assertThat(a.toString(), is(a.toString())); -+ assertThat(a.toString(), is(not(c.toString()))); -+ -+ assertThat(a.toCanonicalString(), is(b.toCanonicalString())); -+ } -+ -+ @Test -+ public void invalidStringConversion() { -+ assertThat(InstanceCoordinate.parse(""foo:bar.baz""), is(nullValue())); -+ assertThat(InstanceCoordinate.parse(null), is(nullValue())); -+ assertThat(InstanceCoordinate.parse(""foo.bar.baz""), is(nullValue())); -+ assertThat(InstanceCoordinate.parse(""""), is(nullValue())); -+ } -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void invalidNames2() { -+ assertThat(InstanceCoordinate.parse(""æ.ø.å.a""), is(nullValue())); -+ } -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void nullPathInConstructor() { -+ new InstanceCoordinate(null); -+ } -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void invalidPathInConstructor() { -+ new InstanceCoordinate(new CloudnamePath(new String[] { ""foo"" })); -+ } -+} -diff --git a/cn-service/src/test/java/org/cloudname/service/ServiceCoordinateTest.java b/cn-service/src/test/java/org/cloudname/service/ServiceCoordinateTest.java -new file mode 100644 
-index 00000000..c49126e0 ---- /dev/null -+++ b/cn-service/src/test/java/org/cloudname/service/ServiceCoordinateTest.java -@@ -0,0 +1,87 @@ -+package org.cloudname.service; -+ -+import org.cloudname.core.CloudnamePath; -+import org.junit.Test; -+ -+import static org.hamcrest.CoreMatchers.equalTo; -+import static org.hamcrest.CoreMatchers.is; -+import static org.hamcrest.CoreMatchers.not; -+import static org.hamcrest.CoreMatchers.nullValue; -+import static org.junit.Assert.assertThat; -+ -+public class ServiceCoordinateTest { -+ private final CloudnamePath cnPath = new CloudnamePath( -+ new String[] { ""local"", ""test"", ""service"" }); -+ -+ -+ @Test -+ public void testCreation() { -+ final ServiceCoordinate coordinate = new ServiceCoordinate(cnPath); -+ assertThat(coordinate.getRegion(), is(cnPath.get(0))); -+ assertThat(coordinate.getTag(), is(cnPath.get(1))); -+ assertThat(coordinate.getService(), is(cnPath.get(2))); -+ } -+ -+ @Test -+ public void testParse() { -+ final ServiceCoordinate coord = ServiceCoordinate.parse(""service.tag.region""); -+ assertThat(coord.getRegion(), is(""region"")); -+ assertThat(coord.getTag(), is(""tag"")); -+ assertThat(coord.getService(), is(""service"")); -+ } -+ -+ @Test -+ public void testEquals() { -+ final ServiceCoordinate coordA = ServiceCoordinate.parse(""a.b.c""); -+ final ServiceCoordinate coordB = ServiceCoordinate.parse(""a.b.c""); -+ final ServiceCoordinate coordC = ServiceCoordinate.parse(""a.b.d""); -+ final ServiceCoordinate coordD = ServiceCoordinate.parse(""a.a.c""); -+ final ServiceCoordinate coordE = ServiceCoordinate.parse(""a.a.a""); -+ final ServiceCoordinate coordF = ServiceCoordinate.parse(""c.b.c""); -+ -+ assertThat(coordA, is(equalTo(coordB))); -+ assertThat(coordB, is(equalTo(coordA))); -+ -+ assertThat(coordA, is(not(equalTo(coordC)))); -+ assertThat(coordA, is(not(equalTo(coordD)))); -+ assertThat(coordA, is(not(equalTo(coordE)))); -+ assertThat(coordA, is(not(equalTo(coordF)))); -+ -+ assertThat(coordA.equals(null), is(false)); -+ assertThat(coordA.equals(new Object()), is(false)); -+ } -+ -+ @Test -+ public void testHashCode() { -+ final ServiceCoordinate coordA = ServiceCoordinate.parse(""a.b.c""); -+ final ServiceCoordinate coordB = ServiceCoordinate.parse(""a.b.c""); -+ final ServiceCoordinate coordC = ServiceCoordinate.parse(""x.x.x""); -+ assertThat(coordA.hashCode(), is(coordB.hashCode())); -+ assertThat(coordC.hashCode(), is(not(coordA.hashCode()))); -+ } -+ @Test -+ public void testInvalidCoordinateString0() { -+ assertThat(ServiceCoordinate.parse(""foo bar baz""), is(nullValue())); -+ } -+ -+ @Test(expected = IllegalArgumentException.class) -+ public void testInvalidCoordinateString1() { -+ ServiceCoordinate.parse(""..""); -+ } -+ -+ @Test(expected = IllegalArgumentException.class) -+ public void testInvalidCoordinateString2() { -+ ServiceCoordinate.parse(""_._._""); -+ } -+ -+ @Test(expected = IllegalArgumentException.class) -+ public void nullPathParameter() { -+ new ServiceCoordinate(null); -+ } -+ -+ @Test(expected = IllegalArgumentException.class) -+ public void illegalPathParameter() { -+ new ServiceCoordinate(new CloudnamePath(new String[] { ""foo"" })); -+ } -+ -+} -diff --git a/cn-service/src/test/java/org/cloudname/service/ServiceDataTest.java b/cn-service/src/test/java/org/cloudname/service/ServiceDataTest.java -new file mode 100644 -index 00000000..0154a78a ---- /dev/null -+++ b/cn-service/src/test/java/org/cloudname/service/ServiceDataTest.java -@@ -0,0 +1,124 @@ -+package org.cloudname.service; -+ 
-+import org.junit.Test; -+ -+import java.util.ArrayList; -+import java.util.Arrays; -+ -+import static org.hamcrest.CoreMatchers.equalTo; -+import static org.hamcrest.CoreMatchers.is; -+import static org.hamcrest.CoreMatchers.nullValue; -+import static org.junit.Assert.assertThat; -+ -+public class ServiceDataTest { -+ @Test -+ public void testCreation() { -+ final Endpoint ep1 = new Endpoint(""foo"", ""bar"", 1); -+ final Endpoint ep2 = new Endpoint(""bar"", ""baz"", 1); -+ -+ final ServiceData data = new ServiceData(Arrays.asList(ep1, ep2)); -+ assertThat(data.getEndpoint(""foo""), is(equalTo(ep1))); -+ assertThat(data.getEndpoint(""bar""), is(equalTo(ep2))); -+ assertThat(data.getEndpoint(""baz""), is(nullValue())); -+ } -+ -+ @Test -+ public void testAddRemoveEndpoint() { -+ final ServiceData data = new ServiceData(new ArrayList()); -+ assertThat(data.getEndpoint(""a""), is(nullValue())); -+ assertThat(data.getEndpoint(""b""), is(nullValue())); -+ -+ final Endpoint ep1 = new Endpoint(""a"", ""localhost"", 80); -+ final Endpoint ep1a = new Endpoint(""a"", ""localhost"", 80); -+ // Endpoint can only be added once -+ assertThat(data.addEndpoint(ep1), is(true)); -+ assertThat(data.addEndpoint(ep1), is(false)); -+ // Endpoints must be unique -+ assertThat(data.addEndpoint(ep1a), is(false)); -+ -+ // Another endpoint can be added -+ final Endpoint ep2 = new Endpoint(""b"", ""localhost"", 80); -+ final Endpoint ep2a = new Endpoint(""b"", ""localhost"", 80); -+ assertThat(data.addEndpoint(ep2), is(true)); -+ // But the same rules applies -+ assertThat(data.addEndpoint(ep2), is(false)); -+ assertThat(data.addEndpoint(ep2a), is(false)); -+ -+ // Data now contains both endpoints -+ assertThat(data.getEndpoint(""a""), is(equalTo(ep1))); -+ assertThat(data.getEndpoint(""b""), is(equalTo(ep2))); -+ -+ assertThat(data.removeEndpoint(ep1), is(true)); -+ assertThat(data.removeEndpoint(ep1a), is(false)); -+ -+ // ...ditto for next endpoint -+ assertThat(data.removeEndpoint(ep2), is(true)); -+ assertThat(data.removeEndpoint(ep2), is(false)); -+ -+ // The endpoints with identical names can be added -+ assertThat(data.addEndpoint(ep1a), is(true)); -+ assertThat(data.addEndpoint(ep2a), is(true)); -+ } -+ -+ @Test -+ public void testConversionToFromJson() { -+ final Endpoint endpointA = new Endpoint(""foo"", ""bar"", 80); -+ final Endpoint endpointB = new Endpoint(""baz"", ""bar"", 81); -+ final ServiceData dataA = new ServiceData( -+ Arrays.asList(endpointA, endpointB)); -+ -+ final String jsonString = dataA.toJsonString(); -+ -+ final ServiceData dataB = ServiceData.fromJsonString(jsonString); -+ -+ assertThat(dataB.getEndpoint(""foo""), is(endpointA)); -+ assertThat(dataB.getEndpoint(""baz""), is(endpointB)); -+ } -+ -+ @Test -+ public void uniqueNamesAreRequired() { -+ final Endpoint endpointA = new Endpoint(""foo"", ""bar"", 80); -+ final Endpoint endpointB = new Endpoint(""foo"", ""baz"", 82); -+ final Endpoint endpointC = new Endpoint(""foo"", ""localhost"", 80); -+ final Endpoint endpointD = new Endpoint(""foobar"", ""localhost"", 80); -+ -+ final ServiceData serviceData = new ServiceData(new ArrayList()); -+ assertThat(serviceData.addEndpoint(endpointA), is(true)); -+ assertThat(serviceData.addEndpoint(endpointB), is(false)); -+ assertThat(serviceData.addEndpoint(endpointC), is(false)); -+ assertThat(serviceData.addEndpoint(endpointD), is(true)); -+ } -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void testInvalidJson1() { -+ final String nullStr = null; -+ 
ServiceData.fromJsonString(nullStr); -+ } -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void testInvalidJson2() { -+ ServiceData.fromJsonString(""""); -+ } -+ -+ @Test (expected = org.json.JSONException.class) -+ public void testInvalidJson3() { -+ ServiceData.fromJsonString(""}{""); -+ } -+ -+ @Test (expected = org.json.JSONException.class) -+ public void testInvalidJson4() { -+ ServiceData.fromJsonString(""{ \""foo\"": 12 }""); -+ } -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void addNullEndpoint() { -+ final ServiceData data = new ServiceData(); -+ data.addEndpoint(null); -+ } -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void removeNullEndpoint() { -+ final ServiceData data = new ServiceData(); -+ data.removeEndpoint(null); -+ } -+} -diff --git a/cn-service/src/test/java/org/cloudname/service/ServiceHandleTest.java b/cn-service/src/test/java/org/cloudname/service/ServiceHandleTest.java -new file mode 100644 -index 00000000..5b43e37c ---- /dev/null -+++ b/cn-service/src/test/java/org/cloudname/service/ServiceHandleTest.java -@@ -0,0 +1,104 @@ -+package org.cloudname.service; -+ -+import org.cloudname.core.CloudnamePath; -+import org.cloudname.core.LeaseHandle; -+import org.junit.Test; -+ -+import java.io.IOException; -+import java.util.ArrayList; -+import java.util.Arrays; -+ -+import static org.hamcrest.CoreMatchers.is; -+import static org.junit.Assert.assertThat; -+ -+public class ServiceHandleTest { -+ -+ @Test -+ public void testCreation() { -+ final InstanceCoordinate instanceCoordinate -+ = InstanceCoordinate.parse(""instance.service.tag.region""); -+ final ServiceData serviceData = new ServiceData(new ArrayList()); -+ final LeaseHandle handle = new LeaseHandle() { -+ @Override -+ public boolean writeLeaseData(String data) { -+ return true; -+ } -+ -+ @Override -+ public CloudnamePath getLeasePath() { -+ return instanceCoordinate.toCloudnamePath(); -+ } -+ -+ @Override -+ public void close() throws IOException { -+ // nothing -+ } -+ }; -+ -+ final ServiceHandle serviceHandle -+ = new ServiceHandle(instanceCoordinate, serviceData, handle); -+ -+ final Endpoint ep1 = new Endpoint(""foo"", ""bar"", 80); -+ assertThat(serviceHandle.registerEndpoint(ep1), is(true)); -+ assertThat(serviceHandle.registerEndpoint(ep1), is(false)); -+ -+ assertThat(serviceHandle.removeEndpoint(ep1), is(true)); -+ assertThat(serviceHandle.removeEndpoint(ep1), is(false)); -+ -+ serviceHandle.close(); -+ } -+ -+ @Test -+ public void testFailingHandle() { -+ final InstanceCoordinate instanceCoordinate -+ = InstanceCoordinate.parse(""instance.service.tag.region""); -+ final Endpoint ep1 = new Endpoint(""foo"", ""bar"", 80); -+ -+ final ServiceData serviceData = new ServiceData(Arrays.asList(ep1)); -+ final LeaseHandle handle = new LeaseHandle() { -+ @Override -+ public boolean writeLeaseData(String data) { -+ return false; -+ } -+ -+ @Override -+ public CloudnamePath getLeasePath() { -+ return instanceCoordinate.toCloudnamePath(); -+ } -+ -+ @Override -+ public void close() throws IOException { -+ throw new IOException(""I broke""); -+ } -+ }; -+ -+ final ServiceHandle serviceHandle -+ = new ServiceHandle(instanceCoordinate, serviceData, handle); -+ -+ final Endpoint ep2 = new Endpoint(""bar"", ""baz"", 81); -+ assertThat(serviceHandle.registerEndpoint(ep2), is(false)); -+ -+ assertThat(serviceHandle.removeEndpoint(ep1), is(false)); -+ assertThat(serviceHandle.removeEndpoint(ep2), is(false)); -+ -+ serviceHandle.close(); -+ } -+ -+ @Test (expected = 
IllegalArgumentException.class) -+ public void testWithNullParameters1() { -+ new ServiceHandle(null, null, null); -+ } -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void testWithNullParameters2() { -+ new ServiceHandle(InstanceCoordinate.parse(""a.b.c.d""), null, null); -+ } -+ -+ @Test (expected = IllegalArgumentException.class) -+ public void testWithNullParameters3() { -+ new ServiceHandle( -+ InstanceCoordinate.parse(""a.b.c.d""), -+ new ServiceData(new ArrayList()), -+ null); -+ } -+} -diff --git a/cn-zookeeper/README.md b/cn-zookeeper/README.md -new file mode 100644 -index 00000000..3ce1e123 ---- /dev/null -+++ b/cn-zookeeper/README.md -@@ -0,0 +1,4 @@ -+# ZooKeeper backend -+ -+# Node structure -+The root path is set to `/cn` and the leases are stored in `/cn/temporary` and `/cn/permanent`. Temporary leases use ephemeral nodes with a randomly assigned 4-byte long ID. Permanent leases are named by the client. The Curator library is used for the majority of ZooKeeper access. The containing nodes have the `CONTAINER` bit set, i.e. they will be cleaned up by ZooKeeper when there's no more child nodes inside each of the containers. Note that this feature is slated for ZooKeeper 3.5 which is currently in Alpha (as of November 2015). Until then the Curator library uses regular nodes so if it is deployed on a ZooKeeper 3.4 or lower manual cleanups of nodes is necessary. -diff --git a/cn-zookeeper/pom.xml b/cn-zookeeper/pom.xml -new file mode 100644 -index 00000000..4d33d081 ---- /dev/null -+++ b/cn-zookeeper/pom.xml -@@ -0,0 +1,81 @@ -+ -+ 4.0.0 -+ -+ -+ org.cloudname -+ cloudname-parent -+ 3.0-SNAPSHOT -+ -+ -+ cn-zookeeper -+ jar -+ -+ Cloudname ZooKeeper backend -+ ZooKeeper backend for cloudname -+ https://github.com/Cloudname/cloudname -+ -+ -+ -+ org.cloudname -+ cn-core -+ -+ -+ -+ junit -+ junit -+ test -+ -+ -+ -+ org.hamcrest -+ hamcrest-all -+ 1.3 -+ -+ -+ -+ org.apache.curator -+ curator-framework -+ 2.9.0 -+ -+ -+ -+ org.apache.curator -+ curator-recipes -+ 2.9.0 -+ -+ -+ -+ org.apache.curator -+ curator-test -+ 2.9.0 -+ test -+ -+ -+ -+ org.slf4j -+ slf4j-nop -+ 1.7.6 -+ -+ -+ org.cloudname -+ testtools -+ test -+ -+ -+ -+ -+ -+ -+ -+ org.apache.maven.plugins -+ maven-surefire-plugin -+ -+ -+ org.apache.maven.plugins -+ maven-compiler-plugin -+ -+ -+ -+ -+ -diff --git a/cn-zookeeper/src/main/java/org/cloudname/backends/zookeeper/NodeCollectionWatcher.java b/cn-zookeeper/src/main/java/org/cloudname/backends/zookeeper/NodeCollectionWatcher.java -new file mode 100644 -index 00000000..e8ccf998 ---- /dev/null -+++ b/cn-zookeeper/src/main/java/org/cloudname/backends/zookeeper/NodeCollectionWatcher.java -@@ -0,0 +1,257 @@ -+package org.cloudname.backends.zookeeper; -+ -+import com.google.common.base.Charsets; -+import org.apache.zookeeper.KeeperException; -+import org.apache.zookeeper.WatchedEvent; -+import org.apache.zookeeper.Watcher; -+import org.apache.zookeeper.ZooKeeper; -+import org.apache.zookeeper.data.Stat; -+ -+import java.util.HashMap; -+import java.util.HashSet; -+import java.util.List; -+import java.util.Map; -+import java.util.Set; -+import java.util.concurrent.atomic.AtomicBoolean; -+import java.util.logging.Level; -+import java.util.logging.Logger; -+ -+/** -+ * Monitor a set of child nodes for changes. 
Needs to do this with the ZooKeeper API since -+ * Curator doesn't provide the necessary interface and the PathChildrenCache is best effort -+ * (and not even a very good effort) -+ * -+ * Watches are kept as usual and the mzxid for each node is kept. If that changes between -+ * watches it means we've missed an event and the appropriate event is generated to the -+ * listener. -+ * -+ * Note that this class only watches for changes one level down. Changes in children aren't -+ * monitored. The path must exist beforehand. -+ * -+ * @author stalehd@gmail.com -+ */ -+public class NodeCollectionWatcher { -+ private static final Logger LOG = Logger.getLogger(NodeCollectionWatcher.class.getName()); -+ -+ private final Map<String, Long> childMzxid = new HashMap<>(); -+ private final Object syncObject = new Object(); -+ -+ private final ZooKeeper zk; -+ private final String pathToWatch; -+ private final AtomicBoolean shuttingDown = new AtomicBoolean(false); -+ private final NodeWatcherListener listener; -+ -+ -+ /** -+ * @param zk ZooKeeper instance to use -+ * @param pathToWatch Path to observe -+ * @param listener Listener for callbacks -+ */ -+ public NodeCollectionWatcher( -+ final ZooKeeper zk, final String pathToWatch, final NodeWatcherListener listener) { -+ this.pathToWatch = pathToWatch; -+ this.zk = zk; -+ this.listener = listener; -+ readChildNodes(); -+ } -+ -+ /** -+ * Shut down watchers. The listener won't get notified of changes after it has been shut down. -+ */ -+ public void shutdown() { -+ shuttingDown.set(true); -+ } -+ -+ /** -+ * Watcher for node collections. Set by getChildren() -+ */ -+ private final Watcher nodeCollectionWatcher = new Watcher() { -+ @Override -+ public void process(WatchedEvent watchedEvent) { -+ switch (watchedEvent.getType()) { -+ case NodeChildrenChanged: -+ // Child values have changed, read children, generate events -+ readChildNodes(); -+ break; -+ case None: -+ // Some zookeeper event. Watches might not apply anymore. Reapply. -+ switch (watchedEvent.getState()) { -+ case ConnectedReadOnly: -+ LOG.severe(""Connected to readonly cluster""); -+ // Connected to a cluster without quorum. Nodes might not be -+ // correct but re-read the nodes. -+ readChildNodes(); -+ break; -+ case SyncConnected: -+ LOG.info(""Connected to cluster""); -+ // (re-)Connected to the cluster. Nodes must be re-read. Discard -+ // those that aren't found, keep unchanged ones. -+ readChildNodes(); -+ break; -+ case Disconnected: -+ // Disconnected from the cluster. The nodes might not be -+ // up to date (but a reconnect might solve the issue) -+ LOG.log(Level.WARNING, ""Disconnected from zk cluster""); -+ break; -+ case Expired: -+ // Session has expired. Nodes are no longer available -+ removeAllChildNodes(); -+ break; -+ default: -+ break; -+ } -+ } -+ -+ } -+ }; -+ -+ /** -+ * A watcher for the child nodes (set via getData()) -+ */ -+ private final Watcher changeWatcher = new Watcher() { -+ @Override -+ public void process(WatchedEvent watchedEvent) { -+ if (shuttingDown.get()) { -+ return; -+ } -+ switch (watchedEvent.getType()) { -+ case NodeDeleted: -+ removeChildNode(watchedEvent.getPath()); -+ break; -+ case NodeDataChanged: -+ processNode(watchedEvent.getPath()); -+ break; -+ -+ } -+ } -+ }; -+ -+ /** -+ * Remove all nodes. 
-+ */ -+ private void removeAllChildNodes() { -+ System.out.println(""Remove all child nodes""); -+ final Set nodesToRemove = new HashSet<>(); -+ synchronized (syncObject) { -+ nodesToRemove.addAll(childMzxid.keySet()); -+ } -+ for (final String node : nodesToRemove) { -+ removeChildNode(node); -+ } -+ } -+ -+ /** -+ * Read nodes from ZooKeeper, generating events as necessary. If a node is missing from the -+ * result it will generate a remove notification, ditto with new nodes and changes in nodes. -+ */ -+ private void readChildNodes() { -+ try { -+ final List childNodes = zk.getChildren(pathToWatch, nodeCollectionWatcher); -+ final Set childrenToDelete = new HashSet<>(); -+ synchronized (syncObject) { -+ childrenToDelete.addAll(childMzxid.keySet()); -+ } -+ for (final String nodeName : childNodes) { -+ processNode(pathToWatch + ""/"" + nodeName); -+ childrenToDelete.remove(pathToWatch + ""/"" + nodeName); -+ } -+ for (final String nodePath : childrenToDelete) { -+ removeChildNode(nodePath); -+ } -+ } catch (final KeeperException.ConnectionLossException e) { -+ // We've been disconnected. Let the watcher deal with it -+ if (!shuttingDown.get()) { -+ LOG.info(""Lost connection to ZooKeeper while reading child nodes.""); -+ } -+ } catch (final KeeperException.NoNodeException e) { -+ // Node has been removed. Ignore the error? -+ removeChildNode(e.getPath()); -+ } catch (final KeeperException|InterruptedException e) { -+ LOG.log(Level.WARNING, ""Got exception reading child nodes"", e); -+ } -+ } -+ -+ /** -+ * Add a node, generate create or data change notification if needed. -+ */ -+ private void processNode(final String nodePath) { -+ if (shuttingDown.get()) { -+ return; -+ } -+ try { -+ final Stat stat = new Stat(); -+ final byte[] nodeData = zk.getData(nodePath, changeWatcher, stat); -+ final String data = new String(nodeData, Charsets.UTF_8); -+ synchronized (syncObject) { -+ if (!childMzxid.containsKey(nodePath)) { -+ childMzxid.put(nodePath, stat.getMzxid()); -+ generateCreateEvent(nodePath, data); -+ return; -+ } -+ final Long zxid = childMzxid.get(nodePath); -+ if (zxid != stat.getMzxid()) { -+ // the data have changed. Generate event -+ childMzxid.put(nodePath, stat.getMzxid()); -+ generateDataChangeEvent(nodePath, data); -+ } -+ } -+ } catch (final KeeperException.ConnectionLossException e) { -+ // We've been disconnected. Let the watcher deal with it -+ if (!shuttingDown.get()) { -+ LOG.info(""Lost connection to ZooKeeper while reading child nodes.""); -+ } -+ } catch (final KeeperException.NoNodeException e) { -+ removeChildNode(e.getPath()); -+ // Node has been removed before we got to do anything. Ignore error? -+ } catch (final KeeperException|InterruptedException e) { -+ LOG.log(Level.WARNING, ""Got exception adding child node with path "" + nodePath, e); -+ } catch (Exception ex) { -+ LOG.log(Level.SEVERE, ""Pooop!"", ex); -+ } -+ } -+ -+ /** -+ * Remove node. Generate remove event if needed. 
-+ */ -+ private void removeChildNode(final String nodePath) { -+ synchronized (syncObject) { -+ if (childMzxid.containsKey(nodePath)) { -+ childMzxid.remove(nodePath); -+ generateRemoveEvent(nodePath); -+ } -+ } -+ } -+ -+ /** -+ * Invoke nodeCreated on listener -+ */ -+ private void generateCreateEvent(final String nodePath, final String data) { -+ try { -+ listener.nodeCreated(nodePath, data); -+ } catch (final Exception exception) { -+ LOG.log(Level.WARNING, ""Got exception calling listener.nodeCreated"", exception); -+ } -+ } -+ -+ /** -+ * Invoke dataChanged on listener -+ */ -+ private void generateDataChangeEvent(final String nodePath, final String data) { -+ try { -+ listener.dataChanged(nodePath, data); -+ } catch (final Exception exception) { -+ LOG.log(Level.WARNING, ""Got exception calling listener.dataChanged"", exception); -+ } -+ } -+ -+ /** -+ * Invoke nodeRemoved on listener -+ */ -+ private void generateRemoveEvent(final String nodePath) { -+ try { -+ listener.nodeRemoved(nodePath); -+ } catch (final Exception exception) { -+ LOG.log(Level.WARNING, ""Got exception calling listener.nodeRemoved"", exception); -+ } -+ } -+} -diff --git a/cn-zookeeper/src/main/java/org/cloudname/backends/zookeeper/NodeWatcherListener.java b/cn-zookeeper/src/main/java/org/cloudname/backends/zookeeper/NodeWatcherListener.java -new file mode 100644 -index 00000000..91e6a77c ---- /dev/null -+++ b/cn-zookeeper/src/main/java/org/cloudname/backends/zookeeper/NodeWatcherListener.java -@@ -0,0 +1,38 @@ -+package org.cloudname.backends.zookeeper; -+ -+/** -+ * Listener interface for node change events -+ * -+ * @author stalehd@gmail.com -+ */ -+public interface NodeWatcherListener { -+ /** -+ * A node is created. Note that rapid changes with create, data update (and even -+ * create + delete + create + data change might yield just one create notification. -+ * -+ * @param zkPath path to node -+ * @param data data of node -+ */ -+ void nodeCreated(final String zkPath, final String data); -+ -+ /** -+ * Data on a node is changed. Note that you might not get data change notifications -+ * for nodes that are created and updated within a short time span, only a create -+ * notification. -+ * Nodes that are created, deleted, then recreated will also generate this event, even if -+ * the data is unchanged. -+ * -+ * @param zkPath path of node -+ * @param data data of node -+ */ -+ void dataChanged(final String zkPath, final String data); -+ -+ /** -+ * Node is removed. -+ * -+ * @param zkPath Path of the node that is removed. 
-+ */ -+ void nodeRemoved(final String zkPath); -+} -+ -+ -diff --git a/cn-zookeeper/src/main/java/org/cloudname/backends/zookeeper/ZooKeeperBackend.java b/cn-zookeeper/src/main/java/org/cloudname/backends/zookeeper/ZooKeeperBackend.java -new file mode 100644 -index 00000000..d51632d8 ---- /dev/null -+++ b/cn-zookeeper/src/main/java/org/cloudname/backends/zookeeper/ZooKeeperBackend.java -@@ -0,0 +1,342 @@ -+package org.cloudname.backends.zookeeper; -+import com.google.common.base.Charsets; -+import org.apache.curator.RetryPolicy; -+import org.apache.curator.framework.CuratorFramework; -+import org.apache.curator.framework.CuratorFrameworkFactory; -+import org.apache.curator.retry.ExponentialBackoffRetry; -+import org.apache.zookeeper.CreateMode; -+import org.apache.zookeeper.KeeperException; -+import org.apache.zookeeper.data.Stat; -+import org.cloudname.core.CloudnameBackend; -+import org.cloudname.core.CloudnamePath; -+import org.cloudname.core.LeaseHandle; -+import org.cloudname.core.LeaseListener; -+ -+import java.io.IOException; -+import java.util.HashMap; -+import java.util.Map; -+import java.util.Random; -+import java.util.concurrent.TimeUnit; -+import java.util.concurrent.atomic.AtomicBoolean; -+import java.util.logging.Level; -+import java.util.logging.Logger; -+ -+/** -+ * A ZooKeeper backend for Cloudname. Leases are represented as nodes; client leases are ephemeral -+ * nodes inside container nodes and permanent leases are container nodes. -+ * -+ * @author stalehd@gmail.com -+ */ -+public class ZooKeeperBackend implements CloudnameBackend { -+ private static final Logger LOG = Logger.getLogger(ZooKeeperBackend.class.getName()); -+ private static final String TEMPORARY_ROOT = ""/cn/temporary/""; -+ private static final String PERMANENT_ROOT = ""/cn/permanent/""; -+ private static final int CONNECTION_TIMEOUT_SECONDS = 30; -+ -+ // PRNG for instance names. These will be ""random enough"" for instance identifiers -+ private final Random random = new Random(); -+ private final CuratorFramework curator; -+ private final Map clientListeners = new HashMap<>(); -+ private final Map permanentListeners = new HashMap<>(); -+ private final Object syncObject = new Object(); -+ /** -+ * @param connectionString ZooKeeper connection string -+ * @throws IllegalStateException if the cluster isn't available. 
-+ */ -+ public ZooKeeperBackend(final String connectionString) { -+ final RetryPolicy retryPolicy = new ExponentialBackoffRetry(200, 10); -+ curator = CuratorFrameworkFactory.newClient(connectionString, retryPolicy); -+ curator.start(); -+ -+ try { -+ curator.blockUntilConnected(CONNECTION_TIMEOUT_SECONDS, TimeUnit.SECONDS); -+ LOG.info(""Connected to zk cluster @ "" + connectionString); -+ } catch (final InterruptedException ie) { -+ throw new IllegalStateException(""Could not connect to ZooKeeper"", ie); -+ } -+ } -+ -+ @Override -+ public LeaseHandle createTemporaryLease(final CloudnamePath path, final String data) { -+ boolean created = false; -+ CloudnamePath tempInstancePath = null; -+ String tempZkPath = null; -+ while (!created) { -+ final long instanceId = random.nextLong(); -+ tempInstancePath = new CloudnamePath(path, Long.toHexString(instanceId)); -+ tempZkPath = TEMPORARY_ROOT + tempInstancePath.join('/'); -+ try { -+ -+ curator.create() -+ .creatingParentContainersIfNeeded() -+ .withMode(CreateMode.EPHEMERAL) -+ .forPath(tempZkPath, data.getBytes(Charsets.UTF_8)); -+ created = true; -+ } catch (final Exception ex) { -+ LOG.log(Level.WARNING, ""Could not create client node at "" + tempInstancePath, ex); -+ } -+ } -+ final CloudnamePath instancePath = tempInstancePath; -+ final String zkInstancePath = tempZkPath; -+ return new LeaseHandle() { -+ private AtomicBoolean closed = new AtomicBoolean(false); -+ -+ @Override -+ public boolean writeLeaseData(final String data) { -+ if (closed.get()) { -+ LOG.info(""Attempt to write data to closed leased handle "" + data); -+ return false; -+ } -+ return writeTemporaryLeaseData(instancePath, data); -+ } -+ -+ @Override -+ public CloudnamePath getLeasePath() { -+ if (closed.get()) { -+ return null; -+ } -+ return instancePath; -+ } -+ -+ @Override -+ public void close() throws IOException { -+ if (closed.get()) { -+ return; -+ } -+ try { -+ curator.delete().forPath(zkInstancePath); -+ closed.set(true); -+ } catch (final Exception ex) { -+ throw new IOException(ex); -+ } -+ } -+ }; -+ } -+ -+ @Override -+ public boolean writeTemporaryLeaseData(final CloudnamePath path, final String data) { -+ final String zkPath = TEMPORARY_ROOT + path.join('/'); -+ try { -+ final Stat nodeStat = curator.checkExists().forPath(zkPath); -+ if (nodeStat == null) { -+ LOG.log(Level.WARNING, ""Could not write client lease data for "" + path -+ + "" with data since the path does not exist. 
Data = "" + data); -+ } -+ curator.setData().forPath(zkPath, data.getBytes(Charsets.UTF_8)); -+ return true; -+ } catch (final Exception ex) { -+ LOG.log(Level.WARNING, ""Got exception writing lease data to "" + path -+ + "" with data "" + data); -+ return false; -+ } -+ } -+ -+ @Override -+ public String readTemporaryLeaseData(final CloudnamePath path) { -+ if (path == null) { -+ return null; -+ } -+ final String zkPath = TEMPORARY_ROOT + path.join('/'); -+ try { -+ curator.sync().forPath(zkPath); -+ final byte[] bytes = curator.getData().forPath(zkPath); -+ return new String(bytes, Charsets.UTF_8); -+ } catch (final Exception ex) { -+ LOG.log(Level.WARNING, ""Got exception reading client lease data at "" + path, ex); -+ } -+ return null; -+ } -+ -+ private CloudnamePath toCloudnamePath(final String zkPath, final String pathPrefix) { -+ final String clientPath = zkPath.substring(pathPrefix.length()); -+ final String[] elements = clientPath.split(""/""); -+ return new CloudnamePath(elements); -+ } -+ -+ @Override -+ public void addTemporaryLeaseListener( -+ final CloudnamePath pathToObserve, final LeaseListener listener) { -+ // Ideally the PathChildrenCache class in Curator would be used here to keep track of the -+ // changes but it is ever so slightly broken and misses most of the watches that ZooKeeper -+ // triggers, ignores the mzxid on the nodes and generally makes a mess of things. Enter -+ // custom code. -+ final String zkPath = TEMPORARY_ROOT + pathToObserve.join('/'); -+ try { -+ curator.createContainers(zkPath); -+ final NodeCollectionWatcher watcher = new NodeCollectionWatcher(curator.getZookeeperClient().getZooKeeper(), -+ zkPath, -+ new NodeWatcherListener() { -+ @Override -+ public void nodeCreated(final String path, final String data) { -+ listener.leaseCreated(toCloudnamePath(path, TEMPORARY_ROOT), data); -+ } -+ @Override -+ public void dataChanged(final String path, final String data) { -+ listener.dataChanged(toCloudnamePath(path, TEMPORARY_ROOT), data); -+ } -+ @Override -+ public void nodeRemoved(final String path) { -+ listener.leaseRemoved(toCloudnamePath(path, TEMPORARY_ROOT)); -+ } -+ }); -+ -+ synchronized (syncObject) { -+ clientListeners.put(listener, watcher); -+ } -+ } catch (final Exception exception) { -+ LOG.log(Level.WARNING, ""Got exception when creating node watcher"", exception); -+ } -+ } -+ -+ @Override -+ public void removeTemporaryLeaseListener(final LeaseListener listener) { -+ synchronized (syncObject) { -+ final NodeCollectionWatcher watcher = clientListeners.get(listener); -+ if (watcher != null) { -+ clientListeners.remove(listener); -+ watcher.shutdown(); -+ } -+ } -+ } -+ -+ @Override -+ public boolean createPermanantLease(final CloudnamePath path, final String data) { -+ final String zkPath = PERMANENT_ROOT + path.join('/'); -+ try { -+ curator.sync().forPath(zkPath); -+ final Stat nodeStat = curator.checkExists().forPath(zkPath); -+ if (nodeStat == null) { -+ curator.create() -+ .creatingParentContainersIfNeeded() -+ .forPath(zkPath, data.getBytes(Charsets.UTF_8)); -+ return true; -+ } -+ LOG.log(Level.INFO, ""Attempt to create permanent node at "" + path -+ + "" with data "" + data + "" but it already exists""); -+ } catch (final Exception ex) { -+ LOG.log(Level.WARNING, ""Got exception creating parent container for permanent lease"" -+ + "" for lease "" + path + "" with data "" + data, ex); -+ } -+ return false; -+ } -+ -+ @Override -+ public boolean removePermanentLease(final CloudnamePath path) { -+ final String zkPath = 
PERMANENT_ROOT + path.join('/'); -+ try { -+ final Stat nodeStat = curator.checkExists().forPath(zkPath); -+ if (nodeStat != null) { -+ curator.delete() -+ .withVersion(nodeStat.getVersion()) -+ .forPath(zkPath); -+ return true; -+ } -+ return false; -+ } catch (final Exception ex) { -+ LOG.log(Level.WARNING, ""Got error removing permanent lease for lease "" + path, ex); -+ return false; -+ } -+ } -+ -+ @Override -+ public boolean writePermanentLeaseData(final CloudnamePath path, final String data) { -+ final String zkPath = PERMANENT_ROOT + path.join('/'); -+ try { -+ curator.sync().forPath(zkPath); -+ final Stat nodeStat = curator.checkExists().forPath(zkPath); -+ if (nodeStat == null) { -+ LOG.log(Level.WARNING, ""Can't write permanent lease data for lease "" + path -+ + "" with data "" + data + "" since the lease doesn't exist""); -+ return false; -+ } -+ curator.setData() -+ .withVersion(nodeStat.getVersion()) -+ .forPath(zkPath, data.getBytes(Charsets.UTF_8)); -+ } catch (final Exception ex) { -+ LOG.log(Level.WARNING, ""Got exception writing permanent lease data for "" + path -+ + "" with data "" + data, ex); -+ return false; -+ } -+ return true; -+ } -+ -+ @Override -+ public String readPermanentLeaseData(final CloudnamePath path) { -+ final String zkPath = PERMANENT_ROOT + path.join('/'); -+ try { -+ curator.sync().forPath(zkPath); -+ final byte[] bytes = curator.getData().forPath(zkPath); -+ return new String(bytes, Charsets.UTF_8); -+ } catch (final Exception ex) { -+ if (ex instanceof KeeperException.NoNodeException) { -+ // OK - nothing to worry about -+ return null; -+ } -+ LOG.log(Level.WARNING, ""Got exception reading permanent lease data for "" + path, ex); -+ return null; -+ } -+ } -+ -+ @Override -+ public void addPermanentLeaseListener(final CloudnamePath pathToObserve, final LeaseListener listener) { -+ try { -+ -+ final String parentPath = PERMANENT_ROOT + pathToObserve.getParent().join('/'); -+ final String fullPath = PERMANENT_ROOT + pathToObserve.join('/'); -+ curator.createContainers(parentPath); -+ final NodeCollectionWatcher watcher = new NodeCollectionWatcher(curator.getZookeeperClient().getZooKeeper(), -+ parentPath, -+ new NodeWatcherListener() { -+ @Override -+ public void nodeCreated(final String path, final String data) { -+ if (path.equals(fullPath)) { -+ listener.leaseCreated(toCloudnamePath(path, PERMANENT_ROOT), data); -+ } -+ } -+ @Override -+ public void dataChanged(final String path, final String data) { -+ if (path.equals(fullPath)) { -+ listener.dataChanged(toCloudnamePath(path, PERMANENT_ROOT), data); -+ } -+ } -+ @Override -+ public void nodeRemoved(final String path) { -+ if (path.equals(fullPath)) { -+ listener.leaseRemoved(toCloudnamePath(path, PERMANENT_ROOT)); -+ } -+ } -+ }); -+ -+ synchronized (syncObject) { -+ permanentListeners.put(listener, watcher); -+ } -+ } catch (final Exception exception) { -+ LOG.log(Level.WARNING, ""Got exception when creating node watcher"", exception); -+ } -+ } -+ -+ @Override -+ public void removePermanentLeaseListener(final LeaseListener listener) { -+ synchronized (syncObject) { -+ final NodeCollectionWatcher watcher = permanentListeners.get(listener); -+ if (watcher != null) { -+ permanentListeners.remove(listener); -+ watcher.shutdown(); -+ } -+ } -+ } -+ -+ @Override -+ public void close() { -+ synchronized (syncObject) { -+ for (final NodeCollectionWatcher watcher : clientListeners.values()) { -+ watcher.shutdown(); -+ } -+ clientListeners.clear(); -+ for (final NodeCollectionWatcher watcher : 
permanentListeners.values()) { -+ watcher.shutdown(); -+ } -+ permanentListeners.clear(); -+ } -+ } -+} -diff --git a/cn-zookeeper/src/test/java/org/cloudname/backends/zookeeper/NodeCollectionWatcherTest.java b/cn-zookeeper/src/test/java/org/cloudname/backends/zookeeper/NodeCollectionWatcherTest.java -new file mode 100644 -index 00000000..adb41310 ---- /dev/null -+++ b/cn-zookeeper/src/test/java/org/cloudname/backends/zookeeper/NodeCollectionWatcherTest.java -@@ -0,0 +1,367 @@ -+package org.cloudname.backends.zookeeper; -+ -+import org.apache.curator.CuratorConnectionLossException; -+import org.apache.curator.RetryPolicy; -+import org.apache.curator.framework.CuratorFramework; -+import org.apache.curator.framework.CuratorFrameworkFactory; -+import org.apache.curator.retry.RetryUntilElapsed; -+import org.apache.curator.test.InstanceSpec; -+import org.apache.curator.test.TestingCluster; -+import org.apache.zookeeper.ZooKeeper; -+import org.apache.zookeeper.data.Stat; -+import org.junit.AfterClass; -+import org.junit.BeforeClass; -+import org.junit.Test; -+ -+import java.nio.charset.Charset; -+import java.util.concurrent.CountDownLatch; -+import java.util.concurrent.TimeUnit; -+import java.util.concurrent.atomic.AtomicBoolean; -+import java.util.concurrent.atomic.AtomicInteger; -+ -+import static org.hamcrest.CoreMatchers.is; -+import static org.hamcrest.CoreMatchers.not; -+import static org.hamcrest.CoreMatchers.notNullValue; -+import static org.junit.Assert.assertThat; -+import static org.junit.Assert.assertTrue; -+import static org.junit.Assume.assumeThat; -+ -+/** -+ * Test the node watching mechanism. -+ */ -+public class NodeCollectionWatcherTest { -+ private static TestingCluster zkServer; -+ private static CuratorFramework curator; -+ private static ZooKeeper zooKeeper; -+ -+ @BeforeClass -+ public static void setUp() throws Exception { -+ zkServer = new TestingCluster(3); -+ zkServer.start(); -+ final RetryPolicy retryPolicy = new RetryUntilElapsed(60000, 100); -+ curator = CuratorFrameworkFactory.newClient(zkServer.getConnectString(), retryPolicy); -+ curator.start(); -+ curator.blockUntilConnected(10, TimeUnit.SECONDS); -+ zooKeeper = curator.getZookeeperClient().getZooKeeper(); -+ } -+ -+ @AfterClass -+ public static void tearDown() throws Exception { -+ zkServer.close(); -+ } -+ -+ private final AtomicInteger counter = new AtomicInteger(0); -+ -+ private byte[] getData() { -+ return ("""" + counter.incrementAndGet()).getBytes(Charset.defaultCharset()); -+ } -+ -+ /** -+ * A custom listener that counts and counts down notifications. 
-+ */ -+ private class ListenerCounter implements NodeWatcherListener { -+ // Then a few counters to check the number of events -+ public AtomicInteger createCount = new AtomicInteger(0); -+ public AtomicInteger dataCount = new AtomicInteger(0); -+ public AtomicInteger removeCount = new AtomicInteger(0); -+ public CountDownLatch createLatch; -+ public CountDownLatch dataLatch; -+ public CountDownLatch removeLatch; -+ -+ public ListenerCounter(final int createLatchCount, final int dataLatchCount, final int removeLatchCount) { -+ createLatch = new CountDownLatch(createLatchCount); -+ dataLatch = new CountDownLatch(dataLatchCount); -+ removeLatch = new CountDownLatch(removeLatchCount); -+ } -+ -+ @Override -+ public void nodeCreated(String zkPath, String data) { -+ createCount.incrementAndGet(); -+ createLatch.countDown(); -+ } -+ -+ @Override -+ public void dataChanged(String zkPath, String data) { -+ dataCount.incrementAndGet(); -+ dataLatch.countDown(); -+ } -+ -+ @Override -+ public void nodeRemoved(String zkPath) { -+ removeCount.incrementAndGet(); -+ removeLatch.countDown(); -+ } -+ } -+ -+ @Test -+ public void sequentialNotifications() throws Exception { -+ final int maxPropagationTime = 4; -+ -+ final String pathPrefix = ""/foo/slow""; -+ curator.create().creatingParentsIfNeeded().forPath(pathPrefix); -+ -+ final ListenerCounter listener = new ListenerCounter(1, 1, 1); -+ -+ final NodeCollectionWatcher nodeCollectionWatcher = new NodeCollectionWatcher(zooKeeper, pathPrefix, listener); -+ -+ // Create should trigger create notification (and no other notification) -+ curator.create().forPath(pathPrefix + ""/node1"", getData()); -+ assertTrue(listener.createLatch.await(maxPropagationTime, TimeUnit.MILLISECONDS)); -+ assertThat(listener.createCount.get(), is(1)); -+ assertThat(listener.dataCount.get(), is(0)); -+ assertThat(listener.removeCount.get(), is(0)); -+ -+ // Data change should trigger the data notification (and no other notification) -+ curator.setData().forPath(pathPrefix + ""/node1"", getData()); -+ assertTrue(listener.dataLatch.await(maxPropagationTime, TimeUnit.MILLISECONDS)); -+ assertThat(listener.createCount.get(), is(1)); -+ assertThat(listener.dataCount.get(), is(1)); -+ assertThat(listener.removeCount.get(), is(0)); -+ -+ // Delete should trigger the remove notification (and no other notification) -+ curator.delete().forPath(pathPrefix + ""/node1""); -+ assertTrue(listener.removeLatch.await(maxPropagationTime, TimeUnit.MILLISECONDS)); -+ assertThat(listener.createCount.get(), is(1)); -+ assertThat(listener.dataCount.get(), is(1)); -+ assertThat(listener.removeCount.get(), is(1)); -+ -+ nodeCollectionWatcher.shutdown(); -+ -+ // Ensure that there are no notifications when the watcher shuts down -+ curator.create().forPath(pathPrefix + ""node_9"", getData()); -+ Thread.sleep(maxPropagationTime); -+ assertThat(listener.createCount.get(), is(1)); -+ assertThat(listener.dataCount.get(), is(1)); -+ assertThat(listener.removeCount.get(), is(1)); -+ -+ curator.setData().forPath(pathPrefix + ""node_9"", getData()); -+ Thread.sleep(maxPropagationTime); -+ assertThat(listener.createCount.get(), is(1)); -+ assertThat(listener.dataCount.get(), is(1)); -+ assertThat(listener.removeCount.get(), is(1)); -+ -+ curator.delete().forPath(pathPrefix + ""node_9""); -+ Thread.sleep(maxPropagationTime); -+ assertThat(listener.createCount.get(), is(1)); -+ assertThat(listener.dataCount.get(), is(1)); -+ assertThat(listener.removeCount.get(), is(1)); -+ } -+ -+ /** -+ * Make rapid changes to 
ZooKeeper. The changes (most likely) won't be caught by the -+ * watcher events but must be generated by the class itself. Ensure the correct number -+ * of notifications is generated. -+ */ -+ @Test -+ public void rapidChanges() throws Exception { -+ final int maxPropagationTime = 100; -+ -+ final String pathPrefix = ""/foo/rapido""; -+ -+ curator.create().creatingParentsIfNeeded().forPath(pathPrefix); -+ -+ final int numNodes = 50; -+ final ListenerCounter listener = new ListenerCounter(numNodes, 0, numNodes); -+ -+ final NodeCollectionWatcher nodeCollectionWatcher = new NodeCollectionWatcher(zooKeeper, pathPrefix, listener); -+ // Create all of the nodes at once -+ for (int i = 0; i < numNodes; i++) { -+ curator.create().forPath(pathPrefix + ""/node"" + i, getData()); -+ } -+ assertTrue(listener.createLatch.await(maxPropagationTime, TimeUnit.MILLISECONDS)); -+ assertThat(listener.createCount.get(), is(numNodes)); -+ assertThat(listener.dataCount.get(), is(0)); -+ assertThat(listener.removeCount.get(), is(0)); -+ -+ // Repeat data test multiple times to ensure data changes are detected -+ // repeatedly on the same nodes -+ int total = 0; -+ for (int j = 0; j < 5; j++) { -+ listener.dataLatch = new CountDownLatch(numNodes); -+ // Since there's a watch for every node all of the data changes should be detected -+ for (int i = 0; i < numNodes; i++) { -+ curator.setData().forPath(pathPrefix + ""/node"" + i, getData()); -+ } -+ total += numNodes; -+ assertTrue(listener.dataLatch.await(maxPropagationTime, TimeUnit.MILLISECONDS)); -+ assertThat(listener.createCount.get(), is(numNodes)); -+ assertThat(listener.dataCount.get(), is(total)); -+ assertThat(listener.removeCount.get(), is(0)); -+ } -+ -+ // Finally, remove everything in rapid succession -+ // Create all of the nodes at once -+ for (int i = 0; i < numNodes; i++) { -+ curator.delete().forPath(pathPrefix + ""/node"" + i); -+ } -+ -+ assertTrue(listener.removeLatch.await(maxPropagationTime, TimeUnit.MILLISECONDS)); -+ assertThat(listener.createCount.get(), is(numNodes)); -+ assertThat(listener.dataCount.get(), is(total)); -+ assertThat(listener.removeCount.get(), is(numNodes)); -+ -+ nodeCollectionWatcher.shutdown(); -+ } -+ -+ /** -+ * Emulate a network partition by killing off two out of three ZooKeeper instances -+ * and check the output. Set the system property NodeWatcher.SlowTests to ""ok"" to enable -+ * it. The test itself can be quite slow depending on what Curator is connected to. If -+ * Curator uses one of the servers that are killed it will try a reconnect and the whole -+ * test might take up to 120-180 seconds to complete. 
-+ */ -+ @Test -+ public void networkPartitionTest() throws Exception { -+ assumeThat(System.getProperty(""NodeCollectionWatcher.SlowTests""), is(""ok"")); -+ -+ final int maxPropagationTime = 10; -+ -+ final String pathPrefix = ""/foo/partition""; -+ curator.create().creatingParentsIfNeeded().forPath(pathPrefix); -+ -+ final int nodeCount = 10; -+ -+ final ListenerCounter listener = new ListenerCounter(nodeCount, nodeCount, nodeCount); -+ -+ final NodeCollectionWatcher nodeCollectionWatcher = new NodeCollectionWatcher(zooKeeper, pathPrefix, listener); -+ -+ // Create a few nodes to set the initial state -+ for (int i = 0; i < nodeCount; i++) { -+ curator.create().forPath(pathPrefix + ""/node"" + i, getData()); -+ } -+ assertTrue(listener.createLatch.await(maxPropagationTime, TimeUnit.MILLISECONDS)); -+ assertThat(listener.createCount.get(), is(nodeCount)); -+ assertThat(listener.removeCount.get(), is(0)); -+ assertThat(listener.dataCount.get(), is(0)); -+ -+ final InstanceSpec firstInstance = zkServer.findConnectionInstance(zooKeeper); -+ zkServer.killServer(firstInstance); -+ -+ listener.createLatch = new CountDownLatch(1); -+ // Client should reconnect to one of the two remaining -+ curator.create().forPath(pathPrefix + ""/stillalive"", getData()); -+ // Wait for the notification to go through. This could take some time since there's -+ // reconnects and all sorts of magic happening under the hood -+ assertTrue(listener.createLatch.await(10, TimeUnit.SECONDS)); -+ assertThat(listener.createCount.get(), is(nodeCount + 1)); -+ assertThat(listener.removeCount.get(), is(0)); -+ assertThat(listener.dataCount.get(), is(0)); -+ -+ // Kill the 2nd server. The cluster won't have a quorum now -+ final InstanceSpec secondInstance = zkServer.findConnectionInstance(zooKeeper); -+ assertThat(firstInstance, is(not(secondInstance))); -+ zkServer.killServer(secondInstance); -+ -+ boolean retry; -+ do { -+ System.out.println(""Checking node with Curator... This might take a while...""); -+ try { -+ final Stat stat = curator.checkExists().forPath(pathPrefix); -+ retry = false; -+ assertThat(stat, is(notNullValue())); -+ } catch (CuratorConnectionLossException ex) { -+ System.out.println(""Missing connection. Retrying""); -+ retry = true; -+ } -+ } while (retry); -+ -+ zkServer.restartServer(firstInstance); -+ zkServer.restartServer(secondInstance); -+ listener.createLatch = new CountDownLatch(1); -+ -+ System.out.println(""Creating node via Curator... 
This might take a while...""); -+ curator.create().forPath(pathPrefix + ""/imback"", getData()); -+ -+ assertTrue(listener.createLatch.await(maxPropagationTime, TimeUnit.MILLISECONDS)); -+ assertThat(listener.createCount.get(), is(nodeCount + 2)); -+ assertThat(listener.removeCount.get(), is(0)); -+ assertThat(listener.dataCount.get(), is(0)); -+ -+ // Ensure data notifications are propagated after a failure -+ for (int i = 0; i < nodeCount; i++) { -+ final Stat stat = curator.setData().forPath(pathPrefix + ""/node"" + i, getData()); -+ assertThat(stat, is(notNullValue())); -+ } -+ assertTrue(listener.dataLatch.await(maxPropagationTime, TimeUnit.MILLISECONDS)); -+ assertThat(listener.createCount.get(), is(nodeCount + 2)); -+ assertThat(listener.removeCount.get(), is(0)); -+ assertThat(listener.dataCount.get(), is(nodeCount)); -+ -+ // ..and remove notifications are sent -+ for (int i = 0; i < nodeCount; i++) { -+ curator.delete().forPath(pathPrefix + ""/node"" + i); -+ } -+ assertTrue(listener.removeLatch.await(maxPropagationTime, TimeUnit.MILLISECONDS)); -+ assertThat(listener.createCount.get(), is(nodeCount + 2)); -+ assertThat(listener.removeCount.get(), is(nodeCount)); -+ assertThat(listener.dataCount.get(), is(nodeCount)); -+ -+ nodeCollectionWatcher.shutdown(); -+ -+ } -+ -+ /** -+ * Be a misbehaving client and throw exceptions in the listners. Ensure the watcher still works -+ * afterwards. -+ */ -+ @Test -+ public void misbehavingClient() throws Exception { -+ final int propagationTime = 5; -+ -+ final AtomicBoolean triggerExceptions = new AtomicBoolean(false); -+ final CountDownLatch createLatch = new CountDownLatch(1); -+ final CountDownLatch dataLatch = new CountDownLatch(1); -+ final CountDownLatch removeLatch = new CountDownLatch(1); -+ -+ final NodeWatcherListener listener = new NodeWatcherListener() { -+ @Override -+ public void nodeCreated(String zkPath, String data) { -+ if (triggerExceptions.get()) { -+ throw new RuntimeException(""boo!""); -+ } -+ createLatch.countDown(); -+ } -+ -+ @Override -+ public void dataChanged(String zkPath, String data) { -+ if (triggerExceptions.get()) { -+ throw new RuntimeException(""boo!""); -+ } -+ dataLatch.countDown(); -+ } -+ -+ @Override -+ public void nodeRemoved(String zkPath) { -+ if (triggerExceptions.get()) { -+ throw new RuntimeException(""boo!""); -+ } -+ removeLatch.countDown(); -+ } -+ }; -+ -+ final String pathPrefix = ""/foo/misbehaving""; -+ -+ curator.create().creatingParentsIfNeeded().forPath(pathPrefix); -+ -+ final NodeCollectionWatcher nodeCollectionWatcher = new NodeCollectionWatcher(zooKeeper, pathPrefix, listener); -+ -+ triggerExceptions.set(true); -+ curator.create().forPath(pathPrefix + ""/first"", getData()); -+ Thread.sleep(propagationTime); -+ curator.setData().forPath(pathPrefix + ""/first"", getData()); -+ Thread.sleep(propagationTime); -+ curator.delete().forPath(pathPrefix + ""/first""); -+ Thread.sleep(propagationTime); -+ -+ // Now create a node but without setting the data field. 
-+ triggerExceptions.set(false); -+ curator.create().forPath(pathPrefix + ""/second""); -+ assertTrue(createLatch.await(propagationTime, TimeUnit.MILLISECONDS)); -+ curator.setData().forPath(pathPrefix + ""/second"", getData()); -+ assertTrue(dataLatch.await(propagationTime, TimeUnit.MILLISECONDS)); -+ curator.delete().forPath(pathPrefix + ""/second""); -+ assertTrue(removeLatch.await(propagationTime, TimeUnit.MILLISECONDS)); -+ -+ nodeCollectionWatcher.shutdown(); -+ } -+} -diff --git a/cn-zookeeper/src/test/java/org/cloudname/backends/zookeeper/ZooKeeperBackendTest.java b/cn-zookeeper/src/test/java/org/cloudname/backends/zookeeper/ZooKeeperBackendTest.java -new file mode 100644 -index 00000000..393b78b4 ---- /dev/null -+++ b/cn-zookeeper/src/test/java/org/cloudname/backends/zookeeper/ZooKeeperBackendTest.java -@@ -0,0 +1,36 @@ -+package org.cloudname.backends.zookeeper; -+ -+import org.apache.curator.test.TestingCluster; -+import org.cloudname.core.CloudnameBackend; -+import org.cloudname.testtools.backend.CoreBackendTest; -+import org.junit.AfterClass; -+import org.junit.BeforeClass; -+ -+import java.util.concurrent.atomic.AtomicReference; -+ -+/** -+ * Test the ZooKeeper backend. -+ */ -+public class ZooKeeperBackendTest extends CoreBackendTest { -+ private static TestingCluster testCluster; -+ private AtomicReference backend = new AtomicReference<>(null); -+ -+ @BeforeClass -+ public static void setUp() throws Exception { -+ testCluster = new TestingCluster(3); -+ testCluster.start(); -+ } -+ -+ @AfterClass -+ public static void tearDown() throws Exception { -+ testCluster.stop(); -+ } -+ -+ protected CloudnameBackend getBackend() { -+ if (backend.get() == null) { -+ backend.compareAndSet(null, new ZooKeeperBackend(testCluster.getConnectString())); -+ } -+ return backend.get(); -+ -+ } -+} -diff --git a/cn/pom.xml b/cn/pom.xml -deleted file mode 100644 -index f46d83ff..00000000 ---- a/cn/pom.xml -+++ /dev/null -@@ -1,93 +0,0 @@ -- -- 4.0.0 -- -- -- org.cloudname -- cloudname-parent -- 3.0-SNAPSHOT -- -- -- cn -- jar -- -- Cloudname Library -- Simple library for managing resources using ZooKeeper. 
-- https://github.com/Cloudname/cloudname -- -- -- -- org.cloudname -- testtools -- -- -- -- org.apache.zookeeper -- zookeeper -- -- -- -- com.fasterxml.jackson.core -- jackson-databind -- -- -- -- org.cloudname -- flags -- -- -- -- junit -- junit-dep -- test -- -- -- -- org.hamcrest -- hamcrest-all -- 1.3 -- -- -- -- -- -- -- org.dstovall -- onejar-maven-plugin -- 1.4.4 -- -- -- -- -- true -- org.cloudname.zk.ZkTool -- ZkTool.jar -- -- -- one-jar -- -- -- -- -- -- org.apache.maven.plugins -- maven-surefire-plugin -- -- -- org.apache.maven.plugins -- maven-antrun-plugin -- -- -- org.codehaus.mojo -- build-helper-maven-plugin -- -- -- org.apache.maven.plugins -- maven-compiler-plugin -- -- -- maven-failsafe-plugin -- -- -- -- -diff --git a/cn/src/integrationtest/java/org/cloudname/zk/ZkCloudnameIntegrationTest.java b/cn/src/integrationtest/java/org/cloudname/zk/ZkCloudnameIntegrationTest.java -deleted file mode 100644 -index b5d7daaa..00000000 ---- a/cn/src/integrationtest/java/org/cloudname/zk/ZkCloudnameIntegrationTest.java -+++ /dev/null -@@ -1,480 +0,0 @@ --package org.cloudname.zk; -- --import org.apache.zookeeper.WatchedEvent; --import org.apache.zookeeper.Watcher; --import org.apache.zookeeper.ZooDefs; --import org.apache.zookeeper.ZooKeeper; --import org.cloudname.Cloudname; --import org.cloudname.CloudnameException; --import org.cloudname.Coordinate; --import org.cloudname.CoordinateException; --import org.cloudname.CoordinateExistsException; --import org.cloudname.CoordinateListener; --import org.cloudname.ServiceHandle; --import org.cloudname.ServiceState; --import org.cloudname.ServiceStatus; --import org.cloudname.testtools.Net; --import org.cloudname.testtools.network.PortForwarder; --import org.cloudname.testtools.zookeeper.EmbeddedZooKeeper; --import org.junit.After; --import org.junit.Before; --import org.junit.Rule; --import org.junit.Test; --import org.junit.rules.TemporaryFolder; -- --import java.io.File; --import java.util.ArrayList; --import java.util.HashSet; --import java.util.List; --import java.util.Set; --import java.util.concurrent.CopyOnWriteArrayList; --import java.util.concurrent.CountDownLatch; --import java.util.concurrent.TimeUnit; --import java.util.logging.Logger; -- --import static org.junit.Assert.*; -- --/** -- * Integration tests for testing ZkCloudname. -- * Contains mostly heavy tests containing sleep calls not fit as a unit test. -- */ --public class ZkCloudnameIntegrationTest { -- private static final Logger LOG = Logger.getLogger(ZkCloudnameIntegrationTest.class.getName()); -- -- private EmbeddedZooKeeper ezk; -- private ZooKeeper zk; -- private int zkport; -- private PortForwarder forwarder = null; -- private int forwarderPort; -- private ZkCloudname cn = null; -- -- @Rule -- public TemporaryFolder temp = new TemporaryFolder(); -- -- /** -- * Set up an embedded ZooKeeper instance backed by a temporary -- * directory. The setup procedure also allocates a port that is -- * free for the ZooKeeper server so that you should be able to run -- * multiple instances of this test. 
-- */ -- @Before -- public void setup() throws Exception { -- File rootDir = temp.newFolder(""zk-test""); -- zkport = Net.getFreePort(); -- -- LOG.info(""EmbeddedZooKeeper rootDir="" + rootDir.getCanonicalPath() + "", port="" + zkport); -- -- // Set up and initialize the embedded ZooKeeper -- ezk = new EmbeddedZooKeeper(rootDir, zkport); -- ezk.init(); -- -- // Set up a zookeeper client that we can use for inspection -- final CountDownLatch connectedLatch = new CountDownLatch(1); -- -- zk = new ZooKeeper(""localhost:"" + zkport, 1000, new Watcher() { -- @Override -- public void process(WatchedEvent event) { -- if (event.getState() == Event.KeeperState.SyncConnected) { -- connectedLatch.countDown(); -- } -- } -- }); -- connectedLatch.await(); -- -- LOG.info(""ZooKeeper port is "" + zkport); -- } -- -- @After -- public void tearDown() throws Exception { -- zk.close(); -- if (forwarder != null) { -- forwarder.close(); -- } -- ezk.shutdown(); -- } -- -- /** -- * A coordinate listener that stores events and calls a latch. -- */ -- class TestCoordinateListener implements CoordinateListener { -- private final List events = new CopyOnWriteArrayList(); -- -- private final Set listenerLatches; -- -- private final List waitForEvent = new ArrayList(); -- private final Object eventMonitor = new Object(); -- private final List waitForLatch = new ArrayList(); -- -- public boolean failOnWrongEvent = false; -- private CountDownLatch latestLatch = null; -- -- void waitForExpected() throws InterruptedException { -- final CountDownLatch latch; -- synchronized (eventMonitor) { -- if (waitForEvent.size() > 0) { -- LOG.info(""Waiting for event "" + waitForEvent.get(waitForEvent.size() - 1)); -- latch = latestLatch; -- } else { -- return; -- } -- } -- assert(latch.await(25, TimeUnit.SECONDS)); -- LOG.info(""Event happened.""); -- } -- -- public TestCoordinateListener(final Set listenerLatches) { -- this.listenerLatches = listenerLatches; -- } -- -- public void expectEvent(final Event event) { -- LOG.info(""Expecting event "" + event.name()); -- synchronized (eventMonitor) { -- waitForEvent.add(event); -- latestLatch = new CountDownLatch(1); -- waitForLatch.add(latestLatch); -- } -- } -- -- @Override -- public void onCoordinateEvent(Event event, String message) { -- LOG.info(""I got event ..."" + event.name() + "" "" + message); -- synchronized (eventMonitor) { -- if (waitForEvent.size() > 0) { -- LOG.info(""Waiting for event "" + waitForEvent.get(0)); -- } else { -- LOG.info(""not expecting any specific events""); -- } -- events.add(event); -- for (CountDownLatch countDownLatch :listenerLatches) { -- countDownLatch.countDown(); -- } -- if (waitForEvent.size() > 0 && waitForEvent.get(0) == event) { -- waitForLatch.remove(0).countDown(); -- waitForEvent.remove(0); -- } else { -- assertFalse(failOnWrongEvent); -- } -- } -- } -- } -- -- private TestCoordinateListener setUpListenerEnvironment( -- final CountDownLatch latch) throws Exception { -- Set latches = new HashSet(); -- latches.add(latch); -- return setUpListenerEnvironment(latches); -- } -- -- private TestCoordinateListener setUpListenerEnvironment( -- final Set listenerLatches) throws Exception { -- forwarderPort = Net.getFreePort(); -- forwarder = new PortForwarder(forwarderPort, ""127.0.0.1"", zkport); -- final Coordinate c = Coordinate.parse(""1.service.user.cell""); -- -- cn = makeLocalZkCloudname(forwarderPort); -- try { -- cn.createCoordinate(c); -- } catch (CoordinateException e) { -- fail(e.toString()); -- } -- final TestCoordinateListener listener = 
new TestCoordinateListener(listenerLatches); -- ServiceHandle serviceHandle = cn.claim(c); -- assert(serviceHandle.waitForCoordinateOkSeconds(3 /* secs */)); -- serviceHandle.registerCoordinateListener(listener); -- -- return listener; -- } -- -- @Test -- public void testCoordinateListenerInitialEvent() throws Exception { -- final CountDownLatch connectedLatch1 = new CountDownLatch(1); -- final TestCoordinateListener listener = setUpListenerEnvironment(connectedLatch1); -- assertTrue(connectedLatch1.await(15, TimeUnit.SECONDS)); -- assertEquals(1, listener.events.size()); -- assertEquals(CoordinateListener.Event.COORDINATE_OK, listener.events.get(0)); -- } -- -- @Test -- public void testCoordinateListenerConnectionDies() throws Exception { -- final CountDownLatch connectedLatch1 = new CountDownLatch(1); -- final TestCoordinateListener listener = setUpListenerEnvironment(connectedLatch1); -- assertTrue(connectedLatch1.await(20, TimeUnit.SECONDS)); -- -- listener.expectEvent(CoordinateListener.Event.NO_CONNECTION_TO_STORAGE); -- forwarder.close(); -- forwarder = null; -- listener.waitForExpected(); -- } -- -- @Test -- public void testCoordinateListenerCoordinateCorrupted() throws Exception { -- final CountDownLatch connectedLatch1 = new CountDownLatch(1); -- final TestCoordinateListener listener = setUpListenerEnvironment(connectedLatch1); -- assertTrue(connectedLatch1.await(20, TimeUnit.SECONDS)); -- -- listener.expectEvent(CoordinateListener.Event.NOT_OWNER); -- -- byte[] garbageBytes = ""sdfgsdfgsfgdsdfgsdfgsdfg"".getBytes(""UTF-16LE""); -- -- zk.setData(""/cn/cell/user/service/1/status"", garbageBytes, -1); -- listener.waitForExpected(); -- } -- -- @Test -- public void testCoordinateListenerCoordinateOutOfSync() throws Exception { -- final CountDownLatch connectedLatch1 = new CountDownLatch(1); -- final TestCoordinateListener listener = setUpListenerEnvironment(connectedLatch1); -- assertTrue(connectedLatch1.await(20, TimeUnit.SECONDS)); -- -- listener.expectEvent(CoordinateListener.Event.NOT_OWNER); -- -- String source = ""\""{\\\""state\\\"":\\\""STARTING\\\"",\\\""message\\\"":\\\""Lost hamster.\\\""}\"" {}""; -- byte[] byteArray = source.getBytes(Util.CHARSET_NAME); -- -- zk.setData(""/cn/cell/user/service/1/status"", byteArray, -1); -- -- listener.waitForExpected(); -- } -- -- @Test -- public void testCoordinateListenerCoordinateLost() throws Exception { -- final CountDownLatch connectedLatch1 = new CountDownLatch(1); -- final TestCoordinateListener listener = setUpListenerEnvironment(connectedLatch1); -- listener.expectEvent(CoordinateListener.Event.NO_CONNECTION_TO_STORAGE); -- listener.expectEvent(CoordinateListener.Event.NOT_OWNER); -- -- assertTrue(connectedLatch1.await(20, TimeUnit.SECONDS)); -- LOG.info(""Deleting coordinate""); -- forwarder.pause(); -- zk.delete(""/cn/cell/user/service/1/status"", -1); -- zk.delete(""/cn/cell/user/service/1/config"", -1); -- zk.delete(""/cn/cell/user/service/1"", -1); -- forwarder.unpause(); -- -- listener.waitForExpected(); -- -- } -- -- @Test -- public void testCoordinateListenerStolenCoordinate() throws Exception { -- -- final CountDownLatch connectedLatch1 = new CountDownLatch(1); -- final TestCoordinateListener listener = setUpListenerEnvironment(connectedLatch1); -- assertTrue(connectedLatch1.await(20, TimeUnit.SECONDS)); -- LOG.info(""Killing zookeeper""); -- assertTrue(zk.getState() == ZooKeeper.States.CONNECTED); -- -- LOG.info(""Killing connection""); -- forwarder.pause(); -- -- zk.delete(""/cn/cell/user/service/1/status"", -1); -- 
Util.mkdir(zk, ""/cn/cell/user/service/1/status"" , ZooDefs.Ids.OPEN_ACL_UNSAFE); -- -- forwarder.unpause(); -- -- listener.expectEvent(CoordinateListener.Event.NOT_OWNER); -- listener.waitForExpected(); -- } -- -- -- @Test -- public void testCoordinateListenerConnectionDiesReconnect() throws Exception { -- final CountDownLatch connectedLatch1 = new CountDownLatch(1); -- final TestCoordinateListener listener = setUpListenerEnvironment(connectedLatch1); -- assertTrue(connectedLatch1.await(20, TimeUnit.SECONDS)); -- -- listener.expectEvent(CoordinateListener.Event.NO_CONNECTION_TO_STORAGE); -- -- forwarder.pause(); -- listener.waitForExpected(); -- -- listener.expectEvent(CoordinateListener.Event.COORDINATE_OK); -- forwarder.unpause(); -- listener.waitForExpected(); -- } -- -- /** -- * In this test the ZK server thinks the client is connected, but the client wants to reconnect -- * due to a disconnect. To trig this condition the connection needs to be down for -- * a specific time. This test does not fail even if it does not manage to create this -- * state. It will write the result to the log. The test is useful for development and -- * should not fail. -- */ -- @Test -- public void testCoordinateListenerConnectionDiesReconnectAfterTimeoutClient() -- throws Exception { -- final CountDownLatch connectedLatch1 = new CountDownLatch(1); -- final TestCoordinateListener listener = setUpListenerEnvironment(connectedLatch1); -- assertTrue(connectedLatch1.await(20, TimeUnit.SECONDS)); -- assertEquals(CoordinateListener.Event.COORDINATE_OK, -- listener.events.get(listener.events.size() -1 )); -- -- listener.expectEvent(CoordinateListener.Event.NO_CONNECTION_TO_STORAGE); -- LOG.info(""Killing connection""); -- forwarder.pause(); -- -- LOG.info(""Connection down.""); -- listener.waitForExpected(); -- -- // Client sees problem, server not. -- listener.expectEvent(CoordinateListener.Event.COORDINATE_OK); -- -- // 3400 is a magic number for getting zookeeper and local client in a specific state. -- Thread.sleep(2400); -- LOG.info(""Recreating connection soon"" + forwarderPort + ""->"" + zkport); -- -- -- forwarder.unpause(); -- listener.waitForExpected(); // COORDINATE_OK -- -- // If the previous event is NOT_OWNER, the wanted situation was created by the test. -- if (listener.events.get(listener.events.size() - 2) == -- CoordinateListener.Event.NOT_OWNER) { -- LOG.info(""Manage to trig event inn ZooKeeper, true positive.""); -- } else { -- LOG.info(""Did NOT manage to trig event in ZooKeeper. This depends on timing, so "" + -- ""ignoring this problem""); -- } -- } -- -- @Test -- public void testCoordinateListenerConnectionDiesReconnectAfterTimeout() throws Exception { -- final CountDownLatch connectedLatch1 = new CountDownLatch(1); -- final TestCoordinateListener listener = setUpListenerEnvironment(connectedLatch1); -- assertTrue(connectedLatch1.await(20, TimeUnit.SECONDS)); -- assertEquals(CoordinateListener.Event.COORDINATE_OK, -- listener.events.get(listener.events.size() -1 )); -- -- listener.expectEvent(CoordinateListener.Event.NO_CONNECTION_TO_STORAGE); -- -- forwarder.close(); -- forwarder = null; -- listener.waitForExpected(); -- // We do not want NOT OWNER event from ZooKeeper. Therefore this long time out. 
-- LOG.info(""Going into sleep, waiting for zookeeper to loose node""); -- Thread.sleep(10000); -- -- listener.expectEvent(CoordinateListener.Event.COORDINATE_OK); -- forwarder = new PortForwarder(forwarderPort, ""127.0.0.1"", zkport); -- -- // We need to re-instantiate the forwarder, or zookeeper thinks -- // the connection is good and will not kill the ephemeral node. -- // This is probably because we keep the server socket against zookeeper open -- // in pause mode. -- -- listener.waitForExpected(); -- } -- -- -- /** -- * Tests the behavior of Zookeeper upon a restart. ZK should clean up old coordinates. -- * @throws Exception -- */ -- @Test -- public void testZookeeperRestarts() throws Exception { -- final CountDownLatch connectedLatch1 = new CountDownLatch(1); -- final TestCoordinateListener listener = setUpListenerEnvironment(connectedLatch1); -- assertTrue(connectedLatch1.await(20, TimeUnit.SECONDS)); -- -- -- listener.expectEvent(CoordinateListener.Event.NO_CONNECTION_TO_STORAGE); -- forwarder.pause(); -- listener.waitForExpected(); -- -- ezk.shutdown(); -- ezk.del(); -- ezk.init(); -- -- listener.expectEvent(CoordinateListener.Event.NOT_OWNER); -- -- forwarder.unpause(); -- listener.waitForExpected(); -- -- createCoordinateWithRetries(); -- -- listener.expectEvent(CoordinateListener.Event.COORDINATE_OK); -- listener.waitForExpected(); -- } -- -- private void createCoordinateWithRetries() throws CoordinateExistsException, -- InterruptedException, CloudnameException { -- Coordinate c = Coordinate.parse(""1.service.user.cell""); -- int retries = 10; -- for (;;) { -- try { -- cn.createCoordinate(c); -- break; -- } catch (CloudnameException e) { -- /* -- * CloudnameException indicates that the connection with -- * ZooKeeper isn't back up yet. Retry a few times. -- */ -- if (retries-- > 0) { -- LOG.info(""Failed to create coordinate: "" + e -- + "", retrying in 1 second""); -- Thread.sleep(1000); -- } else { -- throw e; -- } -- } -- } -- } -- -- /** -- * Tests that one process claims a coordinate, then another process tries to claim the same coordinate. -- * The first coordinate looses connection to ZooKeeper and the other process gets the coordinate. 
-- * @throws Exception -- */ -- @Test -- public void testFastHardRestart() throws Exception { -- final Coordinate c = Coordinate.parse(""1.service.user.cell""); -- final CountDownLatch claimLatch1 = new CountDownLatch(1); -- forwarderPort = Net.getFreePort(); -- forwarder = new PortForwarder(forwarderPort, ""127.0.0.1"", zkport); -- final Cloudname cn1 = new ZkCloudname.Builder().setConnectString( -- ""localhost:"" + forwarderPort).build().connect(); -- cn1.createCoordinate(c); -- -- ServiceHandle handle1 = cn1.claim(c); -- handle1.registerCoordinateListener(new CoordinateListener() { -- -- @Override -- public void onCoordinateEvent(Event event, String message) { -- if (event == Event.COORDINATE_OK) { -- claimLatch1.countDown(); -- } -- } -- }); -- assertTrue(claimLatch1.await(5, TimeUnit.SECONDS)); -- -- final Cloudname cn2 = new ZkCloudname.Builder().setConnectString( -- ""localhost:"" + zkport).build().connect(); -- -- ServiceHandle handle2 = cn2.claim(c); -- -- forwarder.close(); -- forwarder = null; -- -- assertTrue(handle2.waitForCoordinateOkSeconds(20)); -- -- ServiceStatus status = new ServiceStatus(ServiceState.RUNNING, ""updated status""); -- handle2.setStatus(status); -- -- final Cloudname cn3 = new ZkCloudname.Builder().setConnectString(""localhost:"" + zkport) -- .build().connect(); -- ServiceStatus statusRetrieved = cn3.getStatus(c); -- assertEquals(""updated status"", statusRetrieved.getMessage()); -- -- cn1.close(); -- cn2.close(); -- cn3.close(); -- } -- -- /** -- * Makes a local ZkCloudname instance with the port given by zkPort. -- * Then it connects to ZK. -- */ -- private ZkCloudname makeLocalZkCloudname(int port) throws CloudnameException { -- return new ZkCloudname.Builder().setConnectString(""localhost:"" + port).build().connect(); -- } --} -diff --git a/cn/src/integrationtest/java/org/cloudname/zk/ZkResolverIntegrationTest.java b/cn/src/integrationtest/java/org/cloudname/zk/ZkResolverIntegrationTest.java -deleted file mode 100644 -index b0a01517..00000000 ---- a/cn/src/integrationtest/java/org/cloudname/zk/ZkResolverIntegrationTest.java -+++ /dev/null -@@ -1,420 +0,0 @@ --package org.cloudname.zk; -- --import org.apache.zookeeper.*; --import org.cloudname.*; --import org.cloudname.testtools.Net; --import org.cloudname.testtools.zookeeper.EmbeddedZooKeeper; --import org.junit.After; --import org.junit.Before; --import org.junit.Rule; --import org.junit.Test; --import org.junit.rules.TemporaryFolder; -- --import java.io.File; --import java.util.ArrayList; --import java.util.List; --import java.util.Set; --import java.util.concurrent.CountDownLatch; --import java.util.concurrent.TimeUnit; -- --import static org.junit.Assert.*; --import static org.junit.Assert.assertEquals; -- --/** -- * Integration tests for the ZkResolver class. -- * This test class contains tests dependent on timing or -- * tests depending on other modules, or both. -- */ --public class ZkResolverIntegrationTest { -- private ZooKeeper zk; -- private Cloudname cn; -- private Coordinate coordinateRunning; -- private Coordinate coordinateDraining; -- @Rule -- public TemporaryFolder temp = new TemporaryFolder(); -- private ServiceHandle handleDraining; -- -- -- /** -- * Set up an embedded ZooKeeper instance backed by a temporary -- * directory. The setup procedure also allocates a port that is -- * free for the ZooKeeper server so that you should be able to run -- * multiple instances of this test. 
-- */ -- @Before -- public void setup() throws Exception { -- -- // Speed up tests waiting for this event to happen. -- DynamicExpression.TIME_BETWEEN_NODE_SCANNING_MS = 200; -- -- File rootDir = temp.newFolder(""zk-test""); -- final int zkport = Net.getFreePort(); -- -- // Set up and initialize the embedded ZooKeeper -- final EmbeddedZooKeeper ezk = new EmbeddedZooKeeper(rootDir, zkport); -- ezk.init(); -- -- // Set up a zookeeper client that we can use for inspection -- final CountDownLatch connectedLatch = new CountDownLatch(1); -- zk = new ZooKeeper(""localhost:"" + zkport, 1000, new Watcher() { -- public void process(WatchedEvent event) { -- if (event.getState() == Event.KeeperState.SyncConnected) { -- connectedLatch.countDown(); -- } -- } -- }); -- connectedLatch.await(); -- coordinateRunning = Coordinate.parse(""1.service.user.cell""); -- cn = new ZkCloudname.Builder().setConnectString(""localhost:"" + zkport).build().connect(); -- cn.createCoordinate(coordinateRunning); -- ServiceHandle handleRunning = cn.claim(coordinateRunning); -- assertTrue(handleRunning.waitForCoordinateOkSeconds(30)); -- -- handleRunning.putEndpoint(new Endpoint( -- coordinateRunning, ""foo"", ""localhost"", 1234, ""http"", ""data"")); -- handleRunning.putEndpoint(new Endpoint( -- coordinateRunning, ""bar"", ""localhost"", 1235, ""http"", null)); -- ServiceStatus statusRunning = new ServiceStatus(ServiceState.RUNNING, ""Running message""); -- handleRunning.setStatus(statusRunning); -- -- coordinateDraining = Coordinate.parse(""0.service.user.cell""); -- cn.createCoordinate(coordinateDraining); -- handleDraining = cn.claim(coordinateDraining); -- assertTrue(handleDraining.waitForCoordinateOkSeconds(10)); -- handleDraining.putEndpoint(new Endpoint( -- coordinateDraining, ""foo"", ""localhost"", 5555, ""http"", ""data"")); -- handleDraining.putEndpoint(new Endpoint( -- coordinateDraining, ""bar"", ""localhost"", 5556, ""http"", null)); -- -- ServiceStatus statusDraining = new ServiceStatus(ServiceState.DRAINING, ""Draining message""); -- handleDraining.setStatus(statusDraining); -- } -- -- @After -- public void tearDown() throws Exception { -- zk.close(); -- } -- -- -- public void undrain() throws CoordinateMissingException, CloudnameException { -- ServiceStatus statusDraining = new ServiceStatus(ServiceState.RUNNING, ""alive""); -- handleDraining.setStatus(statusDraining); -- } -- -- public void drain() throws CoordinateMissingException, CloudnameException { -- ServiceStatus statusDraining = new ServiceStatus(ServiceState.DRAINING, ""dead""); -- handleDraining.setStatus(statusDraining); -- } -- -- public void changeEndpointData() throws CoordinateMissingException, CloudnameException { -- handleDraining.putEndpoint(new Endpoint( -- coordinateDraining, ""foo"", ""localhost"", 5555, ""http"", ""dataChanged"")); -- } -- -- public void changeEndpointPort() throws CoordinateMissingException, CloudnameException { -- handleDraining.putEndpoint(new Endpoint( -- coordinateDraining, ""foo"", ""localhost"", 5551, ""http"", ""dataChanged"")); -- } -- -- @Test -- public void testStatus() throws Exception { -- ServiceStatus status = cn.getStatus(coordinateRunning); -- assertEquals(ServiceState.RUNNING, status.getState()); -- assertEquals(""Running message"", status.getMessage()); -- } -- -- @Test -- public void testBasicSyncResolving() throws Exception { -- List endpoints = cn.getResolver().resolve(""foo.1.service.user.cell""); -- assertEquals(1, endpoints.size()); -- assertEquals(""foo"", endpoints.get(0).getName()); -- 
assertEquals(""localhost"", endpoints.get(0).getHost()); -- assertEquals(""1.service.user.cell"", endpoints.get(0).getCoordinate().toString()); -- assertEquals(""data"", endpoints.get(0).getEndpointData()); -- assertEquals(""http"", endpoints.get(0).getProtocol()); -- } -- -- -- @Test -- public void testAnyResolving() throws Exception { -- List endpoints = cn.getResolver().resolve(""foo.any.service.user.cell""); -- assertEquals(1, endpoints.size()); -- assertEquals(""foo"", endpoints.get(0).getName()); -- assertEquals(""localhost"", endpoints.get(0).getHost()); -- assertEquals(""1.service.user.cell"", endpoints.get(0).getCoordinate().toString()); -- } -- -- @Test -- public void testAllResolving() throws Exception { -- List endpoints = cn.getResolver().resolve(""all.service.user.cell""); -- assertEquals(2, endpoints.size()); -- assertEquals(""foo"", endpoints.get(0).getName()); -- assertEquals(""bar"", endpoints.get(1).getName()); -- } -- -- /** -- * Tests that all registered endpoints are returned. -- */ -- @Test -- public void testGetCoordinateDataAll() throws Exception { -- Resolver.CoordinateDataFilter filter = new Resolver.CoordinateDataFilter(); -- Set endpoints = cn.getResolver().getEndpoints(filter); -- assertEquals(4, endpoints.size()); -- } -- -- /** -- * Tests that all methods of the filters are called and some basic filtering are functional. -- */ -- @Test -- public void testGetCoordinateDataFilterOptions() throws Exception { -- final StringBuilder filterCalls = new StringBuilder(); -- -- Resolver.CoordinateDataFilter filter = new Resolver.CoordinateDataFilter() { -- @Override -- public boolean includeCell(final String datacenter) { -- filterCalls.append(datacenter).append("":""); -- return true; -- } -- @Override -- public boolean includeUser(final String user) { -- filterCalls.append(user).append("":""); -- return true; -- } -- @Override -- public boolean includeService(final String service) { -- filterCalls.append(service).append("":""); -- return true; -- } -- @Override -- public boolean includeEndpointname(final String endpointName) { -- return endpointName.equals(""foo""); -- } -- @Override -- public boolean includeServiceState(final ServiceState state) { -- return state == ServiceState.RUNNING; -- } -- }; -- Set endpoints = cn.getResolver().getEndpoints(filter); -- assertEquals(1, endpoints.size()); -- Endpoint selectedEndpoint = endpoints.iterator().next(); -- -- assertEquals(""foo"", selectedEndpoint.getName()); -- assertEquals(""cell:user:service:"", filterCalls.toString()); -- } -- -- -- /** -- * Test an unclaimed coordinate and a path that is not complete. -- * Number of endpoints should not increase when inputting bad data. -- * @throws Exception -- */ -- @Test -- public void testGetCoordinateDataAllNoClaimedCoordinate() throws Exception { -- // Create unclaimned coordinate. -- Coordinate coordinateNoStatus = Coordinate.parse(""4.service.user.cell""); -- cn.createCoordinate(coordinateNoStatus); -- -- // Throw in a incomplete path. 
-- zk.create(""/cn/foo"", new byte[0], ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); -- -- Resolver resolver = cn.getResolver(); -- -- Resolver.CoordinateDataFilter filter = new Resolver.CoordinateDataFilter(); -- Set endpoints = resolver.getEndpoints(filter); -- assertEquals(4, endpoints.size()); -- } -- -- @Test -- public void testBasicAsyncResolving() throws Exception { -- Resolver resolver = cn.getResolver(); -- -- final List endpointListNew = new ArrayList(); -- final List endpointListRemoved = new ArrayList(); -- final List endpointListModified = new ArrayList(); -- -- // This class is needed since the abstract resolver listener class can only access final variables. -- class LatchWrapper { -- public CountDownLatch latch; -- } -- final LatchWrapper latchWrapper = new LatchWrapper(); -- -- latchWrapper.latch = new CountDownLatch(1); -- -- resolver.addResolverListener( -- ""foo.all.service.user.cell"", new Resolver.ResolverListener() { -- -- @Override -- public void endpointEvent(Event event, Endpoint endpoint) { -- switch (event) { -- case NEW_ENDPOINT: -- endpointListNew.add(endpoint); -- latchWrapper.latch.countDown(); -- break; -- case REMOVED_ENDPOINT: -- endpointListRemoved.add(endpoint); -- latchWrapper.latch.countDown(); -- break; -- case MODIFIED_ENDPOINT_DATA: -- endpointListModified.add(endpoint); -- latchWrapper.latch.countDown(); -- break; -- } -- } -- }); -- assertTrue(latchWrapper.latch.await(24000, TimeUnit.MILLISECONDS)); -- assertEquals(1, endpointListNew.size()); -- assertEquals(""foo"", endpointListNew.get(0).getName()); -- assertEquals(""1.service.user.cell"", endpointListNew.get(0).getCoordinate().toString()); -- endpointListNew.clear(); -- latchWrapper.latch = new CountDownLatch(1); -- -- undrain(); -- -- -- assertTrue(latchWrapper.latch.await(125000, TimeUnit.MILLISECONDS)); -- -- assertEquals(1, endpointListNew.size()); -- -- assertEquals(""foo"", endpointListNew.get(0).getName()); -- assertEquals(""0.service.user.cell"", endpointListNew.get(0).getCoordinate().toString()); -- -- latchWrapper.latch = new CountDownLatch(1); -- endpointListNew.clear(); -- -- changeEndpointData(); -- -- assertTrue(latchWrapper.latch.await(26000, TimeUnit.MILLISECONDS)); -- -- assertEquals(1, endpointListModified.size()); -- -- assertEquals(""0.service.user.cell"", endpointListModified.get(0).getCoordinate().toString()); -- assertEquals(""foo"", endpointListModified.get(0).getName()); -- assertEquals(""dataChanged"", endpointListModified.get(0).getEndpointData()); -- -- endpointListModified.clear(); -- -- latchWrapper.latch = new CountDownLatch(2); -- -- changeEndpointPort(); -- -- assertTrue(latchWrapper.latch.await(27000, TimeUnit.MILLISECONDS)); -- -- assertEquals(1, endpointListNew.size()); -- assertEquals(1, endpointListRemoved.size()); -- -- endpointListNew.clear(); -- endpointListRemoved.clear(); -- -- -- -- latchWrapper.latch = new CountDownLatch(1); -- -- drain(); -- -- assertTrue(latchWrapper.latch.await(27000, TimeUnit.MILLISECONDS)); -- -- assertEquals(1, endpointListRemoved.size()); -- -- assertEquals(""0.service.user.cell"", endpointListRemoved.get(0).getCoordinate().toString()); -- assertEquals(""foo"", endpointListRemoved.get(0).getName()); -- } -- -- @Test -- public void testBasicAsyncResolvingAnyStrategy() throws Exception { -- Resolver resolver = cn.getResolver(); -- -- final List endpointListNew = new ArrayList(); -- -- // This class is needed since the abstract resolver listener class can only access -- // final variables. 
-- class LatchWrapper { -- public CountDownLatch latch; -- } -- final LatchWrapper latchWrapper = new LatchWrapper(); -- -- latchWrapper.latch = new CountDownLatch(1); -- -- resolver.addResolverListener( -- ""foo.any.service.user.cell"", new Resolver.ResolverListener() { -- -- @Override -- public void endpointEvent(Event event, Endpoint endpoint) { -- switch (event) { -- case NEW_ENDPOINT: -- endpointListNew.add(endpoint); -- latchWrapper.latch.countDown(); -- break; -- case REMOVED_ENDPOINT: -- latchWrapper.latch.countDown(); -- break; -- } -- } -- }); -- assertTrue(latchWrapper.latch.await(5000, TimeUnit.MILLISECONDS)); -- assertEquals(1, endpointListNew.size()); -- assertEquals(""foo"", endpointListNew.get(0).getName()); -- assertEquals(""1.service.user.cell"", endpointListNew.get(0).getCoordinate().toString()); -- endpointListNew.clear(); -- -- latchWrapper.latch = new CountDownLatch(1); -- -- undrain(); -- -- assertFalse(latchWrapper.latch.await(3000, TimeUnit.MILLISECONDS)); -- } -- -- @Test -- public void testStopAsyncResolving() throws Exception { -- Resolver resolver = cn.getResolver(); -- -- final List endpointListNew = new ArrayList(); -- -- // This class is needed since the abstract resolver listener class can only access -- // final variables. -- class LatchWrapper { -- public CountDownLatch latch; -- } -- final LatchWrapper latchWrapper = new LatchWrapper(); -- -- latchWrapper.latch = new CountDownLatch(1); -- -- -- Resolver.ResolverListener resolverListener = new Resolver.ResolverListener() { -- @Override -- public void endpointEvent(Event event, Endpoint endpoint) { -- switch (event) { -- -- case NEW_ENDPOINT: -- endpointListNew.add(endpoint); -- latchWrapper.latch.countDown(); -- break; -- case REMOVED_ENDPOINT: -- latchWrapper.latch.countDown(); -- break; -- } -- } -- }; -- resolver.addResolverListener(""foo.all.service.user.cell"", resolverListener); -- assertTrue(latchWrapper.latch.await(5000, TimeUnit.MILLISECONDS)); -- assertEquals(1, endpointListNew.size()); -- assertEquals(""foo"", endpointListNew.get(0).getName()); -- assertEquals(""1.service.user.cell"", endpointListNew.get(0).getCoordinate().toString()); -- endpointListNew.clear(); -- -- latchWrapper.latch = new CountDownLatch(1); -- -- resolver.removeResolverListener(resolverListener); -- -- undrain(); -- -- assertFalse(latchWrapper.latch.await(100, TimeUnit.MILLISECONDS)); -- -- try { -- resolver.removeResolverListener(resolverListener); -- } catch (IllegalArgumentException e) { -- // This is expected. -- return; -- } -- fail(""Did not throw an exception on deleting a non existing listener.""); -- } --} -diff --git a/cn/src/main/java/org/cloudname/Cloudname.java b/cn/src/main/java/org/cloudname/Cloudname.java -deleted file mode 100644 -index d1fbb729..00000000 ---- a/cn/src/main/java/org/cloudname/Cloudname.java -+++ /dev/null -@@ -1,84 +0,0 @@ --package org.cloudname; -- --/** -- * The main interface for interacting with Cloudname. -- * -- * @author borud -- * @author dybdahl -- */ --public interface Cloudname { -- /** -- * Claim a coordinate returning a {@link ServiceHandle} through -- * which the service can interact with the system. This is an asynchronous operation, to check result -- * use the returned Servicehandle. E.g. for waiting up to ten seconds for a claim to happen: -- * -- * Cloudname cn = ... -- * Coordinate coordinate = ... 
-- * ServiceHandle serviceHandle = cn.claim(coordinate); -- * boolean claimSuccess = serviceHandle.waitForCoordinateOkSeconds(10); -- * -- * @param coordinate of the service we wish to claim. -- * @return a ServiceHandle that can wait for the claim to be successful and listen to the state of the claim. -- */ -- ServiceHandle claim(Coordinate coordinate); -- -- /** -- * Get a resolver instance. -- */ -- Resolver getResolver(); -- -- /** -- * Create a coordinate in the persistent service store. Must -- * throw an exception if the coordinate has already been defined. -- * -- * -- * @param coordinate the coordinate we wish to create -- * @throws CoordinateExistsException if coordinate already exists. -- * @throws CloudnameException if problems with talking with storage. -- */ -- void createCoordinate(Coordinate coordinate) -- throws CloudnameException, CoordinateExistsException; -- -- /** -- * Deletes a coordinate in the persistent service store. It will throw an exception if the coordinate is claimed. -- * @param coordinate the coordinate we wish to destroy. -- * @throws CoordinateMissingException if coordinate does not exist. -- * @throws CloudnameException if problems talking with storage. -- * @throws CoordinateDeletionException if problems occurred during deletion. -- */ -- void destroyCoordinate(Coordinate coordinate) -- throws CoordinateDeletionException, CoordinateMissingException, CloudnameException; -- -- /** -- * Get the ServiceStatus for a given Coordinate. -- * -- * @param coordinate the coordinate we want to get the status of -- * @return a ServiceStatus instance. -- * @throws CloudnameException if problems with talking with storage. -- */ -- ServiceStatus getStatus(Coordinate coordinate) -- throws CloudnameException; -- -- /** -- * Updates the config for a coordinate. If the oldConfig is set (not null) it will require that the old config -- * matches otherwise it will throw an exception -- * @throws CoordinateMissingException if coordinate does not exist. -- * @throws CloudnameException if problems including oldConfig does not match old config. -- */ -- void setConfig(final Coordinate coordinate, final String newConfig, final String oldConfig) -- throws CoordinateMissingException, CloudnameException; -- -- /** -- * Get config for a coordinate. -- * @return the new config. -- * @throws CoordinateMissingException if coordinate does not exist. -- * @throws CloudnameException -- */ -- String getConfig(final Coordinate coordinate) -- throws CoordinateMissingException, CloudnameException; -- -- /** -- * Close down connection to storage. -- */ -- void close(); --} -diff --git a/cn/src/main/java/org/cloudname/CloudnameException.java b/cn/src/main/java/org/cloudname/CloudnameException.java -deleted file mode 100644 -index 66da94f7..00000000 ---- a/cn/src/main/java/org/cloudname/CloudnameException.java -+++ /dev/null -@@ -1,21 +0,0 @@ --package org.cloudname; -- --/** -- * Exceptions for Cloudname caused by problems talking to storage. 
-- * -- * @author borud -- */ --public class CloudnameException extends Exception { -- -- public CloudnameException(Throwable t) { -- super(t); -- } -- -- public CloudnameException(String message) { -- super(message); -- } -- -- public CloudnameException(String message, Throwable t) { -- super(message, t); -- } --} -diff --git a/cn/src/main/java/org/cloudname/CloudnameTestBootstrapper.java b/cn/src/main/java/org/cloudname/CloudnameTestBootstrapper.java -deleted file mode 100644 -index 5b29aae6..00000000 ---- a/cn/src/main/java/org/cloudname/CloudnameTestBootstrapper.java -+++ /dev/null -@@ -1,63 +0,0 @@ --package org.cloudname; -- --import org.apache.zookeeper.WatchedEvent; --import org.apache.zookeeper.Watcher; -- --import java.io.File; --import java.util.concurrent.CountDownLatch; --import java.util.logging.Logger; -- --import org.apache.zookeeper.ZooKeeper; --import org.cloudname.testtools.Net; --import org.cloudname.testtools.zookeeper.EmbeddedZooKeeper; --import org.cloudname.zk.ZkCloudname; -- -- --/** -- * Helper class for bootstrapping cloudname for unit-tests. It also exposes the ZooKeeper instance. -- * @author @author dybdahl -- */ --public class CloudnameTestBootstrapper { -- -- private static final Logger LOGGER = Logger.getLogger(CloudnameTestBootstrapper.class.getName()); -- private EmbeddedZooKeeper embeddedZooKeeper; -- private ZooKeeper zooKeeper; -- private Cloudname cloudname; -- private File rootDir; -- -- public CloudnameTestBootstrapper(File rootDir) { -- this.rootDir = rootDir; -- } -- -- public void init() throws Exception, CloudnameException { -- int zookeeperPort = Net.getFreePort(); -- -- LOGGER.info(""EmbeddedZooKeeper rootDir="" + rootDir.getCanonicalPath() -- + "", port="" + zookeeperPort -- ); -- -- // Set up and initialize the embedded ZooKeeper -- embeddedZooKeeper = new EmbeddedZooKeeper(rootDir, zookeeperPort); -- embeddedZooKeeper.init(); -- -- // Set up a zookeeper client that we can use for inspection -- final CountDownLatch connectedLatch = new CountDownLatch(1); -- zooKeeper = new ZooKeeper(""localhost:"" + zookeeperPort, 1000, new Watcher() { -- public void process(WatchedEvent event) { -- if (event.getState() == Watcher.Event.KeeperState.SyncConnected) { -- connectedLatch.countDown(); -- } -- } -- }); -- connectedLatch.await(); -- cloudname = new ZkCloudname.Builder().setConnectString(""localhost:"" + zookeeperPort).build().connect(); -- } -- -- public ZooKeeper getZooKeeper() { -- return zooKeeper; -- } -- -- public Cloudname getCloudname() { -- return cloudname; -- } --} -diff --git a/cn/src/main/java/org/cloudname/ConfigListener.java b/cn/src/main/java/org/cloudname/ConfigListener.java -deleted file mode 100644 -index 04f3c5d8..00000000 ---- a/cn/src/main/java/org/cloudname/ConfigListener.java -+++ /dev/null -@@ -1,25 +0,0 @@ --package org.cloudname; -- --/** -- * This interface defines the callback interface used to notify of -- * config node changes. -- * -- * @author borud -- */ -- --public interface ConfigListener { -- public enum Event { -- CREATE, -- UPDATED, -- DELETED, -- } -- -- /** -- * This method is called whenever the application needs to be -- * notified of events related to configuration. -- * -- * @param event the type of event observed on the config node. 
-- * @param data the contents of the config node -- */ -- public void onConfigEvent(Event event, String data); --} -\ No newline at end of file -diff --git a/cn/src/main/java/org/cloudname/Coordinate.java b/cn/src/main/java/org/cloudname/Coordinate.java -deleted file mode 100644 -index e35d8b8c..00000000 ---- a/cn/src/main/java/org/cloudname/Coordinate.java -+++ /dev/null -@@ -1,184 +0,0 @@ --package org.cloudname; -- --import java.util.regex.Pattern; --import java.util.regex.Matcher; -- --import com.fasterxml.jackson.databind.ObjectMapper; --import com.fasterxml.jackson.annotation.JsonCreator; --import com.fasterxml.jackson.annotation.JsonProperty; -- --import java.io.IOException; -- --/** -- * This class represents a service coordinate. A coordinate is given -- * by four pieces of data. -- * -- *
-- * Cell -- * A cell is roughly equivalent to ""data center"". The strict definition -- * is that a cell represents a ZooKeeper installation. You can have -- * multiple cells in a physical datacenter, but it is not advisable to -- * have ZooKeeper installations span physical data centers. -- * -- * User -- * The user owning the service. May or may not have any relation to -- * the operating system user. -- * -- * Service -- * The name of the service. -- * -- *
Instance -- * An integer [0, Integer.MAX_VALUE) indicating the instance number -- * of the service. -- * -- * The canonical form of a coordinate is {@code 0.service.user.dc}. -- * -- * This class is immutable. -- * -- * @author borud -- */ --public class Coordinate { -- private final String cell; -- private final String user; -- private final String service; -- private final int instance; -- -- // TODO(borud): allow for numbers in service, user and cell. Just -- // not the first character. -- public static final Pattern coordinatePattern -- = Pattern.compile(""^(\\d+)\\."" // instance -- + ""([a-z][a-z0-9-_]*)\\."" // service -- + ""([a-z][a-z0-9-_]*)\\."" // user -- + ""([a-z][a-z0-9-_]*)\\z""); // cell -- -- /** -- * Create a new coordinate instance. -- * -- * @param instance the instance number -- * @param service the service name -- * @param user the user name -- * @param cell the cell name -- * @throws IllegalArgumentException if the coordinate is invalid. -- */ -- @JsonCreator -- public Coordinate (@JsonProperty(""instance"") int instance, -- @JsonProperty(""service"") String service, -- @JsonProperty(""user"") String user, -- @JsonProperty(""cell"") String cell) { -- // Enables validation of coordinate. -- this(instance, service, user, cell, true); -- } -- -- /** -- * Internal version of constructor. Makes validation optional. -- */ -- public Coordinate (int instance, String service, String user, String cell, boolean validate) { -- this.instance = instance; -- this.service = service; -- this.user = user; -- this.cell = cell; -- -- if (instance < 0) { -- throw new IllegalArgumentException(""Invalid instance number: "" + instance); -- } -- -- // If the coordinate was created by the parse() method the -- // coordinate has already been parsed using the -- // coordinatePattern so no validation is required. If the -- // coordinate was defined using the regular constructor we -- // need to validate the parts. And we do this by re-using the -- // coordinatePattern. -- if (validate) { -- if (! coordinatePattern.matcher(asString()).matches()) { -- throw new IllegalArgumentException(""Invalid coordinate: '"" + asString() + ""'""); -- } -- } -- } -- -- /** -- * Parse coordinate and create a {@code Coordinate} instance from -- * a {@code String}. -- * -- * @param s Coordinate we wish to parse as a string. -- * @return a Coordinate instance equivalent to {@code s} -- * @throws IllegalArgumentException if the coordinate string {@s} -- * is not a valid coordinate. -- */ -- public static Coordinate parse(String s) { -- Matcher m = coordinatePattern.matcher(s); -- if (! 
m.matches()) { -- throw new IllegalArgumentException(""Malformed coordinate: "" + s); -- } -- -- int instance = Integer.parseInt(m.group(1)); -- String service = m.group(2); -- String user = m.group(3); -- String cell = m.group(4); -- -- return new Coordinate(instance, service, user, cell, false); -- } -- -- public String getCell() { -- return cell; -- } -- -- public String getUser() { -- return user; -- } -- -- public String getService() { -- return service; -- } -- -- public int getInstance() { -- return instance; -- } -- -- public String asString() { -- return instance + ""."" + service + ""."" + user + ""."" + cell; -- } -- -- @Override -- public String toString() { -- return asString(); -- } -- -- @Override -- public boolean equals(Object o) { -- if (null == o) { -- return false; -- } -- -- if (this == o) { -- return true; -- } -- -- if (getClass() != o.getClass()) { -- return false; -- } -- -- Coordinate c = (Coordinate) o; -- return ((instance == c.instance) -- && service.equals(c.service) -- && user.equals(c.user) -- && cell.equals(c.cell)); -- } -- -- @Override -- public int hashCode() { -- return asString().hashCode(); -- } -- -- public String toJson() { -- try { -- return new ObjectMapper().writeValueAsString(this); -- } catch (IOException e) { -- return null; -- } -- } -- -- public static Coordinate fromJson(String json) throws IOException { -- return new ObjectMapper().readValue(json, Coordinate.class); -- } -- --} -diff --git a/cn/src/main/java/org/cloudname/CoordinateDeletionException.java b/cn/src/main/java/org/cloudname/CoordinateDeletionException.java -deleted file mode 100644 -index c1ac89c7..00000000 ---- a/cn/src/main/java/org/cloudname/CoordinateDeletionException.java -+++ /dev/null -@@ -1,11 +0,0 @@ --package org.cloudname; -- --/** -- * Thrown when there are problems deleting a coordinate. -- * @auther dybdahl -- */ --public class CoordinateDeletionException extends CoordinateException { -- public CoordinateDeletionException(String reason) { -- super(reason); -- } --} -diff --git a/cn/src/main/java/org/cloudname/CoordinateException.java b/cn/src/main/java/org/cloudname/CoordinateException.java -deleted file mode 100644 -index fadb25b7..00000000 ---- a/cn/src/main/java/org/cloudname/CoordinateException.java -+++ /dev/null -@@ -1,12 +0,0 @@ --package org.cloudname; -- --/** -- * Base class for exception related to a specific coordinate. -- * @auther dybdahl -- */ --public abstract class CoordinateException extends Exception { -- -- public CoordinateException(String reason) { -- super(reason); -- } --} -diff --git a/cn/src/main/java/org/cloudname/CoordinateExistsException.java b/cn/src/main/java/org/cloudname/CoordinateExistsException.java -deleted file mode 100644 -index 7ba067a0..00000000 ---- a/cn/src/main/java/org/cloudname/CoordinateExistsException.java -+++ /dev/null -@@ -1,11 +0,0 @@ --package org.cloudname; -- --/** -- * It was assumed that the coordinate did not exist, but it did. -- * @auther dybdahl -- */ --public class CoordinateExistsException extends CoordinateException { -- public CoordinateExistsException(String reason) { -- super(reason); -- } --} -diff --git a/cn/src/main/java/org/cloudname/CoordinateListener.java b/cn/src/main/java/org/cloudname/CoordinateListener.java -deleted file mode 100644 -index 9a057ae7..00000000 ---- a/cn/src/main/java/org/cloudname/CoordinateListener.java -+++ /dev/null -@@ -1,48 +0,0 @@ --package org.cloudname; -- --/** -- * Interface for listening to status on a claimed coordinate. 
-- * @author dybdahl -- */ --public interface CoordinateListener { -- /** -- * Events that can be triggered when monitoring a coordinate. -- */ -- public enum Event { -- -- /** -- * Everything is fine. -- */ -- COORDINATE_OK, -- -- /** -- * Connection lost to storage, no more events will occur. -- */ -- NO_CONNECTION_TO_STORAGE, -- -- /** -- * Problems with parsing the data in storage for this coordinate. -- */ -- COORDINATE_CORRUPTED, -- -- /** -- * The data in the storage and memory is out of sync. -- */ -- COORDINATE_OUT_OF_SYNC, -- -- /** -- * No longer the owner of the coordinate. -- */ -- NOT_OWNER, -- } -- -- -- -- /** -- * Implement this function to receive the events. -- * Return false if no more events are wanted, will stop eventually. -- * @param event the event that happened. -- * @param message some message associated with the event. -- */ -- public void onCoordinateEvent(Event event, String message); --} -diff --git a/cn/src/main/java/org/cloudname/CoordinateMissingException.java b/cn/src/main/java/org/cloudname/CoordinateMissingException.java -deleted file mode 100644 -index 6fcdb56a..00000000 ---- a/cn/src/main/java/org/cloudname/CoordinateMissingException.java -+++ /dev/null -@@ -1,11 +0,0 @@ --package org.cloudname; -- --/** -- * Exception related to a coordinate that is missing. -- * @auther dybdahl -- */ --public class CoordinateMissingException extends CoordinateException { -- public CoordinateMissingException(String reason) { -- super(reason); -- } --} -diff --git a/cn/src/main/java/org/cloudname/Endpoint.java b/cn/src/main/java/org/cloudname/Endpoint.java -deleted file mode 100644 -index b892cfaf..00000000 ---- a/cn/src/main/java/org/cloudname/Endpoint.java -+++ /dev/null -@@ -1,102 +0,0 @@ --package org.cloudname; -- --import com.fasterxml.jackson.databind.ObjectMapper; --import com.fasterxml.jackson.annotation.JsonCreator; --import com.fasterxml.jackson.annotation.JsonProperty; -- --import java.io.IOException; -- --/** -- * Representation of an endpoint. This class is used to describe a -- * wide range of endpoints, but it is, initially geared mainly towards -- * services for which we need to know a hostname, port and protocol. -- * As a stop-gap measure we provide an {@code endpointData} field -- * which can be used in a pinch to communicate extra information about -- * the endpoint. -- * -- * Instances of this class are immutable. -- * -- * TODO(borud): decide if coordinate and name should be part of this -- * class. -- * -- * @author borud -- */ --public class Endpoint { -- // This gets saved into ZooKeeper as well and is redundant info, -- // but it makes sense to have this information in the Endpoint -- // instances to make it possible for clients to get a list of -- // endpoints and be able to figure out what coordinates they come -- // from if they were gathered from multiple services. -- private final Coordinate coordinate; -- // Ditto for name. 
-- private final String name; -- private final String host; -- private final int port; -- private final String protocol; -- private final String endpointData; -- -- @JsonCreator -- public Endpoint(@JsonProperty(""coordinate"") Coordinate coordinate, -- @JsonProperty(""name"") String name, -- @JsonProperty(""host"") String host, -- @JsonProperty(""port"") int port, -- @JsonProperty(""protocol"") String protocol, -- @JsonProperty(""endpointData"") String endpointData) -- { -- this.coordinate = coordinate; -- this.name = name; -- this.host = host; -- this.port = port; -- this.protocol = protocol; -- this.endpointData = endpointData; -- } -- -- public Coordinate getCoordinate() { -- return coordinate; -- } -- -- public String getName() { -- return name; -- } -- -- public String getHost() { -- return host; -- } -- -- public int getPort() { -- return port; -- } -- -- public String getProtocol() { -- return protocol; -- } -- -- public String getEndpointData() { -- return endpointData; -- } -- -- @Override -- public boolean equals(Object endpoint) { -- return endpoint instanceof Endpoint && ((Endpoint) endpoint).toJson().equals(toJson()); -- } -- -- public int hashCode() { -- return toJson().hashCode(); -- } -- -- public static Endpoint fromJson(String json) throws IOException { -- return new ObjectMapper().readValue(json, Endpoint.class); -- } -- -- public String toJson() { -- try { -- return new ObjectMapper().writeValueAsString(this); -- } catch (IOException e) { -- return null; -- } -- } -- -- public String toString() { -- return toJson(); -- } --} -diff --git a/cn/src/main/java/org/cloudname/Resolver.java b/cn/src/main/java/org/cloudname/Resolver.java -deleted file mode 100644 -index e6c08728..00000000 ---- a/cn/src/main/java/org/cloudname/Resolver.java -+++ /dev/null -@@ -1,115 +0,0 @@ --package org.cloudname; -- --import java.util.List; --import java.util.Set; -- --/** -- * This interface defines how we resolve endpoints in Cloudname. The client has to keep a reference to this Resolver -- * object otherwise it will stop resolving. -- * -- * @author borud -- */ --public interface Resolver { -- -- /** -- * Resolve an expression to a list of endpoints. The order of the -- * endpoints may be subject to ranking criteria. -- * -- * @param expression The expression to resolve, e.g. for ZooKeeper implementation there are various formats like -- * endpoint.instance.service.user.cell (see ZkResolver for details). -- * @throws CloudnameException if problems talking with storage. -- */ -- List resolve(String expression) throws CloudnameException; -- -- -- /** -- * Implement this interface to get dynamic information about what endpoints that are available. -- * If you want to register more than 1000 listeners in the same resolver, you might consider overriding -- * equals() and hashCode(), but the default implementation should work in normal cases. -- */ -- interface ResolverListener { -- enum Event { -- /** -- * New endpoint was added. -- */ -- NEW_ENDPOINT, -- /** -- * Endpoint removed. This include when the coordinate goes to draining. -- */ -- REMOVED_ENDPOINT, -- /** -- * Endpoint data has been modified. -- */ -- MODIFIED_ENDPOINT_DATA, -- /** -- * Lost connection to storage. The list of endpoints will get stale. The system will reconnect -- * automatically. -- */ -- LOST_CONNECTION, -- /** -- * Connection to storage is good, list of endpoints will be updated. -- */ -- CONNECTION_OK -- } -- -- /** -- * An Event happened related to the expression, see enum Event above. 
-- * @param endpoint is only populated for the Event NEW_ENDPOINT and REMOVED_ENDPOINT. -- */ -- void endpointEvent(Event event, final Endpoint endpoint); -- } -- -- /** -- * Registers a ResolverListener to get dynamic information about an expression. The expression is set in the -- * ResolverListener. You will only get updates as long as you keep a reference to Resolver. -- * If you don't have a reference, it is up to the garbage collector to decide how long you will receive callbacks. -- * One listener can only be registered once. -- * -- * @param expression The expression to resolve, e.g. for ZooKeeper implementation there are various formats like -- * endpoint.instance.service.user.cell (see ZkResolver for details). This should be static data, i.e. -- * the function might be called only once. -- */ -- void addResolverListener(String expression, ResolverListener listener) throws CloudnameException; -- -- /** -- * Calling this function unregisters the listener, i.e. stopping future callbacks. -- * The listener must be registered. For identification of listener, see comment on ResolverListener. -- * The default is to use object id. -- */ -- void removeResolverListener(ResolverListener listener); -- -- /** -- * This class is used as a parameter to {@link #getEndpoints(CoordinateDataFilter)}. Override methods to filter -- * the endpoints to be -- * returned. -- */ -- class CoordinateDataFilter { -- /** -- * Override these methods to filter on cell, user, service, endpointName, and/or service state. -- */ -- -- public boolean includeCell(final String cell) { -- return true; -- } -- public boolean includeUser(final String user) { -- return true; -- } -- public boolean includeService(final String service) { -- return true; -- } -- public boolean includeEndpointname(final String endpointName) { -- return true; -- } -- public boolean includeServiceState(final ServiceState state) { -- return true; -- } -- } -- -- /** -- * This method reads out all the nodes from the storage. IT CAN BE VERY EXPENSIVE AND SHOULD BE USED ONLY -- * WHEN NO OTHER METHODS ARE FEASIBLE. Do not call it frequently! -- * @param filter class for filtering out endpoints -- * @return list of endpoints. -- */ -- Set getEndpoints(CoordinateDataFilter filter) throws CloudnameException, InterruptedException; --} -diff --git a/cn/src/main/java/org/cloudname/ResolverStrategy.java b/cn/src/main/java/org/cloudname/ResolverStrategy.java -deleted file mode 100644 -index b28ebfb2..00000000 ---- a/cn/src/main/java/org/cloudname/ResolverStrategy.java -+++ /dev/null -@@ -1,29 +0,0 @@ --package org.cloudname; -- --import java.util.List; -- --/** -- * The ResolverStrategy is an interface for implementing a strategy when resolving endpoints. -- * -- * @auther dybdahl -- */ -- --public interface ResolverStrategy { -- -- /** -- * Given a list of endpoints, return only those endpoints that are desired for this strategy. -- */ -- public List filter(List endpoints); -- -- /** -- * Returns the endpoints ordered according to strategy specific scheme. -- */ -- public List order(List endpoints); -- -- /** -- * Returns the name of this strategy. This is the same name that is used in the resolver -- * (e.g. ""all"", ""any"" etc). -- * @return name of strategy. 
-- */ -- public String getName(); --} -diff --git a/cn/src/main/java/org/cloudname/ServiceHandle.java b/cn/src/main/java/org/cloudname/ServiceHandle.java -deleted file mode 100644 -index e0b9d81e..00000000 ---- a/cn/src/main/java/org/cloudname/ServiceHandle.java -+++ /dev/null -@@ -1,105 +0,0 @@ --package org.cloudname; -- --import java.util.List; -- --/** -- * The service handle -- the interface through which services -- * communicate their state to the outside world and where services can -- * register listeners to handle configuration updates. -- * -- * @author borud -- */ --public interface ServiceHandle { -- -- /** -- * This is a convenient function for waiting for the connection to storage to be ok. It is the same as -- * registering a CoordinateListener and waiting for event coordinate ok. -- */ -- boolean waitForCoordinateOkSeconds(int seconds) throws InterruptedException; -- -- /** -- * Set the status of this service. -- * -- * @param status the new status. -- * @throws CoordinateMissingException if coordinate does not exist. -- * @throws CloudnameException if coordinate is not claimed, connection to storage is down, or problems -- * with ZooKeeper. -- */ -- void setStatus(ServiceStatus status) throws CoordinateMissingException, CloudnameException; -- -- /** -- * Publish a named endpoint. It is legal to push an endpoint with updated data. -- * -- * @param endpoint the endpoint data. -- * @throws CoordinateMissingException if coordinate does not exist. -- * @throws CloudnameException if coordinate is not claimed, connection to storage is down, or problems -- * with ZooKeeper. -- */ -- void putEndpoint(Endpoint endpoint) throws CoordinateMissingException, CloudnameException; -- -- /** -- * Same as putEndpoints, but takes a list. -- * -- * @param endpoints the endpoints data. -- * @throws CloudnameException if coordinate is not claimed, connection to storage is down, or problems -- * with ZooKeeper. -- * @throws CoordinateMissingException if coordinate does not exist. -- */ -- void putEndpoints(List endpoints) throws CoordinateMissingException, CloudnameException; -- -- /** -- * Remove a published endpoint. -- * -- * @param name the name of the endpoint we wish to remove. -- * @throws CoordinateMissingException if coordinate does not exist. -- * @throws CloudnameException if coordinate is not claimed, connection to storage is down, or problems -- * with ZooKeeper. -- */ -- void removeEndpoint(String name) throws CoordinateMissingException, CloudnameException; -- -- /** -- * Same as removeEndpoint() but takes a list of names. -- * -- * @throws CloudnameException if coordinate is not claimed, connection to storage is down, or problems -- * with ZooKeeper. -- * @throws CoordinateMissingException if coordinate does not exist. -- * @throws CoordinateMissingException if coordinate does not exist. -- */ -- void removeEndpoints(List names) throws CoordinateMissingException, CloudnameException; -- -- -- /** -- * Register a ConfigListener which will be called whenever there -- * is a configuration change. -- * -- * There may have been configuration pushed to the backing storage -- * already by the time a ConfigListener is registered. In that -- * case the ConfigListener will see these configuration items as -- * being created. -- */ -- // TODO(dybdahl): This logic lacks tests. Before used in any production code, tests have to be added. 
-- void registerConfigListener(ConfigListener listener); -- -- /** -- * After registering a new listener, a new event is triggered which include current state, even without change -- * of state. -- * Don't call the cloudname library, do any heavy lifting, or do any IO operation from this callback thread. -- * That might deadlock as there is no guarantee what kind of thread that runs the callback. -- * -- * @throws CloudnameException if problems talking with storage. -- */ -- void registerCoordinateListener(CoordinateListener listener) -- throws CloudnameException; -- -- /** -- * Close the service handle and free up the coordinate so it can -- * be claimed by others. After close() has been called all -- * operations on this instance of the service handle will result -- * in an exception being thrown. All endpoints are deleted. -- * @throws CloudnameException if problems removing the claim. -- */ -- void close() -- throws CloudnameException; -- --} -- -diff --git a/cn/src/main/java/org/cloudname/ServiceState.java b/cn/src/main/java/org/cloudname/ServiceState.java -deleted file mode 100644 -index 6af89561..00000000 ---- a/cn/src/main/java/org/cloudname/ServiceState.java -+++ /dev/null -@@ -1,29 +0,0 @@ --package org.cloudname; -- --/** -- * The defined states of a service. -- * -- * @author borud -- */ --public enum ServiceState { -- // This means that no service has claimed the coordinate, or in -- // more practical terms: there is no ephemeral node called -- // ""status"" in the service root path in ZooKeeper. -- UNASSIGNED, -- -- // A running process has claimed the coordinate and is in the -- // process of starting up. -- STARTING, -- -- // A running process has claimed the coordinate and is running -- // normally. -- RUNNING, -- -- // A running process has claimed the coordinate and is running, -- // but it is in the process of shutting down and will not accept -- // new work. -- DRAINING, -- -- // An error condition has occurred. -- ERROR --} -diff --git a/cn/src/main/java/org/cloudname/ServiceStatus.java b/cn/src/main/java/org/cloudname/ServiceStatus.java -deleted file mode 100644 -index eba04e20..00000000 ---- a/cn/src/main/java/org/cloudname/ServiceStatus.java -+++ /dev/null -@@ -1,51 +0,0 @@ --package org.cloudname; -- --import com.fasterxml.jackson.databind.ObjectMapper; --import com.fasterxml.jackson.annotation.JsonCreator; --import com.fasterxml.jackson.annotation.JsonProperty; -- --import java.io.IOException; -- --/** -- * A representation of the basic runtime status of a service. -- * -- * Instances of ServiceStatus are immutable. 
-- * -- * @author borud -- */ --public class ServiceStatus { -- private final ServiceState state; -- private final String message; -- -- /** -- * @param state the state of the service -- * @param message a human readable message -- */ -- @JsonCreator -- public ServiceStatus(@JsonProperty(""state"") ServiceState state, -- @JsonProperty(""message"") String message) -- { -- this.state = state; -- this.message = message; -- } -- -- public ServiceState getState() { -- return state; -- } -- -- public String getMessage() { -- return message; -- } -- -- public static ServiceStatus fromJson(String json) throws IOException { -- return new ObjectMapper().readValue(json, ServiceStatus.class); -- } -- -- public String toJson() { -- try { -- return new ObjectMapper().writeValueAsString(this); -- } catch (IOException e) { -- return null; -- } -- } --} -\ No newline at end of file -diff --git a/cn/src/main/java/org/cloudname/StrategyAll.java b/cn/src/main/java/org/cloudname/StrategyAll.java -deleted file mode 100644 -index 2bf6dcbb..00000000 ---- a/cn/src/main/java/org/cloudname/StrategyAll.java -+++ /dev/null -@@ -1,35 +0,0 @@ --package org.cloudname; -- --import java.util.List; -- --/** -- * A strategy that implements ""all"" and returns everything and does not change order. -- * @author : dybdahl -- */ --public class StrategyAll implements ResolverStrategy { -- -- /** -- * Returns all the endpoints. -- */ -- @Override -- public List filter(List endpoints) { -- return endpoints; -- } -- -- /** -- * Doesn't change ordering of endpoints. -- */ -- @Override -- public List order(List endpoints) { -- return endpoints; -- } -- -- /** -- * The name of the strategy is ""all"". -- */ -- @Override -- public String getName() { -- return ""all""; -- } -- --} -diff --git a/cn/src/main/java/org/cloudname/StrategyAny.java b/cn/src/main/java/org/cloudname/StrategyAny.java -deleted file mode 100644 -index 76679a55..00000000 ---- a/cn/src/main/java/org/cloudname/StrategyAny.java -+++ /dev/null -@@ -1,60 +0,0 @@ --package org.cloudname; -- --import java.util.ArrayList; --import java.util.Collection; --import java.util.Collections; --import java.util.Comparator; --import java.util.List; --import java.util.SortedSet; -- --/** -- * A strategy that returns the first element of the sorted coordinates (by instance value) hashed with -- * the time of this object creation. This is useful for returning the same endpoint in most cases even -- * if an endpoint is removed or added. -- * @author : dybdahl -- */ --public class StrategyAny implements ResolverStrategy { -- -- // Some systems might not have nano seconds accuracy and we do not want zeros in the least significant -- // numbers. -- private int sortSeed = (int) System.nanoTime() / 1000; -- -- /** -- * Returns a list of the first endpoint if any, else returns the empty list. -- */ -- @Override -- public List filter(List endpoints) { -- if (endpoints.size() > 0) { -- List retVal = new ArrayList(); -- retVal.add(endpoints.get(0)); -- return retVal; -- } -- // Empty list. -- return endpoints; -- } -- -- /** -- * We return a list that is sorted differently for different clients. In this way only a few -- * clients are touched when an endpoint is added/removed. 
-- */ -- @Override -- public List order(List endpoints) { -- Collections.sort(endpoints, new Comparator() { -- @Override -- public int compare(Endpoint endpointA, Endpoint endpointB) { -- int instanceA = endpointA.getCoordinate().getInstance() ^ sortSeed; -- int instanceB = endpointB.getCoordinate().getInstance() ^ sortSeed; -- return (instanceA > instanceB ? -1 : (instanceA == instanceB ? 0 : 1)); -- } -- }); -- return endpoints; -- } -- -- /** -- * The name of the strategy is ""any"" -- */ -- @Override -- public String getName() { -- return ""any""; -- } --} -diff --git a/cn/src/main/java/org/cloudname/zk/ClaimedCoordinate.java b/cn/src/main/java/org/cloudname/zk/ClaimedCoordinate.java -deleted file mode 100644 -index f4d8e133..00000000 ---- a/cn/src/main/java/org/cloudname/zk/ClaimedCoordinate.java -+++ /dev/null -@@ -1,535 +0,0 @@ --package org.cloudname.zk; -- --import org.apache.zookeeper.*; --import org.apache.zookeeper.data.Stat; --import org.cloudname.*; -- --import java.io.IOException; -- --import java.io.UnsupportedEncodingException; --import java.util.*; --import java.util.concurrent.Executors; --import java.util.concurrent.ScheduledExecutorService; --import java.util.concurrent.TimeUnit; --import java.util.concurrent.atomic.AtomicBoolean; --import java.util.logging.Logger; -- --/** -- * This class keeps track of coordinate data and endpoints for a coordinate. It is notified about -- * the state of ZooKeeper connection by implementing the ZkObjectHandler.ConnectionStateChanged. -- * It implements the Watcher interface to track the specific path of the coordinate. -- * This is useful for being notified if something happens to the coordinate (if it -- * is overwritten etc). -- * -- * @author dybdahl -- */ --public class ClaimedCoordinate implements Watcher, ZkObjectHandler.ConnectionStateChanged { -- -- /** -- * True if we know that our state is in sync with zookeeper. -- */ -- private final AtomicBoolean isSynchronizedWithZooKeeper = new AtomicBoolean(false); -- -- /** -- * The client of the class has to call start. This will flip this bit. -- */ -- private final AtomicBoolean started = new AtomicBoolean(false); -- -- /** -- * The connection from client to ZooKeeper might go down. If it comes up again within some time -- * window the server might think an ephemeral node should be alive. The client lib might think -- * otherwise. If this flag is set, the class will eventually check version. -- */ -- private final AtomicBoolean checkVersion = new AtomicBoolean(false); -- -- /** -- * We keep track of the last version so we know if we are in sync. We set a large value to make -- * sure we do not accidentally overwrite an existing not owned coordinate. -- */ -- private int lastStatusVersion = Integer.MAX_VALUE; -- -- private final Object lastStatusVersionMonitor = new Object(); -- -- private static final Logger LOG = Logger.getLogger(ClaimedCoordinate.class.getName()); -- -- private final ZkObjectHandler.Client zkClient; -- -- /** -- * The claimed coordinate. -- */ -- private final Coordinate coordinate; -- -- /** -- * Status path of the coordinate. -- */ -- private final String path; -- -- /** -- * This is needed to make sure that the first message about state is sent while -- * other update messages are queued. -- */ -- private final Object callbacksMonitor = new Object(); -- -- /** -- * The endpoints and the status of the coordinate is stored here. -- */ -- private final ZkCoordinateData zkCoordinateData = new ZkCoordinateData(); -- -- /** -- * For running internal thread. 
-- */ -- private final ScheduledExecutorService scheduler = -- Executors.newSingleThreadScheduledExecutor(); -- -- /** -- * A list of the coordinate listeners that are registered for this coordinate. -- */ -- private final List coordinateListenerList = -- Collections.synchronizedList(new ArrayList()); -- -- /** -- * A list of tracked configs for this coordinate. -- */ -- private final List trackedConfigList = -- Collections.synchronizedList(new ArrayList()); -- -- -- /** -- * This class implements the logic for handling callbacks from ZooKeeper on claim. -- * In general we could just ignore errors since we have a time based retry mechanism. However, -- * we want to notify clients, and we need to update the consistencyState. -- */ -- class ClaimCallback implements AsyncCallback.StringCallback { -- -- @Override -- public void processResult( -- int rawReturnCode, String notUsed, Object parent, String notUsed2) { -- -- KeeperException.Code returnCode = KeeperException.Code.get(rawReturnCode); -- ClaimedCoordinate claimedCoordinate = (ClaimedCoordinate) parent; -- LOG.fine(""Claim callback with "" + returnCode.name() + "" "" + claimedCoordinate.path -- + "" synched: "" + isSynchronizedWithZooKeeper.get() + "" thread: "" + this); -- switch (returnCode) { -- // The claim was successful. This means that the node was created. We need to -- // populate the status and endpoints. -- case OK: -- -- // We should be the first one to write to the new node, or fail. -- // This requires that the first version is 0, have not seen this documented -- // but it should be a fair assumption and is verified by unit tests. -- synchronized (lastStatusVersionMonitor) { -- lastStatusVersion = 0; -- } -- -- // We need to set this to synced or updateCoordinateData will complain. -- // updateCoordinateData will set it to out-of-sync in case of problems. -- isSynchronizedWithZooKeeper.set(true); -- -- -- try { -- registerWatcher(); -- } catch (CloudnameException e) { -- LOG.fine(""Failed register watcher after claim. Going to state out of sync: "" -- + e.getMessage()); -- -- isSynchronizedWithZooKeeper.set(false); -- return; -- -- } catch (InterruptedException e) { -- -- LOG.fine(""Interrupted while setting up new watcher. Going to state out of sync.""); -- isSynchronizedWithZooKeeper.set(false); -- return; -- -- } -- // No exceptions, let's celebrate with a log message. -- LOG.info(""Claim processed ok, path: "" + path); -- claimedCoordinate.sendEventToCoordinateListener( -- CoordinateListener.Event.COORDINATE_OK, ""claimed""); -- return; -- -- case NODEEXISTS: -- // Someone has already claimed the coordinate. It might have been us in a -- // different thread. If we already have claimed the coordinate then don't care. -- // Else notify the client. If everything is fine, this is not a true negative, -- // so ignore it. It might happen if two attempts to tryClaim the coordinate run -- // in parallel. 
-- if (isSynchronizedWithZooKeeper.get() && started.get()) { -- LOG.fine(""Everything is fine, ignoring NODEEXISTS message, path: "" + path); -- return; -- } -- -- LOG.info(""Claimed fail, node already exists, will retry: "" + path); -- claimedCoordinate.sendEventToCoordinateListener( -- CoordinateListener.Event.NOT_OWNER, ""Node already exists.""); -- LOG.info(""isSynchronizedWithZooKeeper: "" + isSynchronizedWithZooKeeper.get()); -- checkVersion.set(true); -- return; -- case NONODE: -- LOG.info(""Could not claim due to missing coordinate, path: "" + path); -- claimedCoordinate.sendEventToCoordinateListener( -- CoordinateListener.Event.NOT_OWNER, -- ""No node on claiming coordinate: "" + returnCode.name()); -- return; -- -- default: -- // Random problem, report the problem to the client. -- claimedCoordinate.sendEventToCoordinateListener( -- CoordinateListener.Event.NO_CONNECTION_TO_STORAGE, -- ""Could not reclaim coordinate. Return code: "" + returnCode.name()); -- return; -- } -- } -- } -- -- -- private class ResolveProblems implements Runnable { -- @Override -- public void run() { -- if (isSynchronizedWithZooKeeper.get() || ! zkClient.isConnected() || -- ! started.get()) { -- -- return; -- } -- if (checkVersion.getAndSet(false)) { -- try { -- synchronized (lastStatusVersionMonitor) { -- final Stat stat = zkClient.getZookeeper().exists(path, null); -- if (stat != null && zkClient.getZookeeper().getSessionId() == -- stat.getEphemeralOwner()) { -- zkClient.getZookeeper().delete(path, lastStatusVersion); -- } -- } -- } catch (InterruptedException e) { -- LOG.info(""Interrupted""); -- checkVersion.set(true); -- } catch (KeeperException e) { -- LOG.info(""exception ""+ e.getMessage()); -- checkVersion.set(true); -- } -- -- } -- LOG.info(""We are out-of-sync, have a zookeeper connection, and are started, trying reclaim: "" -- + path + this); -- tryClaim(); -- } -- } -- -- /** -- * Constructor. -- * @param coordinate The coordinate that is claimed. -- * @param zkClient for getting access to ZooKeeper. -- */ -- public ClaimedCoordinate(final Coordinate coordinate, final ZkObjectHandler.Client zkClient) { -- this.coordinate = coordinate; -- path = ZkCoordinatePath.getStatusPath(coordinate); -- this.zkClient = zkClient; -- } -- -- /** -- * Claims a coordinate. To know if it was successful or not you need to register a listener. -- * @return this. -- */ -- public ClaimedCoordinate start() { -- zkClient.registerListener(this); -- started.set(true); -- final long periodicDelayMs = 2000; -- scheduler.scheduleWithFixedDelay(new ResolveProblems(), 1 /* initial delay ms */, -- periodicDelayMs, TimeUnit.MILLISECONDS); -- return this; -- } -- -- /** -- * Callbacks from zkClient -- */ -- @Override -- public void connectionUp() { -- isSynchronizedWithZooKeeper.set(false); -- } -- -- /** -- * Callbacks from zkClient -- */ -- @Override -- public void connectionDown() { -- List coordinateListenerListCopy = new ArrayList(); -- synchronized (coordinateListenerList) { -- coordinateListenerListCopy.addAll(coordinateListenerList); -- } -- for (CoordinateListener coordinateListener : coordinateListenerListCopy) { -- coordinateListener.onCoordinateEvent( -- CoordinateListener.Event.NO_CONNECTION_TO_STORAGE, ""down""); -- } -- isSynchronizedWithZooKeeper.set(false); -- } -- -- @Override -- public void shutDown() { -- scheduler.shutdown(); -- } -- -- /** -- * Updates the ServiceStatus and persists it. Only allowed if we claimed the coordinate. -- * @param status The new value for serviceStatus. 
-- */ -- public void updateStatus(final ServiceStatus status) -- throws CloudnameException, CoordinateMissingException { -- zkCoordinateData.setStatus(status); -- updateCoordinateData(); -- } -- -- /** -- * Adds new endpoints and persist them. Requires that this instance owns the tryClaim to the -- * coordinate. -- * @param newEndpoints endpoints to be added. -- */ -- public void putEndpoints(final List newEndpoints) -- throws CloudnameException, CoordinateMissingException { -- zkCoordinateData.putEndpoints(newEndpoints); -- updateCoordinateData(); -- } -- -- /** -- * Remove endpoints and persist it. Requires that this instance owns the tryClaim to the -- * coordinate. -- * @param names names of endpoints to be removed. -- */ -- public void removeEndpoints(final List names) -- throws CloudnameException, CoordinateMissingException { -- zkCoordinateData.removeEndpoints(names); -- updateCoordinateData(); -- } -- -- /** -- * Release the tryClaim of the coordinate. It means that nobody owns the coordinate anymore. -- * Requires that that this instance owns the tryClaim to the coordinate. -- */ -- public void releaseClaim() throws CloudnameException { -- scheduler.shutdown(); -- zkClient.deregisterListener(this); -- -- while (true) { -- final TrackedConfig config; -- synchronized (trackedConfigList) { -- if (trackedConfigList.isEmpty()) { -- break; -- } -- config = trackedConfigList.remove(0); -- } -- config.stop(); -- } -- -- sendEventToCoordinateListener( -- CoordinateListener.Event.NOT_OWNER, ""Released claim of coordinate""); -- -- synchronized (lastStatusVersionMonitor) { -- try { -- zkClient.getZookeeper().delete(path, lastStatusVersion); -- } catch (InterruptedException e) { -- throw new CloudnameException(e); -- } catch (KeeperException e) { -- throw new CloudnameException(e); -- } -- } -- } -- -- -- /** -- * Creates a string for debugging etc -- * @return serialized version of the instance data. -- */ -- public synchronized String toString() { -- return zkCoordinateData.snapshot().toString(); -- } -- -- /** -- * Registers a coordinatelistener that will receive events when there are changes to the status -- * node. Don't do any heavy lifting in the callback and don't call cloudname from the callback -- * as this might create a deadlock. -- * @param coordinateListener -- */ -- public void registerCoordinateListener(final CoordinateListener coordinateListener) { -- -- String message = ""New listener added, resending current state.""; -- synchronized (callbacksMonitor) { -- coordinateListenerList.add(coordinateListener); -- if (isSynchronizedWithZooKeeper.get()) { -- coordinateListener.onCoordinateEvent( -- CoordinateListener.Event.COORDINATE_OK, message); -- } -- } -- } -- -- -- public void deregisterCoordinateListener(final CoordinateListener coordinateListener) { -- coordinateListenerList.remove(coordinateListener); -- } -- -- /** -- * Registers a configlistener that will receive events when there are changes to the config node. -- * Don't do any heavy lifting in the callback and don't call cloudname from the callback as -- * this might create a deadlock. -- * @param trackedConfig -- */ -- public void registerTrackedConfig(final TrackedConfig trackedConfig) { -- trackedConfigList.add(trackedConfig); -- } -- -- /** -- * Handles event from ZooKeeper for this coordinate. 
-- * @param event -- */ -- @Override public void process(WatchedEvent event) { -- LOG.info(""Got an event from ZooKeeper "" + event.toString()); -- synchronized (lastStatusVersionMonitor) { -- switch (event.getType()) { -- -- case None: -- switch (event.getState()) { -- case SyncConnected: -- break; -- case Disconnected: -- case AuthFailed: -- case Expired: -- default: -- // If we lost connection, we don't attempt to register another watcher as -- // this might be blocking forever. Parent will try to reconnect (reclaim) -- // later. -- isSynchronizedWithZooKeeper.set(false); -- sendEventToCoordinateListener( -- CoordinateListener.Event.NO_CONNECTION_TO_STORAGE, -- event.toString()); -- -- return; -- } -- return; -- -- case NodeDeleted: -- // If node is deleted, we have no node to place a new watcher so we stop watching. -- isSynchronizedWithZooKeeper.set(false); -- sendEventToCoordinateListener(CoordinateListener.Event.NOT_OWNER, event.toString()); -- return; -- -- case NodeDataChanged: -- LOG.fine(""Node data changed, check versions.""); -- boolean verifiedSynchronized = false; -- try { -- final Stat stat = zkClient.getZookeeper().exists(path, this); -- if (stat == null) { -- LOG.info(""Could not stat path, setting out of synch, will retry claim""); -- } else { -- LOG.fine(""Previous version is "" + lastStatusVersion + "" now is "" -- + stat.getVersion()); -- if (stat.getVersion() != lastStatusVersion) { -- LOG.fine(""Version mismatch, sending out of sync.""); -- } else { -- verifiedSynchronized = true; -- } -- } -- } catch (KeeperException e) { -- LOG.fine(""Problems with zookeeper, sending consistencyState out of sync: "" -- + e.getMessage()); -- } catch (InterruptedException e) { -- LOG.fine(""Got interrupted: "" + e.getMessage()); -- return; -- } finally { -- isSynchronizedWithZooKeeper.set(verifiedSynchronized); -- } -- -- if (verifiedSynchronized) { -- sendEventToCoordinateListener( -- CoordinateListener.Event.COORDINATE_OUT_OF_SYNC, event.toString()); -- } -- return; -- -- case NodeChildrenChanged: -- case NodeCreated: -- // This should not happen.. -- isSynchronizedWithZooKeeper.set(false); -- sendEventToCoordinateListener( -- CoordinateListener.Event.COORDINATE_OUT_OF_SYNC, event.toString()); -- return; -- } -- } -- } -- -- private void tryClaim() { -- try { -- zkClient.getZookeeper().create( -- path, zkCoordinateData.snapshot().serialize().getBytes(Util.CHARSET_NAME), -- ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL, new ClaimCallback(), this); -- } catch (IOException e) { -- LOG.info(""Got IO exception on claim with new ZooKeeper instance "" + e.getMessage()); -- } -- } -- -- -- /** -- * Sends an event too all coordinate listeners. Note that the event is sent from this thread so -- * if the callback code does the wrong calls, deadlocks might occur. -- * @param event -- * @param message -- */ -- private void sendEventToCoordinateListener( -- final CoordinateListener.Event event, final String message) { -- synchronized (callbacksMonitor) { -- LOG.fine(""Event "" + event.name() + "" "" + message); -- List coordinateListenerListCopy = -- new ArrayList(); -- synchronized (coordinateListenerList) { -- coordinateListenerListCopy.addAll(coordinateListenerList); -- } -- for (CoordinateListener listener : coordinateListenerListCopy) { -- listener.onCoordinateEvent(event, message); -- } -- } -- } -- -- /** -- * Register a watcher for the coordinate. 
-- */ -- private void registerWatcher() throws CloudnameException, InterruptedException { -- LOG.fine(""Register watcher for ZooKeeper..""); -- try { -- zkClient.getZookeeper().exists(path, this); -- } catch (KeeperException e) { -- throw new CloudnameException(e); -- } -- } -- -- /** -- * Creates the serialized value of the object and stores this in ZooKeeper under the path. -- * It updates the lastStatusVersion. It does not set a watcher for the path. -- */ -- private void updateCoordinateData() throws CoordinateMissingException, CloudnameException { -- if (! started.get()) { -- throw new IllegalStateException(""Not started.""); -- } -- -- if (! zkClient.isConnected()) { -- throw new CloudnameException(""No proper connection with zookeeper.""); -- } -- -- synchronized (lastStatusVersionMonitor) { -- try { -- Stat stat = zkClient.getZookeeper().setData(path, -- zkCoordinateData.snapshot().serialize().getBytes(Util.CHARSET_NAME), -- lastStatusVersion); -- LOG.fine(""Updated coordinate, latest version is "" + stat.getVersion()); -- lastStatusVersion = stat.getVersion(); -- } catch (KeeperException.NoNodeException e) { -- throw new CoordinateMissingException(""Coordinate does not exist "" + path); -- } catch (KeeperException e) { -- throw new CloudnameException(""ZooKeeper errror in updateCoordinateData: "" -- + e.getMessage(), e); -- } catch (UnsupportedEncodingException e) { -- throw new CloudnameException(e); -- } catch (InterruptedException e) { -- throw new CloudnameException(e); -- } catch (IOException e) { -- throw new CloudnameException(e); -- } -- } -- -- } --} -diff --git a/cn/src/main/java/org/cloudname/zk/DynamicExpression.java b/cn/src/main/java/org/cloudname/zk/DynamicExpression.java -deleted file mode 100644 -index 3811e640..00000000 ---- a/cn/src/main/java/org/cloudname/zk/DynamicExpression.java -+++ /dev/null -@@ -1,379 +0,0 @@ --package org.cloudname.zk; -- --import org.apache.zookeeper.WatchedEvent; --import org.apache.zookeeper.Watcher; --import org.cloudname.CloudnameException; --import org.cloudname.Endpoint; --import org.cloudname.Resolver; -- --import java.util.ArrayList; --import java.util.HashMap; --import java.util.HashSet; --import java.util.Iterator; --import java.util.List; --import java.util.Map; --import java.util.Random; --import java.util.Set; --import java.util.concurrent.Executors; --import java.util.concurrent.RejectedExecutionException; --import java.util.concurrent.ScheduledExecutorService; --import java.util.concurrent.TimeUnit; --import java.util.logging.Level; --import java.util.logging.Logger; -- --/** -- * Class that is capable of tracking an expression. An expression can include many nodes. -- * The number of nodes is dynamic and can change over time. -- * For now, the implementation is rather simple. For single endpoints it does use feedback from -- * ZooKeeper watcher events. For keeping track of new nodes, it does a scan on regular intervals. -- * @author dybdahl -- */ --class DynamicExpression implements Watcher, TrackedCoordinate.ExpressionResolverNotify, -- ZkObjectHandler.ConnectionStateChanged { -- -- /** -- * Keeps track of what picture (what an expression has resolved to) is sent to the user so that -- * we know when to send new events. -- */ -- private final Map clientPicture = new HashMap(); -- -- /** -- * Where to notify changes. -- */ -- private final Resolver.ResolverListener clientCallback; -- -- /** -- * This is the expression to dynamically resolved represented as ZkResolver.Parameters. 
-- */ -- private final ZkResolver.Parameters parameters; -- -- /** -- * When ZooKeeper reports an error about an path, when to try to read it again. -- */ -- private final long RETRY_INTERVAL_ZOOKEEPER_ERROR_MS = 30000; // 30 seconds -- -- /** -- * We wait a bit after a node has changed because in many cases there might be several updates, -- * e.g. an application registers several endpoints, each causing an update. -- */ -- private final long REFRESH_NODE_AFTER_CHANGE_MS = 2000; // two seconds -- -- /** -- * Does a full scan with this interval. Non-final so unit test can run faster. -- */ -- protected static long TIME_BETWEEN_NODE_SCANNING_MS = 1 * 60 * 1000; // one minute -- -- /** -- * A Map with all the coordinate we care about for now. -- */ -- final private Map coordinateByPath = -- new HashMap(); -- -- /** -- * We always add some random noise to when to do things so not all servers fire at the same time -- * against -- * ZooKeeper. -- */ -- private final Random random = new Random(); -- -- private static final Logger log = Logger.getLogger(DynamicExpression.class.getName()); -- -- private boolean stopped = false; -- -- private final ZkResolver zkResolver; -- -- private final ZkObjectHandler.Client zkClient; -- -- private final ScheduledExecutorService scheduler = -- Executors.newSingleThreadScheduledExecutor(); -- -- private final Object instanceLock = new Object(); -- -- /** -- * Start getting notified about changes to expression. -- * @param expression Coordinate expression. -- * @param clientCallback called on changes and initially. -- */ -- public DynamicExpression( -- final String expression, -- final Resolver.ResolverListener clientCallback, -- final ZkResolver zkResolver, -- final ZkObjectHandler.Client zkClient) { -- this.clientCallback = clientCallback; -- this.parameters = new ZkResolver.Parameters(expression); -- this.zkResolver = zkResolver; -- this.zkClient = zkClient; -- } -- -- public void start() { -- zkClient.registerListener(this); -- scheduler.scheduleWithFixedDelay(new NodeScanner(""""), 1 /* initial delay ms */, -- TIME_BETWEEN_NODE_SCANNING_MS, TimeUnit.MILLISECONDS); -- } -- -- /** -- * Stop receiving callbacks about coordinate. -- */ -- public void stop() { -- scheduler.shutdown(); -- -- synchronized (instanceLock) { -- stopped = true; -- for (TrackedCoordinate trackedCoordinate : coordinateByPath.values()) { -- trackedCoordinate.stop(); -- } -- coordinateByPath.clear(); -- } -- } -- -- private void scheduleRefresh(String path, long delayMs) { -- try { -- scheduler.schedule(new NodeScanner(path), delayMs, TimeUnit.MILLISECONDS); -- } catch (RejectedExecutionException e) { -- if (scheduler.isShutdown()) { -- return; -- } -- log.log(Level.SEVERE, ""Got exception while scheduling new refresh"", e); -- } -- } -- -- @Override -- public void connectionUp() { -- } -- -- @Override -- public void connectionDown() { -- } -- -- @Override -- public void shutDown() { -- scheduler.shutdown(); -- } -- -- /** -- * The method will try to resolve the expression and find new nodes. -- */ -- private class NodeScanner implements Runnable { -- final String path; -- -- public NodeScanner(final String path) { -- this.path = path; -- } -- -- @Override -- public void run() { -- if (path.isEmpty()) { -- resolve(); -- } else { -- refreshPathWithWatcher(path); -- } -- notifyClient(); -- } -- } -- -- /** -- * Callback from zookeeper watcher. 
-- */ -- @Override -- public void process(WatchedEvent watchedEvent) { -- synchronized (instanceLock) { -- if (stopped) { -- return; -- } -- } -- String path = watchedEvent.getPath(); -- Event.KeeperState state = watchedEvent.getState(); -- Event.EventType type = watchedEvent.getType(); -- -- switch (state) { -- case Expired: -- case AuthFailed: -- case Disconnected: -- // Something bad happened to the path, try again later. -- scheduleRefresh(path, RETRY_INTERVAL_ZOOKEEPER_ERROR_MS); -- break; -- } -- switch (type) { -- case NodeChildrenChanged: -- case None: -- case NodeCreated: -- scheduleRefresh(path, REFRESH_NODE_AFTER_CHANGE_MS); -- break; -- case NodeDeleted: -- synchronized (instanceLock) { -- coordinateByPath.remove(path); -- notifyClient(); -- return; -- } -- case NodeDataChanged: -- refreshPathWithWatcher(path); -- break; -- } -- -- } -- -- /** -- * Implements interface TrackedCoordinate.ExpressionResolverNotify -- */ -- @Override -- public void nodeDead(final String path) { -- synchronized (instanceLock) { -- TrackedCoordinate trackedCoordinate = coordinateByPath.remove(path); -- if (trackedCoordinate == null) { -- return; -- } -- trackedCoordinate.stop(); -- // Triggers a new scan, and potential client updates. -- scheduleRefresh("""" /** scan for all nodes */, 50 /* ms*/); -- } -- } -- -- /** -- * Implements interface TrackedCoordinate.ExpressionResolverNotify -- */ -- @Override -- public void stateChanged(final String path) { -- // Something happened to a path, schedule a refetch. -- scheduleRefresh(path, 50); -- } -- -- private void resolve() { -- final List endpoints; -- try { -- endpoints = zkResolver.resolve(parameters.getExpression()); -- } catch (CloudnameException e) { -- log.warning(""Exception from cloudname: "" + e.toString()); -- return; -- } -- -- final Set validEndpointsPaths = new HashSet(); -- -- for (Endpoint endpoint : endpoints) { -- -- final String statusPath = ZkCoordinatePath.getStatusPath(endpoint.getCoordinate()); -- validEndpointsPaths.add(statusPath); -- -- final TrackedCoordinate trackedCoordinate; -- -- synchronized (instanceLock) { -- -- // If already discovered, do nothing. -- if (coordinateByPath.containsKey(statusPath)) { -- continue; -- } -- trackedCoordinate = new TrackedCoordinate(this, statusPath, zkClient); -- coordinateByPath.put(statusPath, trackedCoordinate); -- } -- // Tracked coordinate has to be in coordinateByPath before start is called, or events -- // gets lost. -- trackedCoordinate.start(); -- try { -- trackedCoordinate.waitForFirstData(); -- } catch (InterruptedException e) { -- log.log(Level.SEVERE, ""Got interrupt while waiting for data."", e); -- return; -- } -- } -- -- // Remove tracked coordinates that does not resolve. -- synchronized (instanceLock) { -- for (Iterator > it = -- coordinateByPath.entrySet().iterator(); -- it.hasNext(); /* nop */) { -- Map.Entry entry = it.next(); -- -- if (! 
validEndpointsPaths.contains(entry.getKey())) { -- log.info(""Killing endpoint "" + entry.getKey() + "": No longer resolved.""); -- entry.getValue().stop(); -- it.remove(); -- } -- } -- } -- } -- -- private String getEndpointKey(final Endpoint endpoint) { -- return endpoint.getCoordinate().asString() + ""@"" + endpoint.getName(); -- } -- -- -- private List getNewEndpoints() { -- final List newEndpoints = new ArrayList(); -- for (TrackedCoordinate trackedCoordinate : coordinateByPath.values()) { -- if (trackedCoordinate.getCoordinatedata() != null) { -- ZkResolver.addEndpoints( -- trackedCoordinate.getCoordinatedata(), -- newEndpoints, parameters.getEndpointName()); -- } -- } -- return newEndpoints; -- } -- -- private void notifyClient() { -- synchronized (instanceLock) { -- if (stopped) { -- return; -- } -- } -- // First generate a fresh list of endpoints. -- final List newEndpoints = getNewEndpoints(); -- -- synchronized (instanceLock) { -- final Map newEndpointsByName = new HashMap(); -- for (final Endpoint endpoint : newEndpoints) { -- newEndpointsByName.put(getEndpointKey(endpoint), endpoint); -- } -- final Iterator> it = clientPicture.entrySet().iterator(); -- while (it.hasNext()) { -- -- final Map.Entry endpointEntry = it.next(); -- -- final String key = endpointEntry.getKey(); -- if (! newEndpointsByName.containsKey(key)) { -- it.remove(); -- clientCallback.endpointEvent( -- Resolver.ResolverListener.Event.REMOVED_ENDPOINT, -- endpointEntry.getValue()); -- } -- } -- for (final Endpoint endpoint : newEndpoints) { -- final String key = getEndpointKey(endpoint); -- -- if (! clientPicture.containsKey(key)) { -- clientCallback.endpointEvent( -- Resolver.ResolverListener.Event.NEW_ENDPOINT, endpoint); -- clientPicture.put(key, endpoint); -- continue; -- } -- final Endpoint clientEndpoint = clientPicture.get(key); -- if (endpoint.equals(clientEndpoint)) { continue; } -- if (endpoint.getHost().equals(clientEndpoint.getHost()) && -- endpoint.getName().equals(clientEndpoint.getName()) && -- endpoint.getPort() == clientEndpoint.getPort() && -- endpoint.getProtocol().equals(clientEndpoint.getProtocol())) { -- clientCallback.endpointEvent( -- Resolver.ResolverListener.Event.MODIFIED_ENDPOINT_DATA, endpoint); -- clientPicture.put(key, endpoint); -- continue; -- } -- clientCallback.endpointEvent( -- Resolver.ResolverListener.Event.REMOVED_ENDPOINT, -- clientPicture.get(key)); -- clientCallback.endpointEvent( -- Resolver.ResolverListener.Event.NEW_ENDPOINT, endpoint); -- clientPicture.put(key, endpoint); -- } -- } -- } -- -- private void refreshPathWithWatcher(String path) { -- synchronized (instanceLock) { -- TrackedCoordinate trackedCoordinate = coordinateByPath.get(path); -- if (trackedCoordinate == null) { -- // Endpoint has been removed while waiting for refresh. 
-- return; -- } -- trackedCoordinate.refreshAsync(); -- } -- } -- --} -\ No newline at end of file -diff --git a/cn/src/main/java/org/cloudname/zk/TrackedConfig.java b/cn/src/main/java/org/cloudname/zk/TrackedConfig.java -deleted file mode 100644 -index aacd1908..00000000 ---- a/cn/src/main/java/org/cloudname/zk/TrackedConfig.java -+++ /dev/null -@@ -1,220 +0,0 @@ --package org.cloudname.zk; -- --import org.apache.zookeeper.KeeperException; --import org.apache.zookeeper.WatchedEvent; --import org.apache.zookeeper.Watcher; --import org.apache.zookeeper.data.Stat; --import org.cloudname.CloudnameException; --import org.cloudname.ConfigListener; -- --import java.io.IOException; --import java.io.UnsupportedEncodingException; --import java.util.concurrent.Executors; --import java.util.concurrent.ScheduledExecutorService; --import java.util.concurrent.TimeUnit; --import java.util.concurrent.atomic.AtomicBoolean; --import java.util.logging.Logger; -- -- --/** -- * This class keeps track of config for a coordinate. -- * -- * @author dybdahl -- */ --public class TrackedConfig implements Watcher, ZkObjectHandler.ConnectionStateChanged { -- -- private String configData = null; -- private final Object configDataMonitor = new Object(); -- private final ConfigListener configListener; -- -- private static final Logger log = Logger.getLogger(TrackedConfig.class.getName()); -- -- private final String path; -- -- private final AtomicBoolean isSynchronizedWithZookeeper = new AtomicBoolean(false); -- -- private final ZkObjectHandler.Client zkClient; -- private final ScheduledExecutorService scheduler = -- Executors.newSingleThreadScheduledExecutor(); -- /** -- * Constructor, the ZooKeeper instances is retrieved from ZkObjectHandler.Client, -- * so we won't get it until the client reports we have a Zk Instance in the handler. -- * @param path is the path of the configuration of the coordinate. -- */ -- public TrackedConfig( -- String path, ConfigListener configListener, ZkObjectHandler.Client zkClient) { -- this.path = path; -- this.configListener = configListener; -- this.zkClient = zkClient; -- } -- -- -- @Override -- public void connectionUp() { -- } -- -- @Override -- public void connectionDown() { -- isSynchronizedWithZookeeper.set(false); -- } -- -- @Override -- public void shutDown() { -- scheduler.shutdown(); -- } -- -- /** -- * Starts tracking the config. -- */ -- public void start() { -- zkClient.registerListener(this); -- final long periodicDelayMs = 2000; -- scheduler.scheduleWithFixedDelay(new ReloadConfigOnErrors(), 1 /* initial delay ms */, -- periodicDelayMs, TimeUnit.MILLISECONDS); -- } -- -- /** -- * Stops the tracker. -- */ -- public void stop() { -- scheduler.shutdown(); -- zkClient.deregisterListener(this); -- } -- -- /** -- * If connection to zookeeper is away, we need to reload because messages might have been -- * lost. This class has a method for checking this. -- */ -- private class ReloadConfigOnErrors implements Runnable { -- @Override -- public void run() { -- -- if (isSynchronizedWithZookeeper.get()) -- return; -- -- try { -- if (refreshConfigData()) { -- configListener.onConfigEvent(ConfigListener.Event.UPDATED, getConfigData()); -- } -- } catch (CloudnameException e) { -- // No worries, we try again later -- } -- } -- } -- -- /** -- * Returns current config. 
-- * @return config -- */ -- public String getConfigData() { -- synchronized (configDataMonitor) { -- return configData; -- } -- } -- -- /** -- * Creates a string for debugging etc -- * @return serialized version of the instance data. -- */ -- public String toString() { -- return ""Config: "" + getConfigData(); -- } -- -- -- /** -- * Handles event from ZooKeeper for this coordinate. -- * @param event Event to handle -- */ -- @Override public void process(WatchedEvent event) { -- log.severe(""Got an event from ZooKeeper "" + event.toString() + "" path: "" + path); -- -- switch (event.getType()) { -- case None: -- switch (event.getState()) { -- case SyncConnected: -- break; -- case Disconnected: -- case AuthFailed: -- case Expired: -- default: -- isSynchronizedWithZookeeper.set(false); -- // If we lost connection, we don't attempt to register another watcher as -- // this might be blocking forever. Parent might try to reconnect. -- return; -- } -- break; -- case NodeDeleted: -- synchronized (configDataMonitor) { -- isSynchronizedWithZookeeper.set(false); -- configData = null; -- } -- configListener.onConfigEvent(ConfigListener.Event.DELETED, """"); -- return; -- case NodeDataChanged: -- isSynchronizedWithZookeeper.set(false); -- return; -- case NodeChildrenChanged: -- case NodeCreated: -- break; -- } -- // We are only interested in registering a watcher in a few cases. E.g. if the event is lost -- // connection, registerWatcher does not make sense as it is blocking. In NodeDataChanged -- // above, a watcher is registerred in refreshConfigData(). -- try { -- registerWatcher(); -- } catch (CloudnameException e) { -- log.info(""Got cloudname exception: "" + e.getMessage()); -- return; -- } catch (InterruptedException e) { -- log.info(""Got interrupted exception: "" + e.getMessage()); -- return; -- } -- } -- -- -- /** -- * Loads the config from ZooKeeper. In case of failure, we keep the old data. -- * -- * @return Returns true if data has changed. -- */ -- private boolean refreshConfigData() throws CloudnameException { -- if (! zkClient.isConnected()) { -- throw new CloudnameException(""No connection to storage.""); -- } -- synchronized (configDataMonitor) { -- -- String oldConfig = configData; -- Stat stat = new Stat(); -- try { -- byte[] data; -- -- data = zkClient.getZookeeper().getData(path, this, stat); -- if (data == null) { -- configData = """"; -- } else { -- configData = new String(data, Util.CHARSET_NAME); -- } -- isSynchronizedWithZookeeper.set(true); -- return oldConfig == null || ! 
oldConfig.equals(configData); -- } catch (KeeperException e) { -- throw new CloudnameException(e); -- } catch (UnsupportedEncodingException e) { -- throw new CloudnameException(e); -- } catch (InterruptedException e) { -- throw new CloudnameException(e); -- } catch (IOException e) { -- throw new CloudnameException(e); -- } -- } -- } -- -- private void registerWatcher() throws CloudnameException, InterruptedException { -- try { -- zkClient.getZookeeper().exists(path, this); -- -- } catch (KeeperException e) { -- throw new CloudnameException(e); -- } -- } -- --} -\ No newline at end of file -diff --git a/cn/src/main/java/org/cloudname/zk/TrackedCoordinate.java b/cn/src/main/java/org/cloudname/zk/TrackedCoordinate.java -deleted file mode 100644 -index 37b314c0..00000000 ---- a/cn/src/main/java/org/cloudname/zk/TrackedCoordinate.java -+++ /dev/null -@@ -1,220 +0,0 @@ --package org.cloudname.zk; -- --import org.apache.zookeeper.*; --import org.cloudname.*; -- --import java.util.concurrent.CountDownLatch; --import java.util.concurrent.Executors; --import java.util.concurrent.ScheduledExecutorService; --import java.util.concurrent.TimeUnit; --import java.util.concurrent.atomic.AtomicBoolean; --import java.util.logging.Level; --import java.util.logging.Logger; -- --/** -- * This class keeps track of serviceStatus and endpoints for a coordinate. -- * -- * @author dybdahl -- */ --public class TrackedCoordinate implements Watcher, ZkObjectHandler.ConnectionStateChanged { -- -- -- /** -- * The client can implement this to get notified on changes. -- */ -- public interface ExpressionResolverNotify { -- /** -- * This is called when the state has changed, it might have become unavailable. -- * @param statusPath path of the coordinate in zookeeper. -- */ -- void stateChanged(final String statusPath); -- -- /** -- * This is called when node is deleted, or it can not be read. -- * @param statusPath path of the coordinate in zookeeper. -- */ -- void nodeDead(final String statusPath); -- } -- -- private ZkCoordinateData.Snapshot coordinateData = null; -- private final Object coordinateDataMonitor = new Object(); -- -- private static final Logger LOG = Logger.getLogger(TrackedCoordinate.class.getName()); -- private final String path; -- private final ExpressionResolverNotify client; -- private final AtomicBoolean isSynchronizedWithZookeeper = new AtomicBoolean(false); -- private final ZkObjectHandler.Client zkClient; -- -- private final ScheduledExecutorService scheduler = -- Executors.newSingleThreadScheduledExecutor(); -- -- private final CountDownLatch firstRound = new CountDownLatch(1); -- /** -- * Constructor, the ZooKeeper instances is retrieved from implementing the ZkUserInterface so -- * the object is not ready to be used before the ZooKeeper instance is received. -- * @param path is the path of the status of the coordinate. -- */ -- public TrackedCoordinate( -- final ExpressionResolverNotify client, final String path, -- final ZkObjectHandler.Client zkClient) { -- LOG.finest(""Tracking coordinate with path "" + path); -- this.path = path; -- this.client = client; -- this.zkClient = zkClient; -- } -- -- // Implementation of ZkObjectHandler.ConnectionStateChanged. -- @Override -- public void connectionUp() { -- } -- -- // Implementation of ZkObjectHandler.ConnectionStateChanged. 
-- @Override -- public void connectionDown() { -- isSynchronizedWithZookeeper.set(false); -- } -- -- @Override -- public void shutDown() { -- scheduler.shutdown(); -- } -- -- /** -- * Signalize that the class should reload its data. -- */ -- public void refreshAsync() { -- isSynchronizedWithZookeeper.set(false); -- } -- -- public void start() { -- zkClient.registerListener(this); -- final long periodicDelayMs = 2000; -- scheduler.scheduleWithFixedDelay(new ReloadCoordinateData(), 1 /* initial delay ms */, -- periodicDelayMs, TimeUnit.MILLISECONDS); -- } -- -- public void stop() { -- scheduler.shutdown(); -- zkClient.deregisterListener(this); -- } -- -- public void waitForFirstData() throws InterruptedException { -- firstRound.await(); -- } -- -- -- -- /** -- * This class handles reloading new data from zookeeper if we are out of synch. -- */ -- class ReloadCoordinateData implements Runnable { -- @Override -- public void run() { -- if (! isSynchronizedWithZookeeper.getAndSet(true)) { return; } -- try { -- refreshCoordinateData(); -- } catch (CloudnameException e) { -- LOG.log(Level.INFO, ""exception on reloading coordinate data."", e); -- isSynchronizedWithZookeeper.set(false); -- } -- firstRound.countDown(); -- } -- } -- -- -- public ZkCoordinateData.Snapshot getCoordinatedata() { -- synchronized (coordinateDataMonitor) { -- return coordinateData; -- } -- } -- -- -- /** -- * Creates a string for debugging etc -- * @return serialized version of the instance data. -- */ -- public String toString() { -- synchronized (coordinateDataMonitor) { -- return coordinateData.toString(); -- } -- } -- -- -- /** -- * Handles event from ZooKeeper for this coordinate. -- * @param event Event to handle -- */ -- @Override public void process(WatchedEvent event) { -- LOG.fine(""Got an event from ZooKeeper "" + event.toString() + "" path: "" + path); -- -- switch (event.getType()) { -- case None: -- switch (event.getState()) { -- case SyncConnected: -- break; -- case Disconnected: -- case AuthFailed: -- case Expired: -- default: -- // If we lost connection, we don't attempt to register another watcher as -- // this might be blocking forever. Parent might try to reconnect. -- return; -- } -- break; -- case NodeDeleted: -- synchronized (coordinateDataMonitor) { -- coordinateData = new ZkCoordinateData().snapshot(); -- } -- client.nodeDead(path); -- return; -- case NodeDataChanged: -- isSynchronizedWithZookeeper.set(false); -- return; -- case NodeChildrenChanged: -- case NodeCreated: -- break; -- } -- try { -- registerWatcher(); -- } catch (CloudnameException e) { -- LOG.log(Level.INFO, ""Got cloudname exception."", e); -- } catch (InterruptedException e) { -- LOG.log(Level.INFO, ""Got interrupted exception."", e); -- } -- } -- -- -- /** -- * Loads the coordinate from ZooKeeper. In case of failure, we keep the old data. -- * Notifies the client if state changes. -- */ -- private void refreshCoordinateData() throws CloudnameException { -- -- if (! zkClient.isConnected()) { -- throw new CloudnameException(""No connection to storage.""); -- } -- synchronized (coordinateDataMonitor) { -- String oldDataSerialized = (null == coordinateData) ? """" : coordinateData.serialize(); -- try { -- coordinateData = ZkCoordinateData.loadCoordinateData( -- path, zkClient.getZookeeper(), this).snapshot(); -- } catch (CloudnameException e) { -- client.nodeDead(path); -- LOG.log(Level.FINER, ""Exception while reading path "" + path, e); -- return; -- } -- isSynchronizedWithZookeeper.set(true); -- if (! 
oldDataSerialized.equals(coordinateData.toString())) { -- client.stateChanged(path); -- } -- } -- } -- -- private void registerWatcher() throws CloudnameException, InterruptedException { -- try { -- zkClient.getZookeeper().exists(path, this); -- } catch (KeeperException e) { -- throw new CloudnameException(e); -- } -- } --} -\ No newline at end of file -diff --git a/cn/src/main/java/org/cloudname/zk/Util.java b/cn/src/main/java/org/cloudname/zk/Util.java -deleted file mode 100644 -index 6bcd21e0..00000000 ---- a/cn/src/main/java/org/cloudname/zk/Util.java -+++ /dev/null -@@ -1,177 +0,0 @@ --package org.cloudname.zk; -- --import org.cloudname.CloudnameException; -- --import org.apache.zookeeper.ZooKeeper; --import org.apache.zookeeper.CreateMode; --import org.apache.zookeeper.data.ACL; --import org.apache.zookeeper.KeeperException; --import org.cloudname.CoordinateMissingException; -- --import java.util.ArrayList; --import java.util.List; -- --/** -- * Various ZooKeeper utilities. -- * -- * @author borud -- */ --public final class Util { -- // Constants -- public static final String CHARSET_NAME = ""UTF-8""; -- -- /** -- * Create a path in ZooKeeper. We just start at the top and work -- * our way down. Nodes that exist will throw an exception but we -- * will just ignore those. The result should be a path consisting -- * of ZooKeeper nodes with the names specified by the path and -- * with their data element set to null. -- * @throws CloudnameException if problems talking with ZooKeeper. -- */ -- public static void mkdir(final ZooKeeper zk, String path, final List acl) -- throws CloudnameException, InterruptedException { -- if (path.startsWith(""/"")) { -- path = path.substring(1); -- } -- -- String[] parts = path.split(""/""); -- -- String createPath = """"; -- for (String p : parts) { -- // Sonar will complain about this. Usually it would be -- // right but in this case it isn't. -- createPath += ""/"" + p; -- try { -- zk.create(createPath, null, acl, CreateMode.PERSISTENT); -- } catch (KeeperException.NodeExistsException e) { -- // This is okay. Ignore. -- } catch (KeeperException e) { -- throw new CloudnameException(e); -- } -- } -- } -- -- /** -- * Lists sub nodes of a path in a ZooKeeper instance. -- * @param path starts from this path -- * @param nodeList put sub-nodes in this list -- */ -- public static void listRecursively( -- final ZooKeeper zk, final String path, final List nodeList) -- throws CloudnameException, InterruptedException { -- -- List children = null; -- try { -- children = zk.getChildren(path, false); -- } catch (KeeperException e) { -- throw new CloudnameException(e); -- } -- if (children.size() == 0) { -- nodeList.add(path); -- return; -- } -- for (String childPath : children) { -- listRecursively(zk, path + ""/"" +childPath, nodeList); -- } -- } -- -- /** -- * Figures out if there are sub-nodes under the path in a ZooKeeper instance. -- * @return true if the node exists and has children. -- * @throws CoordinateMissingException if the path does not exist in ZooKeeper. -- */ -- public static boolean hasChildren(final ZooKeeper zk, final String path) -- throws CloudnameException, CoordinateMissingException, InterruptedException { -- if (! 
exist(zk, path)) { -- throw new CoordinateMissingException(""Could not get children due to non-existing path "" -- + path); -- } -- List children = null; -- try { -- children = zk.getChildren(path, false); -- } catch (KeeperException e) { -- throw new CloudnameException(e); -- } -- return ((children != null) && (children.size() > 0)); -- } -- -- /** -- * Figures out if a path exists in a ZooKeeper instance. -- * @throws CloudnameException if there are problems taking to the ZooKeeper instance. -- * @return true if the path exists. -- */ -- public static boolean exist(final ZooKeeper zk, final String path) -- throws CloudnameException, InterruptedException { -- try { -- return zk.exists(path, false) != null; -- } catch (KeeperException e) { -- throw new CloudnameException(e); -- } -- } -- -- /** -- * Returns the version of the path. -- * @param zk -- * @param path -- * @return version number -- */ -- public static int getVersionForDeletion(final ZooKeeper zk, final String path) -- throws CloudnameException, InterruptedException { -- -- try { -- int version = zk.exists(path, false).getVersion(); -- if (version < 0) { -- throw new CloudnameException( -- new RuntimeException(""Got negative version for path "" + path)); -- } -- return version; -- } catch (KeeperException e) { -- throw new CloudnameException(e); -- } -- } -- -- /** -- * Deletes nodes from a path from the right to the left. -- * @param zk -- * @param path to be deleted -- * @param keepMinLevels is the minimum number of levels (depths) to keep in the path. -- * @return the number of deleted levels. -- */ -- public static int deletePathKeepRootLevels( -- final ZooKeeper zk, String path, int keepMinLevels) -- throws CloudnameException, CoordinateMissingException, InterruptedException { -- if (path.startsWith(""/"")) { -- path = path.substring(1); -- } -- -- String[] parts = path.split(""/""); -- -- // We are happy if only the first two deletions went through. The other deletions are just -- // cleaning up if there are no more coordinates on the same rootPath. -- int deletedNodes = 0; -- List paths = new ArrayList(); -- String incrementalPath = """"; -- for (String p : parts) { -- incrementalPath += ""/"" + p; -- paths.add(incrementalPath); -- } -- -- for (int counter = paths.size() - 1; counter >= keepMinLevels; counter--) { -- String deletePath = paths.get(counter); -- int version = getVersionForDeletion(zk, deletePath); -- if (hasChildren(zk, deletePath)) { -- return deletedNodes; -- } -- try { -- zk.delete(deletePath, version); -- } catch (KeeperException e) { -- throw new CloudnameException(e); -- } -- deletedNodes++; -- } -- return deletedNodes; -- } -- -- // Should not be instantiated. 
-- private Util() {} --} -diff --git a/cn/src/main/java/org/cloudname/zk/ZkCloudname.java b/cn/src/main/java/org/cloudname/zk/ZkCloudname.java -deleted file mode 100644 -index c14e5e7b..00000000 ---- a/cn/src/main/java/org/cloudname/zk/ZkCloudname.java -+++ /dev/null -@@ -1,419 +0,0 @@ --package org.cloudname.zk; -- --import org.apache.zookeeper.data.Stat; --import org.cloudname.*; -- --import org.apache.zookeeper.WatchedEvent; --import org.apache.zookeeper.Watcher; --import org.apache.zookeeper.ZooKeeper; --import org.apache.zookeeper.CreateMode; --import org.apache.zookeeper.ZooDefs.Ids; --import org.apache.zookeeper.KeeperException; -- --import java.io.UnsupportedEncodingException; --import java.util.List; -- --import java.util.concurrent.Executors; --import java.util.concurrent.ScheduledExecutorService; --import java.util.concurrent.TimeUnit; -- --import java.util.logging.Level; --import java.util.logging.Logger; -- --import java.util.concurrent.CountDownLatch; -- --import java.io.IOException; -- -- --/** -- * An implementation of Cloudname using ZooKeeper. -- * -- * This implementation assumes that the path prefix defined by -- * CN_PATH_PREFIX is only used by Cloudname. The structure and -- * semantics of things under this prefix are defined by this library -- * and will be subject to change. -- * -- * -- * @author borud -- * @author dybdahl -- * @author storsveen -- */ --public final class ZkCloudname implements Cloudname, Watcher, Runnable { -- -- private static final int SESSION_TIMEOUT = 5000; -- -- private static final Logger log = Logger.getLogger(ZkCloudname.class.getName()); -- -- private ZkObjectHandler zkObjectHandler = null; -- -- private final String connectString; -- -- // Latches that count down when ZooKeeper is connected -- private final CountDownLatch connectedSignal = new CountDownLatch(1); -- -- private ZkResolver resolver = null; -- -- private int connectingCounter = 0; -- -- private final ScheduledExecutorService scheduler = -- Executors.newSingleThreadScheduledExecutor(); -- -- private ZkCloudname(final Builder builder) { -- connectString = builder.getConnectString(); -- -- } -- -- /** -- * Checks state of zookeeper connection and try to keep it running. -- */ -- @Override -- public void run() { -- final ZooKeeper.States state = zkObjectHandler.getClient().getZookeeper().getState(); -- -- if (state == ZooKeeper.States.CONNECTED) { -- connectingCounter = 0; -- return; -- } -- -- if (state == ZooKeeper.States.CONNECTING) { -- connectingCounter++; -- if (connectingCounter > 10) { -- log.fine(""Long time in connecting, forcing a close of zookeeper client.""); -- zkObjectHandler.close(); -- connectingCounter = 0; -- } -- return; -- } -- -- if (state == ZooKeeper.States.CLOSED) { -- log.fine(""Retrying connection to ZooKeeper.""); -- try { -- zkObjectHandler.setZooKeeper( -- new ZooKeeper(connectString, SESSION_TIMEOUT, this)); -- } catch (IOException e) { -- log.log(Level.SEVERE, ""RetryConnection failed for some reason:"" -- + e.getMessage(), e); -- } -- return; -- } -- -- log.severe(""Unknown state "" + state + "" closing....""); -- zkObjectHandler.close(); -- } -- -- -- /** -- * Connect to ZooKeeper instance with time-out value. -- * @param waitTime time-out value for establishing connection. -- * @param waitUnit time unit for time-out when establishing connection. 
-- * @throws CloudnameException if connection can not be established -- * @return -- */ -- public ZkCloudname connectWithTimeout(long waitTime, TimeUnit waitUnit) -- throws CloudnameException { -- boolean connected = false; -- try { -- -- zkObjectHandler = new ZkObjectHandler( -- new ZooKeeper(connectString, SESSION_TIMEOUT, this)); -- -- if (! connectedSignal.await(waitTime, waitUnit)) { -- throw new CloudnameException(""Connecting to ZooKeeper timed out.""); -- } -- log.fine(""Connected to ZooKeeper "" + connectString); -- connected = true; -- } catch (IOException e) { -- throw new CloudnameException(e); -- } catch (InterruptedException e) { -- throw new CloudnameException(e); -- } finally { -- if (!connected && zkObjectHandler != null) { -- zkObjectHandler.close(); -- } -- } -- resolver = new ZkResolver.Builder().addStrategy(new StrategyAll()) -- .addStrategy(new StrategyAny()).build(zkObjectHandler.getClient()); -- scheduler.scheduleWithFixedDelay(this, 1 /* initial delay ms */, -- 1000 /* check state every second */, TimeUnit.MILLISECONDS); -- return this; -- } -- -- /** -- * Connect to ZooKeeper instance with long time-out, however, it might fail fast. -- * @return connected ZkCloudname object -- * @throws CloudnameException if connection can not be established. -- */ -- public ZkCloudname connect() throws CloudnameException { -- // We wait up to 100 years. -- return connectWithTimeout(365 * 100, TimeUnit.DAYS); -- } -- -- -- -- @Override -- public void process(WatchedEvent event) { -- log.fine(""Got event in ZkCloudname: "" + event.toString()); -- if (event.getState() == Event.KeeperState.Disconnected -- || event.getState() == Event.KeeperState.Expired) { -- zkObjectHandler.connectionDown(); -- } -- -- // Initial connection to ZooKeeper is completed. -- if (event.getState() == Event.KeeperState.SyncConnected) { -- zkObjectHandler.connectionUp(); -- // The first connection set up is blocking, this will unblock the connection. -- connectedSignal.countDown(); -- } -- } -- -- /** -- * Create a given coordinate in the ZooKeeper node tree. -- * -- * Just blindly creates the entire path. Elements of the path may -- * exist already, but it seems wasteful to -- * @throws CoordinateExistsException if coordinate already exists- -- * @throws CloudnameException if problems with zookeeper connection. -- */ -- @Override -- public void createCoordinate(final Coordinate coordinate) -- throws CloudnameException, CoordinateExistsException { -- // Create the root path for the coordinate. We do this -- // blindly, meaning that if the path already exists, then -- // that's ok -- so a more correct name for this method would -- // be ensureCoordinate(), but that might confuse developers. -- String root = ZkCoordinatePath.getCoordinateRoot(coordinate); -- final ZooKeeper zk = zkObjectHandler.getClient().getZookeeper(); -- try { -- if (Util.exist(zk, root)) { -- throw new CoordinateExistsException(""Coordinate already created:"" +root); -- } -- } catch (InterruptedException e) { -- throw new CloudnameException(e); -- } -- -- try { -- Util.mkdir(zk, root, Ids.OPEN_ACL_UNSAFE); -- } catch (InterruptedException e) { -- throw new CloudnameException(e); -- } -- -- // Create the nodes that represent subdirectories. 
-- String configPath = ZkCoordinatePath.getConfigPath(coordinate, null); -- try { -- log.fine(""Creating config node "" + configPath); -- zk.create(configPath, null, Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); -- } catch (KeeperException e) { -- throw new CloudnameException(e); -- } catch (InterruptedException e) { -- throw new CloudnameException(e); -- } -- } -- -- /** -- * Deletes a coordinate in the persistent service store. This includes deletion -- * of config. It will fail if the coordinate is claimed. -- * @param coordinate the coordinate we wish to destroy. -- */ -- @Override -- public void destroyCoordinate(final Coordinate coordinate) -- throws CoordinateDeletionException, CoordinateMissingException, CloudnameException { -- String statusPath = ZkCoordinatePath.getStatusPath(coordinate); -- String configPath = ZkCoordinatePath.getConfigPath(coordinate, null); -- String rootPath = ZkCoordinatePath.getCoordinateRoot(coordinate); -- final ZooKeeper zk = zkObjectHandler.getClient().getZookeeper(); -- try { -- if (! Util.exist(zk, rootPath)) { -- throw new CoordinateMissingException(""Coordinate not found: "" + rootPath); -- } -- } catch (InterruptedException e) { -- throw new CloudnameException(e); -- } -- -- -- // Do this early to raise the error before anything is deleted. However, there might be a -- // race condition if someone claims while we delete configPath and instance (root) node. -- try { -- if (Util.exist(zk, configPath) && Util.hasChildren(zk, configPath)) { -- throw new CoordinateDeletionException(""Coordinate has config node.""); -- } -- } catch (InterruptedException e) { -- throw new CloudnameException(e); -- } -- -- try { -- if (Util.exist(zk, statusPath)) { -- throw new CoordinateDeletionException(""Coordinate is claimed.""); -- } -- } catch (InterruptedException e) { -- throw new CloudnameException(e); -- } -- -- // Delete config, the instance node, and continue with as much as possible. -- // We might have a raise condition if someone is creating a coordinate with a shared path -- // in parallel. We want to keep 3 levels of nodes (/cn/%CELL%/%USER%). -- int deletedNodes = 0; -- try { -- deletedNodes = Util.deletePathKeepRootLevels(zk, configPath, 3); -- } catch (InterruptedException e) { -- throw new CloudnameException(e); -- } -- if (deletedNodes == 0) { -- throw new CoordinateDeletionException(""Failed deleting config node, nothing deleted..""); -- } -- if (deletedNodes == 1) { -- throw new CoordinateDeletionException(""Failed deleting instance node.""); -- } -- } -- -- /** -- * Claim a coordinate. -- * -- * In this implementation a coordinate is claimed by creating an -- * ephemeral with the name defined in CN_STATUS_NAME. If the node -- * already exists the coordinate has already been claimed. -- */ -- @Override -- public ServiceHandle claim(final Coordinate coordinate) { -- String statusPath = ZkCoordinatePath.getStatusPath(coordinate); -- log.fine(""Claiming "" + coordinate.asString() + "" ("" + statusPath + "")""); -- -- ClaimedCoordinate statusAndEndpoints = new ClaimedCoordinate( -- coordinate, zkObjectHandler.getClient()); -- -- // If we have come thus far we have succeeded in creating the -- // CN_STATUS_NAME node within the service coordinate directory -- // in ZooKeeper and we can give the client a ServiceHandle. 
-- ZkServiceHandle handle = new ZkServiceHandle( -- statusAndEndpoints, coordinate, zkObjectHandler.getClient()); -- statusAndEndpoints.start(); -- return handle; -- } -- -- @Override -- public Resolver getResolver() { -- -- return resolver; -- } -- -- @Override -- public ServiceStatus getStatus(Coordinate coordinate) throws CloudnameException { -- String statusPath = ZkCoordinatePath.getStatusPath(coordinate); -- ZkCoordinateData zkCoordinateData = ZkCoordinateData.loadCoordinateData( -- statusPath, zkObjectHandler.getClient().getZookeeper(), null); -- return zkCoordinateData.snapshot().getServiceStatus(); -- } -- -- @Override -- public void setConfig( -- final Coordinate coordinate, final String newConfig, final String oldConfig) -- throws CoordinateMissingException, CloudnameException { -- String configPath = ZkCoordinatePath.getConfigPath(coordinate, null); -- int version = -1; -- final ZooKeeper zk = zkObjectHandler.getClient().getZookeeper(); -- if (oldConfig != null) { -- Stat stat = new Stat(); -- byte [] data = null; -- try { -- data = zk.getData(configPath, false, stat); -- } catch (KeeperException e) { -- throw new CloudnameException(e); -- } catch (InterruptedException e) { -- throw new CloudnameException(e); -- } -- try { -- String stringData = new String(data, Util.CHARSET_NAME); -- if (! stringData.equals(oldConfig)) { -- throw new CloudnameException(""Data did not match old config. Actual old "" -- + stringData + "" specified old "" + oldConfig); -- } -- } catch (UnsupportedEncodingException e) { -- throw new CloudnameException(e); -- } -- version = stat.getVersion(); -- } -- try { -- zk.setData(configPath, newConfig.getBytes(Util.CHARSET_NAME), version); -- } catch (KeeperException e) { -- throw new CloudnameException(e); -- } catch (InterruptedException e) { -- throw new CloudnameException(e); -- } catch (UnsupportedEncodingException e) { -- throw new CloudnameException(e); -- } -- } -- -- -- @Override -- public String getConfig(final Coordinate coordinate) -- throws CoordinateMissingException, CloudnameException { -- String configPath = ZkCoordinatePath.getConfigPath(coordinate, null); -- Stat stat = new Stat(); -- try { -- byte[] data = zkObjectHandler.getClient().getZookeeper().getData( -- configPath, false, stat); -- if (data == null) { -- return null; -- } -- return new String(data, Util.CHARSET_NAME); -- } catch (KeeperException e) { -- throw new CloudnameException(e); -- } catch (InterruptedException e) { -- throw new CloudnameException(e); -- } catch (UnsupportedEncodingException e) { -- throw new CloudnameException(e); -- } -- } -- -- /** -- * Close the connection to ZooKeeper. -- */ -- @Override -- public void close() { -- zkObjectHandler.shutdown(); -- log.fine(""ZooKeeper session closed for "" + connectString); -- scheduler.shutdown(); -- } -- -- /** -- * List the sub-nodes in ZooKeeper owned by Cloudname. -- * @param nodeList -- */ -- public void listRecursively(List nodeList) -- throws CloudnameException, InterruptedException { -- Util.listRecursively(zkObjectHandler.getClient().getZookeeper(), -- ZkCoordinatePath.getCloudnameRoot(), nodeList); -- } -- -- /** -- * This class builds parameters for ZkCloudname. 
-- */ -- public static class Builder { -- private String connectString; -- -- public Builder setConnectString(String connectString) { -- this.connectString = connectString; -- return this; -- } -- -- // TODO(borud, dybdahl): Make this smarter, some ideas: -- // Connect to one node and read from a magic path -- // how many zookeepers that are running and build -- // the path based on this information. -- public Builder setDefaultConnectString() { -- this.connectString = ""z1:2181,z2:2181,z3:2181""; -- return this; -- } -- -- public String getConnectString() { -- return connectString; -- } -- -- public ZkCloudname build() { -- if (connectString.isEmpty()) { -- throw new RuntimeException( -- ""You need to specify connection string before you can build.""); -- } -- return new ZkCloudname(this); -- } -- } --} -diff --git a/cn/src/main/java/org/cloudname/zk/ZkCoordinateData.java b/cn/src/main/java/org/cloudname/zk/ZkCoordinateData.java -deleted file mode 100644 -index 622b6482..00000000 ---- a/cn/src/main/java/org/cloudname/zk/ZkCoordinateData.java -+++ /dev/null -@@ -1,228 +0,0 @@ --package org.cloudname.zk; -- --import org.apache.zookeeper.KeeperException; --import org.apache.zookeeper.Watcher; --import org.apache.zookeeper.ZooKeeper; --import org.apache.zookeeper.data.Stat; --import org.cloudname.CloudnameException; --import org.cloudname.Endpoint; --import org.cloudname.ServiceState; --import org.cloudname.ServiceStatus; --import com.fasterxml.jackson.core.JsonFactory; --import com.fasterxml.jackson.core.JsonGenerator; --import com.fasterxml.jackson.core.JsonParser; --import com.fasterxml.jackson.core.type.TypeReference; --import com.fasterxml.jackson.databind.ObjectMapper; -- --import java.io.IOException; --import java.io.StringWriter; --import java.io.UnsupportedEncodingException; --import java.util.Collection; --import java.util.HashMap; --import java.util.HashSet; --import java.util.List; --import java.util.Map; --import java.util.Set; -- --/** -- * ZkCoordinateData represent the data regarding a coordinate. It can return an immutable snapshot. -- * The class has support for deserializing and serializing the data and methods for accessing -- * endpoints. The class is fully thread-safe. -- * -- * @auther dybdahl -- */ --public final class ZkCoordinateData { -- /** -- * The status of the coordinate, is it running etc. -- */ -- private ServiceStatus serviceStatus = new ServiceStatus(ServiceState.UNASSIGNED, -- ""No service state has been assigned""); -- -- /** -- * The endpoints registered at the coordinate mapped by endpoint name. -- */ -- private final Map endpointsByName = new HashMap(); -- -- // Used for deserializing. -- private final ObjectMapper objectMapper = new ObjectMapper(); -- -- private final Object localVariablesMonitor = new Object(); -- -- /** -- * Create a new immutable snapshot object. -- */ -- public Snapshot snapshot() { -- synchronized (localVariablesMonitor) { -- return new Snapshot(serviceStatus, endpointsByName); -- } -- } -- -- /** -- * Sets status, overwrite any existing status information. -- */ -- public ZkCoordinateData setStatus(ServiceStatus status) { -- synchronized (localVariablesMonitor) { -- this.serviceStatus = status; -- return this; -- } -- } -- -- /** -- * Adds new endpoints to the builder. It is legal to add a new endpoint with an endpoint -- * that already exists. 
-- */ -- public ZkCoordinateData putEndpoints(final List newEndpoints) { -- synchronized (localVariablesMonitor) { -- for (Endpoint endpoint : newEndpoints) { -- endpointsByName.put(endpoint.getName(), endpoint); -- } -- } -- return this; -- } -- -- /** -- * Remove endpoints from the Dynamic object. -- */ -- public ZkCoordinateData removeEndpoints(final List names) { -- synchronized (localVariablesMonitor) { -- for (String name : names) { -- if (! endpointsByName.containsKey(name)) { -- throw new IllegalArgumentException(""endpoint does not exist: "" + name); -- } -- if (null == endpointsByName.remove(name)) { -- throw new IllegalArgumentException( -- ""Endpoint does not exists, null in internal structure."" + name); -- } -- } -- } -- return this; -- } -- -- /** -- * Sets the state of the Dynamic object based on a serialized byte string. -- * Any old data is overwritten. -- * @throws IOException if something went wrong, should not happen on valid data. -- */ -- public ZkCoordinateData deserialize(byte[] data) throws IOException { -- synchronized (localVariablesMonitor) { -- final String stringData = new String(data, Util.CHARSET_NAME); -- final JsonFactory jsonFactory = new JsonFactory(); -- final JsonParser jp = jsonFactory.createJsonParser(stringData); -- final String statusString = objectMapper.readValue(jp, new TypeReference() {}); -- serviceStatus = ServiceStatus.fromJson(statusString); -- endpointsByName.clear(); -- endpointsByName.putAll((Map)objectMapper.readValue(jp, -- new TypeReference >() {})); -- } -- return this; -- } -- -- /** -- * An immutable representation of the coordinate data. -- */ -- public static class Snapshot { -- /** -- * The status of the coordinate, is it running etc. -- */ -- private final ServiceStatus serviceStatus; -- -- /** -- * The endpoints registered at the coordinate mapped by endpoint name. -- */ -- private final Map endpointsByName; -- -- /** -- * Getter for status of coordinate. -- * @return the service status of the coordinate. -- */ -- public ServiceStatus getServiceStatus() { -- return serviceStatus; -- } -- -- /** -- * Getter for endpoint of the coordinate given the endpoint name. -- * @param name of the endpoint. -- * @return the endpoint or null if non-existing. -- */ -- public Endpoint getEndpoint(final String name) { -- return endpointsByName.get(name); -- } -- -- /** -- * Returns all the endpoints. -- * @return set of endpoints. -- */ -- public Set getEndpoints() { -- Set endpoints = new HashSet(); -- endpoints.addAll(endpointsByName.values()); -- return endpoints; -- } -- -- /** -- * A method for getting all endpoints. -- * @param endpoints The endpoints are put in this list. -- */ -- public void appendAllEndpoints(final Collection endpoints) { -- endpoints.addAll(endpointsByName.values()); -- } -- -- /** -- * Return a serialized string representing the status and endpoint. It can be de-serialize -- * by the inner class. -- * @return The serialized string. -- * @throws IOException if something goes wrong, should not be a common problem though. 
-- */ -- public String serialize() { -- final StringWriter stringWriter = new StringWriter(); -- final JsonGenerator generator; -- -- try { -- generator = new JsonFactory(new ObjectMapper()).createJsonGenerator(stringWriter); -- generator.writeString(serviceStatus.toJson()); -- generator.writeObject(endpointsByName); -- -- generator.flush(); -- } catch (IOException e) { -- throw new RuntimeException( -- ""Got IOException while serializing coordinate data."" , e); -- } -- return new String(stringWriter.getBuffer()); -- } -- -- /** -- * Private constructor, only ZkCoordinateData can build this. -- */ -- private Snapshot(ServiceStatus serviceStatus, Map endpointsByName) { -- this.serviceStatus = serviceStatus; -- this.endpointsByName = endpointsByName; -- } -- } -- -- /** -- * Utility function to create and load a ZkCoordinateData from ZooKeeper. -- * @param watcher for callbacks from ZooKeeper. It is ok to pass null. -- * @throws CloudnameException when problems loading data. -- */ -- static public ZkCoordinateData loadCoordinateData( -- final String statusPath, final ZooKeeper zk, final Watcher watcher) -- throws CloudnameException { -- Stat stat = new Stat(); -- try { -- byte[] data; -- if (watcher == null) { -- data = zk.getData(statusPath, false, stat); -- } else { -- data = zk.getData(statusPath, watcher, stat); -- } -- return new ZkCoordinateData().deserialize(data); -- } catch (KeeperException e) { -- throw new CloudnameException(e); -- } catch (UnsupportedEncodingException e) { -- throw new CloudnameException(e); -- } catch (InterruptedException e) { -- throw new CloudnameException(e); -- } catch (IOException e) { -- throw new CloudnameException(e); -- } -- } --} -diff --git a/cn/src/main/java/org/cloudname/zk/ZkCoordinatePath.java b/cn/src/main/java/org/cloudname/zk/ZkCoordinatePath.java -deleted file mode 100644 -index 6e4eb847..00000000 ---- a/cn/src/main/java/org/cloudname/zk/ZkCoordinatePath.java -+++ /dev/null -@@ -1,87 +0,0 @@ --package org.cloudname.zk; -- --import org.cloudname.Coordinate; -- -- --/** -- * A class for creating paths for ZooKeeper. -- * The semantic of a path is string of the form /cn/%cell%/%user%/%service%/%instance%/[status]|[config/%name%] -- -- * The prefix /cn indicates that the content is owned by the CloudName library. -- * Anything that lives under this prefix can only be touched by the Cloudname library. -- * If clients begin to fiddle with nodes under this prefix directly, all deals are off. -- * @author: dybdahl -- */ --public final class ZkCoordinatePath { -- private static final String CN_PATH_PREFIX = ""/cn""; -- private static final String CN_STATUS_NAME = ""status""; -- private static final String CN_CONFIG_NAME = ""config""; -- -- public static String getCloudnameRoot() { -- return CN_PATH_PREFIX; -- } -- /** -- * Builds the root path of a coordinate. -- * @param coordinate -- * @return the path of the coordinate in ZooKeeper (/cn/%cell%/%user%/%service%/%instance%). -- */ -- public static String getCoordinateRoot(final Coordinate coordinate) { -- return coordinateAsPath(coordinate.getCell(), coordinate.getUser(), coordinate.getService(), -- coordinate.getInstance()); -- } -- -- /** -- * Builds the status path of a coordinate. -- * @param coordinate -- * @return full status path (/cn/%cell%/%user%/%service%/%instance%/status) -- */ -- public static String getStatusPath(final Coordinate coordinate) { -- return getCoordinateRoot(coordinate) + ""/"" + CN_STATUS_NAME; -- } -- -- /** -- * Builds the config path of a coordinate. 
-- * @param coordinate -- * @param name if null, the last path of the path (/%name%) is not included. -- * @return config path /cn/%cell%/%user%/%service%/%instance%/config or -- * /cn/%cell%/%user%/%service%/%instance%/config/%name% -- */ -- public static String getConfigPath(final Coordinate coordinate, final String name) { -- if (name == null) { -- return getCoordinateRoot(coordinate) + ""/"" + CN_CONFIG_NAME; -- } -- return getCoordinateRoot(coordinate) + ""/"" + CN_CONFIG_NAME + ""/"" + name; -- } -- -- /** -- * Builds first part of a ZooKeeper path. -- * @param cell -- * @param user -- * @param service -- * @return path (/cn/%cell%/%user%/%service%) -- */ -- public static String coordinateWithoutInstanceAsPath( -- final String cell, final String user, final String service) { -- return CN_PATH_PREFIX + ""/"" + cell + ""/"" + user + ""/"" + service; -- } -- -- public static String getStatusPath(String cell, String user, String service, Integer instance) { -- return coordinateAsPath(cell, user, service, instance) + ""/"" + CN_STATUS_NAME; -- } -- -- /** -- * Builds first part of a ZooKeeper path. -- * @param cell -- * @param user -- * @param service -- * @param instance -- * @return path (/cn/%cell%/%user%/%service%/%instance%) -- */ -- private static String coordinateAsPath( -- final String cell, final String user, final String service, Integer instance) { -- return coordinateWithoutInstanceAsPath(cell, user, service) + ""/"" + instance.toString(); -- } -- -- // Should not be instantiated. -- private ZkCoordinatePath() {} --} -diff --git a/cn/src/main/java/org/cloudname/zk/ZkObjectHandler.java b/cn/src/main/java/org/cloudname/zk/ZkObjectHandler.java -deleted file mode 100644 -index e17e0009..00000000 ---- a/cn/src/main/java/org/cloudname/zk/ZkObjectHandler.java -+++ /dev/null -@@ -1,157 +0,0 @@ --package org.cloudname.zk; -- --import org.apache.zookeeper.ZooKeeper; -- --import java.util.HashSet; --import java.util.Set; --import java.util.concurrent.atomic.AtomicBoolean; -- --/** -- * Class that keeps an instance of zookeeper. It has a sub-class with read access and -- * a listener service. -- * @author dybdahl -- */ --public class ZkObjectHandler { -- private ZooKeeper zooKeeper = null; -- private final Object zooKeeperMonitor = new Object(); -- -- private final Set registeredCallbacks = -- new HashSet(); -- private final Object callbacksMonitor = new Object(); -- -- private final AtomicBoolean isConnected = new AtomicBoolean(true); -- -- /** -- * Constructor -- * @param zooKeeper first zooKeeper to use, should not be null. -- */ -- public ZkObjectHandler(final ZooKeeper zooKeeper) { -- this.zooKeeper = zooKeeper; -- } -- -- /** -- * Interface for notification of connection state changes. -- */ -- public interface ConnectionStateChanged { -- void connectionUp(); -- void connectionDown(); -- void shutDown(); -- } -- -- /** -- * Indicate that zookeeper connection is working by calling this method. -- */ -- public void connectionUp() { -- boolean previous = isConnected.getAndSet(true); -- if (previous == true) { return; } -- synchronized (callbacksMonitor) { -- for (ConnectionStateChanged connectionStateChanged : registeredCallbacks) { -- connectionStateChanged.connectionUp(); -- } -- } -- } -- -- /** -- * Indicate that zookeeper connection is broken by calling this method. 
-- */ -- public void connectionDown() { -- boolean previous = isConnected.getAndSet(false); -- if (previous == false) { return; } -- synchronized (callbacksMonitor) { -- for (ConnectionStateChanged connectionStateChanged : registeredCallbacks) { -- connectionStateChanged.connectionDown(); -- } -- } -- } -- -- /** -- * Every class using Zookeeper has an instance of this Client class -- * to check the connection and fetch the instance. -- */ -- public class Client { -- public ZooKeeper getZookeeper() { -- synchronized (zooKeeperMonitor) { -- return zooKeeper; -- } -- } -- -- /** -- * Check if we are connected to Zookeeper -- * @return True if zkCloudname confirmed connection <1000ms ago. -- */ -- public boolean isConnected() { -- return isConnected.get(); -- } -- -- /** -- * Register a callback. -- * @param connectionStateChanged Callback to register -- * @return true if this is a new callback. -- */ -- public boolean registerListener(ConnectionStateChanged connectionStateChanged) { -- synchronized (callbacksMonitor) { -- return registeredCallbacks.add(connectionStateChanged); -- } -- } -- -- /** -- * Deregister a callback. -- * @param connectionStateChanged Callback to deregister. -- * @return true if the callback was registered. -- */ -- public boolean deregisterListener(ConnectionStateChanged connectionStateChanged) { -- synchronized (callbacksMonitor) { -- return registeredCallbacks.remove(connectionStateChanged); -- } -- } -- } -- -- /** -- * Returns client -- * @return client object. -- */ -- public Client getClient() { -- return new Client(); -- } -- -- /** -- * Update zooKeeper instance. -- * @param zooKeeper -- */ -- public void setZooKeeper(final ZooKeeper zooKeeper) { -- synchronized (zooKeeperMonitor) { -- this.zooKeeper = zooKeeper; -- } -- } -- -- /** -- * Closes zooKeeper object. -- */ -- public void close() { -- synchronized (zooKeeperMonitor) { -- if (zooKeeper == null) { return; } -- -- try { -- zooKeeper.close(); -- } catch (InterruptedException e) { -- // ignore -- } -- } -- } -- -- /** -- * Shut down all listeners. -- */ -- public void shutdown() { -- synchronized (callbacksMonitor) { -- for (ConnectionStateChanged connectionStateChanged : registeredCallbacks) { -- connectionStateChanged.shutDown(); -- } -- } -- try { -- zooKeeper.close(); -- } catch (InterruptedException e) { -- // ignore -- } -- } --} -\ No newline at end of file -diff --git a/cn/src/main/java/org/cloudname/zk/ZkResolver.java b/cn/src/main/java/org/cloudname/zk/ZkResolver.java -deleted file mode 100644 -index 80b49197..00000000 ---- a/cn/src/main/java/org/cloudname/zk/ZkResolver.java -+++ /dev/null -@@ -1,460 +0,0 @@ --package org.cloudname.zk; -- --import org.apache.zookeeper.KeeperException; --import org.apache.zookeeper.ZooKeeper; --import org.cloudname.*; -- --import java.util.*; --import java.util.logging.Level; --import java.util.logging.Logger; --import java.util.regex.Pattern; --import java.util.regex.Matcher; -- -- --/** -- * This class is used to resolve Cloudname coordinates into endpoints. 
-- * -- * @author borud -- */ --public final class ZkResolver implements Resolver, ZkObjectHandler.ConnectionStateChanged { -- -- private static final Logger log = Logger.getLogger(ZkResolver.class.getName()); -- -- private Map strategies; -- -- private final ZkObjectHandler.Client zkGetter; -- -- private final Object dynamicAddressMonitor = new Object(); -- -- private Map dynamicAddressesByListener = new HashMap(); -- -- @Override -- public void connectionUp() { -- synchronized (dynamicAddressMonitor) { -- for (ResolverListener listener : dynamicAddressesByListener.keySet()) { -- listener.endpointEvent(ResolverListener.Event.CONNECTION_OK, null); -- } -- } -- } -- -- @Override -- public void connectionDown() { -- synchronized (dynamicAddressMonitor) { -- for (ResolverListener listener : dynamicAddressesByListener.keySet()) { -- listener.endpointEvent(ResolverListener.Event.LOST_CONNECTION, null); -- } -- } -- } -- -- @Override -- public void shutDown() { -- // Nothing to shut down here. -- } -- -- public static class Builder { -- -- final private Map strategies = new HashMap(); -- -- public Builder addStrategy(ResolverStrategy strategy) { -- strategies.put(strategy.getName(), strategy); -- return this; -- } -- -- public Map getStrategies() { -- return strategies; -- } -- -- public ZkResolver build(ZkObjectHandler.Client zkGetter) { -- return new ZkResolver(this, zkGetter); -- } -- -- } -- -- -- // Matches coordinate with endpoint of the form: -- // endpoint.instance.service.user.cell -- public static final Pattern endpointPattern -- = Pattern.compile( ""^([a-z][a-z0-9-_]*)\\."" // endpoint -- + ""(\\d+)\\."" // instance -- + ""([a-z][a-z0-9-_]*)\\."" // service -- + ""([a-z][a-z0-9-_]*)\\."" // user -- + ""([a-z][a-z-_]*)\\z""); // cell -- -- // Parses abstract coordinate of the form: -- // strategy.service.user.cell. This pattern is useful for -- // resolving hosts, but not endpoints. -- public static final Pattern strategyPattern -- = Pattern.compile( ""^([a-z][a-z0-9-_]*)\\."" // strategy -- + ""([a-z][a-z0-9-_]*)\\."" // service -- + ""([a-z][a-z0-9-_]*)\\."" // user -- + ""([a-z][a-z0-9-_]*)\\z""); // cell -- -- // Parses abstract coordinate of the form: -- // strategy.service.user.cell. This pattern is useful for -- // resolving hosts, but not endpoints. -- public static final Pattern instancePattern -- = Pattern.compile( ""^([a-z0-9-_]*)\\."" // strategy -- + ""([a-z][a-z0-9-_]*)\\."" // service -- + ""([a-z][a-z0-9-_]*)\\."" // user -- + ""([a-z][a-z0-9-_]*)\\z""); // cell -- -- // Parses abstract coordinate of the form: -- // endpoint.strategy.service.user.cell. -- public static final Pattern endpointStrategyPattern -- = Pattern.compile( ""^([a-z][a-z0-9-_]*)\\."" // endpoint -- + ""([a-z][a-z0-9-_]*)\\."" // strategy -- + ""([a-z][a-z0-9-_]*)\\."" // service -- + ""([a-z][a-z0-9-_]*)\\."" // user -- + ""([a-z][a-z0-9-_]*)\\z""); // cell -- -- -- /** -- * Inner class to keep track of parameters parsed from addressExpression. -- */ -- static class Parameters { -- private String endpointName = null; -- private Integer instance = null; -- private String service = null; -- private String user = null; -- private String cell = null; -- private String strategy = null; -- private String expression = null; -- -- /** -- * Constructor that takes an addressExperssion and sets the inner variables. -- * @param addressExpression -- */ -- public Parameters(String addressExpression) { -- this.expression = addressExpression; -- if (! 
(trySetEndPointPattern(addressExpression) || -- trySetStrategyPattern(addressExpression) || -- trySetInstancePattern(addressExpression) || -- trySetEndpointStrategyPattern(addressExpression))) { -- throw new IllegalStateException( -- ""Could not parse addressExpression:"" + addressExpression); -- } -- -- } -- -- /** -- * Returns the original expression set in the constructor of Parameters. -- * @return expression to be resolved. -- */ -- public String getExpression() { -- return expression; -- } -- -- /** -- * Returns strategy. -- * @return the string (e.g. ""all"" or ""any"", or """" if there is no strategy -- * (but instance is specified). -- */ -- public String getStrategy() { -- return strategy; -- } -- -- /** -- * Returns endpoint name if set or """" if not set. -- * @return endpointname. -- */ -- public String getEndpointName() { -- return endpointName; -- } -- -- /** -- * Returns instance if set or negative number if not set. -- * @return instance number. -- */ -- public Integer getInstance() { -- return instance; -- } -- -- /** -- * Returns service -- * @return service name. -- */ -- public String getService() { -- return service; -- } -- -- /** -- * Returns user -- * @return user. -- */ -- public String getUser() { -- return user; -- } -- -- /** -- * Returns cell. -- * @return cell. -- */ -- public String getCell() { -- return cell; -- } -- -- private boolean trySetEndPointPattern(String addressExperssion) { -- Matcher m = endpointPattern.matcher(addressExperssion); -- if (! m.matches()) { -- return false; -- } -- endpointName = m.group(1); -- instance = Integer.parseInt(m.group(2)); -- strategy = """"; -- service = m.group(3); -- user = m.group(4); -- cell = m.group(5); -- return true; -- -- } -- -- private boolean trySetStrategyPattern(String addressExpression) { -- Matcher m = strategyPattern.matcher(addressExpression); -- if (! m.matches()) { -- return false; -- } -- endpointName = """"; -- strategy = m.group(1); -- service = m.group(2); -- user = m.group(3); -- cell = m.group(4); -- instance = -1; -- return true; -- } -- -- private boolean trySetInstancePattern(String addressExpression) { -- Matcher m = instancePattern.matcher(addressExpression); -- if (! m.matches()) { -- return false; -- } -- endpointName = """"; -- instance = Integer.parseInt(m.group(1)); -- service = m.group(2); -- user = m.group(3); -- cell = m.group(4); -- strategy = """"; -- return true; -- } -- -- private boolean trySetEndpointStrategyPattern(String addressExperssion) { -- Matcher m = endpointStrategyPattern.matcher(addressExperssion); -- if (! m.matches()) { -- return false; -- } -- endpointName = m.group(1); -- strategy = m.group(2); -- service = m.group(3); -- user = m.group(4); -- cell = m.group(5); -- instance = -1; -- return true; -- } -- -- } -- -- /** -- * Constructor, to be called from the inner Dynamic class. -- * @param builder -- */ -- private ZkResolver(Builder builder, ZkObjectHandler.Client zkGetter) { -- this.strategies = builder.getStrategies(); -- this.zkGetter = zkGetter; -- zkGetter.registerListener(this); -- } -- -- -- @Override -- public List resolve(String addressExpression) throws CloudnameException { -- Parameters parameters = new Parameters(addressExpression); -- // TODO(borud): add some comments on the decision logic. I'm -- // not sure I am too fond of the check for negative values to -- // have some particular semantics. That smells like a problem -- // waiting to happen. 
-- -- ZooKeeper localZkPointer = zkGetter.getZookeeper(); -- if (localZkPointer == null) { -- throw new CloudnameException(""No connection to ZooKeeper.""); -- } -- List instances = resolveInstances(parameters, localZkPointer); -- -- List endpoints = new ArrayList(); -- for (Integer instance : instances) { -- String statusPath = ZkCoordinatePath.getStatusPath( -- parameters.getCell(), parameters.getUser(), -- parameters.getService(), instance); -- -- try { -- if (! Util.exist(localZkPointer, statusPath)) { -- continue; -- } -- } catch (InterruptedException e) { -- throw new CloudnameException(e); -- -- } -- final ZkCoordinateData zkCoordinateData = ZkCoordinateData.loadCoordinateData( -- statusPath, localZkPointer, null); -- addEndpoints(zkCoordinateData.snapshot(), endpoints, parameters.getEndpointName()); -- -- } -- if (parameters.getStrategy().equals("""")) { -- return endpoints; -- } -- ResolverStrategy strategy = strategies.get(parameters.getStrategy()); -- return strategy.order(strategy.filter(endpoints)); -- } -- -- @Override -- public void removeResolverListener(final ResolverListener listener) { -- synchronized (dynamicAddressMonitor) { -- DynamicExpression expression = dynamicAddressesByListener.remove(listener); -- if (expression == null) { -- throw new IllegalArgumentException(""Do not have the listener in my list.""); -- } -- expression.stop(); -- } -- log.fine(""Removed listener.""); -- } -- -- /** -- * The implementation does filter while listing out nodes. In this way paths that are not of -- * interest are not traversed. -- * @param filter class for filtering out endpoints -- * @return the endpoints that passes the filter -- */ -- @Override -- public Set getEndpoints(final Resolver.CoordinateDataFilter filter) -- throws CloudnameException, InterruptedException { -- -- final Set endpointsIncluded = new HashSet(); -- final String cellPath = ZkCoordinatePath.getCloudnameRoot(); -- final ZooKeeper zk = zkGetter.getZookeeper(); -- try { -- final List cells = zk.getChildren(cellPath, false); -- for (final String cell : cells) { -- if (! filter.includeCell(cell)) { -- continue; -- } -- final String userPath = cellPath + ""/"" + cell; -- final List users = zk.getChildren(userPath, false); -- -- for (final String user : users) { -- if (! filter.includeUser(user)) { -- continue; -- } -- final String servicePath = userPath + ""/"" + user; -- final List services = zk.getChildren(servicePath, false); -- -- for (final String service : services) { -- if (! filter.includeService(service)) { -- continue; -- } -- final String instancePath = servicePath + ""/"" + service; -- final List instances = zk.getChildren(instancePath, false); -- -- for (final String instance : instances) { -- final String statusPath; -- try { -- statusPath = ZkCoordinatePath.getStatusPath( -- cell, user, service, Integer.parseInt(instance)); -- } catch (NumberFormatException e) { -- log.log( -- Level.WARNING, -- ""Got non-number as instance in cn path: "" + instancePath + ""/"" -- + instance + "" skipping."", -- e); -- continue; -- } -- -- ZkCoordinateData zkCoordinateData = null; -- try { -- zkCoordinateData = ZkCoordinateData.loadCoordinateData( -- statusPath, zk, null); -- } catch (CloudnameException e) { -- // This is ok, an unclaimed node will not have status data, we -- // ignore it even though there might also be other exception -- // (this should be rare). The advantage is that we don't need to -- // check if the node exists and hence reduce the load on zookeeper. 
-- continue; -- } -- final Set endpoints = zkCoordinateData.snapshot().getEndpoints(); -- for (final Endpoint endpoint : endpoints) { -- if (filter.includeEndpointname(endpoint.getName())) { -- if (filter.includeServiceState( -- zkCoordinateData.snapshot().getServiceStatus().getState())) { -- endpointsIncluded.add(endpoint); -- } -- } -- } -- } -- } -- } -- } -- } catch (KeeperException e) { -- throw new CloudnameException(e); -- } -- return endpointsIncluded; -- } -- -- @Override -- public void addResolverListener(String expression, ResolverListener listener) -- throws CloudnameException { -- final DynamicExpression dynamicExpression = -- new DynamicExpression(expression, listener, this, zkGetter); -- -- synchronized (dynamicAddressMonitor) { -- DynamicExpression previousExpression = dynamicAddressesByListener.put( -- listener, dynamicExpression); -- if (previousExpression != null) { -- throw new IllegalArgumentException(""It is not legal to register a listener twice.""); -- } -- } -- dynamicExpression.start(); -- } -- -- public static void addEndpoints( -- ZkCoordinateData.Snapshot statusAndEndpoints, List endpoints, -- String endpointname) { -- if (statusAndEndpoints.getServiceStatus().getState() != ServiceState.RUNNING) { -- return; -- } -- if (endpointname.equals("""")) { -- statusAndEndpoints.appendAllEndpoints(endpoints); -- } else { -- Endpoint e = statusAndEndpoints.getEndpoint(endpointname); -- if (e != null) { -- endpoints.add(e); -- } -- } -- } -- -- private List resolveInstances(Parameters parameters, ZooKeeper zk) -- throws CloudnameException { -- List instances = new ArrayList(); -- if (parameters.getInstance() > -1) { -- instances.add(parameters.getInstance()); -- } else { -- try { -- instances = getInstances(zk, -- ZkCoordinatePath.coordinateWithoutInstanceAsPath(parameters.getCell(), -- parameters.getUser(), parameters.getService())); -- } catch (InterruptedException e) { -- throw new CloudnameException(e); -- } -- } -- return instances; -- } -- -- private List getInstances(ZooKeeper zk, String path) -- throws CloudnameException, InterruptedException { -- List paths = new ArrayList(); -- try { -- List children = zk.getChildren(path, false /* watcher */); -- for (String child : children) { -- paths.add(Integer.parseInt(child)); -- } -- } catch (KeeperException e) { -- throw new CloudnameException(e); -- } -- return paths; -- } --} -diff --git a/cn/src/main/java/org/cloudname/zk/ZkServiceHandle.java b/cn/src/main/java/org/cloudname/zk/ZkServiceHandle.java -deleted file mode 100644 -index eb8fb837..00000000 ---- a/cn/src/main/java/org/cloudname/zk/ZkServiceHandle.java -+++ /dev/null -@@ -1,116 +0,0 @@ --package org.cloudname.zk; -- --import org.cloudname.*; -- --import java.util.ArrayList; --import java.util.concurrent.CountDownLatch; --import java.util.concurrent.TimeUnit; --import java.util.logging.Logger; -- --import java.util.List; -- --/** -- * A service handle implementation. It does not have a lot of logic, it wraps ClaimedCoordinate, and -- * handles some config logic. -- * -- * @author borud -- */ --public class ZkServiceHandle implements ServiceHandle { -- private final ClaimedCoordinate claimedCoordinate; -- private static final Logger LOG = Logger.getLogger(ZkServiceHandle.class.getName()); -- -- private final ZkObjectHandler.Client zkClient; -- -- private final Coordinate coordinate; -- -- /** -- * Create a ZkServiceHandle for a given coordinate. -- * -- * @param claimedCoordinate the claimed coordinate for this service handle. 
-- */ -- public ZkServiceHandle( -- ClaimedCoordinate claimedCoordinate, Coordinate coordinate, -- ZkObjectHandler.Client zkClient) { -- this.claimedCoordinate = claimedCoordinate; -- this.coordinate = coordinate; -- this.zkClient = zkClient; -- } -- -- -- @Override -- public boolean waitForCoordinateOkSeconds(int seconds) throws InterruptedException { -- final CountDownLatch latch = new CountDownLatch(1); -- -- CoordinateListener listner = new CoordinateListener() { -- -- @Override -- public void onCoordinateEvent(Event event, String message) { -- if (event == Event.COORDINATE_OK) { -- latch.countDown(); -- } -- } -- }; -- registerCoordinateListener(listner); -- boolean result = latch.await(seconds, TimeUnit.SECONDS); -- claimedCoordinate.deregisterCoordinateListener(listner); -- return result; -- } -- -- -- @Override -- public void setStatus(ServiceStatus status) -- throws CoordinateMissingException, CloudnameException { -- claimedCoordinate.updateStatus(status); -- } -- -- @Override -- public void putEndpoints(List endpoints) -- throws CoordinateMissingException, CloudnameException { -- claimedCoordinate.putEndpoints(endpoints); -- } -- -- @Override -- public void putEndpoint(Endpoint endpoint) -- throws CoordinateMissingException, CloudnameException { -- List endpoints = new ArrayList(); -- endpoints.add(endpoint); -- putEndpoints(endpoints); -- } -- -- @Override -- public void removeEndpoints(List names) -- throws CoordinateMissingException, CloudnameException { -- claimedCoordinate.removeEndpoints(names); -- } -- -- @Override -- public void removeEndpoint(String name) -- throws CoordinateMissingException, CloudnameException { -- List names = new ArrayList(); -- names.add(name); -- removeEndpoints(names); -- } -- -- @Override -- public void registerConfigListener(ConfigListener listener) { -- TrackedConfig trackedConfig = new TrackedConfig( -- ZkCoordinatePath.getConfigPath(coordinate, null), listener, zkClient); -- claimedCoordinate.registerTrackedConfig(trackedConfig); -- trackedConfig.start(); -- } -- -- @Override -- public void registerCoordinateListener(CoordinateListener listener) { -- claimedCoordinate.registerCoordinateListener(listener); -- } -- -- @Override -- public void close() throws CloudnameException { -- claimedCoordinate.releaseClaim(); -- } -- -- @Override -- public String toString() { -- return ""Claimed coordinate instance: ""+ claimedCoordinate.toString(); -- } --} -diff --git a/cn/src/main/java/org/cloudname/zk/ZkTool.java b/cn/src/main/java/org/cloudname/zk/ZkTool.java -deleted file mode 100644 -index 7be0f8cf..00000000 ---- a/cn/src/main/java/org/cloudname/zk/ZkTool.java -+++ /dev/null -@@ -1,359 +0,0 @@ --package org.cloudname.zk; -- --import org.apache.log4j.BasicConfigurator; --import org.apache.log4j.ConsoleAppender; --import org.apache.log4j.Level; --import org.apache.log4j.PatternLayout; --import org.cloudname.*; --import org.cloudname.Resolver.ResolverListener; --import org.cloudname.flags.Flag; --import org.cloudname.flags.Flags; --import java.io.BufferedReader; --import java.io.FileNotFoundException; --import java.io.FileReader; --import java.io.IOException; --import java.io.InputStreamReader; --import java.util.ArrayList; --import java.util.List; --import java.util.regex.Matcher; --import java.util.regex.Pattern; -- -- --/** -- * Command line tool for using the Cloudname library. Run with -- * --help option to see available flags. 
-- * -- * @author dybdahl -- */ --public final class ZkTool { -- @Flag(name=""zookeeper"", description=""A list of host:port for connecting to ZooKeeper."") -- private static String zooKeeperFlag = null; -- -- @Flag(name=""coordinate"", description=""The coordinate to work on."") -- private static String coordinateFlag = null; -- -- @Flag(name=""operation"", options = Operation.class, -- description = ""The operation to do on coordinate."") -- private static Operation operationFlag = Operation.STATUS; -- -- @Flag(name = ""setup-file"", -- description = ""Path to file containing a list of coordinates to create (1 coordinate per line)."") -- private static String filePath = null; -- -- @Flag(name = ""config"", -- description = ""New config if setting new config."") -- private static String configFlag = """"; -- -- @Flag(name = ""resolver-expression"", -- description = ""The resolver expression to listen to events for."") -- private static String resolverExpression = null; -- -- @Flag(name = ""list"", -- description = ""Print the coordinates in ZooKeeper."") -- private static Boolean listFlag = null; -- -- /** -- * List of flag names for flags that select which action the tool should -- * perform. These flags are mutually exclusive. -- */ -- private static String actionSelectingFlagNames = -- ""--setup-file, --resolver, --coordinate, --list""; -- -- /** -- * The possible operations to do on a coordinate. -- */ -- public enum Operation { -- /** -- * Create a new coordinate. -- */ -- CREATE, -- /** -- * Delete a coordinate. -- */ -- DELETE, -- /** -- * Print out some status about a coordinate. -- */ -- STATUS, -- /** -- * Print the host of a coordinate. -- */ -- HOST, -- /** -- * Set config -- */ -- SET_CONFIG, -- /** -- * Read config -- */ -- READ_CONFIG; -- } -- -- /** -- * Matches coordinate of type: cell.user.service.instance.config. -- */ -- public static final Pattern instanceConfigPattern -- = Pattern.compile(""\\/cn\\/([a-z][a-z-_]*)\\/"" // cell -- + ""([a-z][a-z0-9-_]*)\\/"" // user -- + ""([a-z][a-z0-9-_]*)\\/"" // service -- + ""(\\d+)\\/config\\z""); // instance -- -- private static ZkCloudname cloudname = null; -- -- public static void main(final String[] args) { -- -- // Disable log system, we want full control over what is sent to console. -- final ConsoleAppender consoleAppender = new ConsoleAppender(); -- consoleAppender.activateOptions(); -- consoleAppender.setLayout(new PatternLayout(""%p %t %C:%M %m%n"")); -- consoleAppender.setThreshold(Level.OFF); -- BasicConfigurator.configure(consoleAppender); -- -- // Parse the flags. 
-- Flags flags = new Flags() -- .loadOpts(ZkTool.class) -- .parse(args); -- -- // Check if we wish to print out help text -- if (flags.helpFlagged()) { -- flags.printHelp(System.out); -- System.out.println(""Must specify one of the following options:""); -- System.out.println(actionSelectingFlagNames); -- return; -- } -- -- checkArgumentCombinationValid(flags); -- -- ZkCloudname.Builder builder = new ZkCloudname.Builder(); -- if (zooKeeperFlag == null) { -- builder.setDefaultConnectString(); -- } else { -- builder.setConnectString(zooKeeperFlag); -- } -- try { -- cloudname = builder.build().connect(); -- } catch (CloudnameException e) { -- System.err.println(""Could not connect to zookeeper "" + e.getMessage()); -- return; -- } -- -- try { -- if (filePath != null) { -- handleFilepath(); -- } else if (coordinateFlag != null) { -- handleCoordinateOperation(); -- } else if (resolverExpression != null) { -- handleResolverExpression(); -- } else if (listFlag) { -- listCoordinates(); -- } else { -- System.err.println(""No action specified""); -- } -- } catch (Exception e) { -- System.err.println(""An error occurred: "" + e.getMessage()); -- e.printStackTrace(); -- } finally { -- cloudname.close(); -- } -- } -- -- private static void checkArgumentCombinationValid(final Flags flags) { -- int actionSelectedCount = 0; -- final Object[] actionSelectingFlags = { -- filePath, coordinateFlag, resolverExpression, listFlag -- }; -- for (Object flag: actionSelectingFlags) { -- if (flag != null) { -- actionSelectedCount++; -- } -- } -- if (actionSelectedCount != 1) { -- System.err.println(""Must specify exactly one of the following options:""); -- System.err.println(actionSelectingFlagNames); -- flags.printHelp(System.err); -- System.exit(1); -- } -- } -- -- private static void handleResolverExpression() { -- final Resolver resolver = cloudname.getResolver(); -- try { -- System.out.println(""Added a resolver listener for expression: "" + resolverExpression + "". 
Will print out all events for the given expression.""); -- resolver.addResolverListener(resolverExpression, new ResolverListener() { -- @Override -- public void endpointEvent(Event event, Endpoint endpoint) { -- System.out.println(""Received event: "" + event + "" for endpoint: "" + endpoint); -- } -- }); -- } catch (CloudnameException e) { -- System.err.println(""Problem with cloudname: "" + e.getMessage()); -- } -- final BufferedReader br = new BufferedReader(new InputStreamReader(System.in)); -- while(true) { -- System.out.println(""Press enter to exit""); -- String s = null; -- try { -- s = br.readLine(); -- } catch (IOException e) { -- e.printStackTrace(); -- } -- if (s.length() == 0) { -- System.out.println(""Exiting""); -- System.exit(0); -- } -- } -- } -- -- private static void handleCoordinateOperation() { -- final Resolver resolver = cloudname.getResolver(); -- final Coordinate coordinate = Coordinate.parse(coordinateFlag); -- switch (operationFlag) { -- case CREATE: -- try { -- cloudname.createCoordinate(coordinate); -- } catch (CloudnameException e) { -- System.err.println(""Got error: "" + e.getMessage()); -- break; -- } catch (CoordinateExistsException e) { -- e.printStackTrace(); -- break; -- } -- System.err.println(""Created coordinate.""); -- break; -- case DELETE: -- try { -- cloudname.destroyCoordinate(coordinate); -- } catch (CoordinateDeletionException e) { -- System.err.println(""Got error: "" + e.getMessage()); -- return; -- } catch (CoordinateMissingException e) { -- System.err.println(""Got error: "" + e.getMessage()); -- break; -- } catch (CloudnameException e) { -- System.err.println(""Got error: "" + e.getMessage()); -- break; -- } -- System.err.println(""Deleted coordinate.""); -- break; -- case STATUS: { -- ServiceStatus status; -- try { -- status = cloudname.getStatus(coordinate); -- } catch (CloudnameException e) { -- System.err.println(""Problems loading status, is service running? 
Error:\n"" + e.getMessage()); -- break; -- } -- System.err.println(""Status:\n"" + status.getState().toString() + "" "" + status.getMessage()); -- List endpoints = null; -- try { -- endpoints = resolver.resolve(""all."" + coordinate.getService() -- + ""."" + coordinate.getUser() + ""."" + coordinate.getCell()); -- } catch (CloudnameException e) { -- System.err.println(""Got error: "" + e.getMessage()); -- break; -- } -- System.err.println(""Endpoints:""); -- for (Endpoint endpoint : endpoints) { -- if (endpoint.getCoordinate().getInstance() == coordinate.getInstance()) { -- System.err.println(endpoint.getName() + ""-->"" + endpoint.getHost() + "":"" + endpoint.getPort() -- + "" protocol:"" + endpoint.getProtocol()); -- System.err.println(""Endpoint data:\n"" + endpoint.getEndpointData()); -- } -- } -- break; -- } -- case HOST: { -- List endpoints = null; -- try { -- endpoints = resolver.resolve(coordinate.asString()); -- } catch (CloudnameException e) { -- System.err.println(""Could not resolve "" + coordinate.asString() + "" Error:\n"" + e.getMessage()); -- break; -- } -- for (Endpoint endpoint : endpoints) { -- System.out.println(""Host: "" + endpoint.getHost()); -- } -- } -- break; -- case SET_CONFIG: -- try { -- cloudname.setConfig(coordinate, configFlag, null); -- } catch (CloudnameException e) { -- System.err.println(""Got error: "" + e.getMessage()); -- break; -- -- } catch (CoordinateMissingException e) { -- System.err.println(""Non-existing coordinate.""); -- } -- System.err.println(""Config updated.""); -- break; -- -- case READ_CONFIG: -- try { -- System.out.println(""Config is:"" + cloudname.getConfig(coordinate)); -- } catch (CoordinateMissingException e) { -- System.err.println(""Non-existing coordinate.""); -- } catch (CloudnameException e) { -- System.err.println(""Problem with cloudname: "" + e.getMessage()); -- } -- break; -- default: -- System.out.println(""Unknown command "" + operationFlag); -- } -- } -- -- private static void listCoordinates() { -- try { -- final List nodeList = new ArrayList(); -- cloudname.listRecursively(nodeList); -- for (final String node : nodeList) { -- final Matcher m = instanceConfigPattern.matcher(node); -- -- /* -- * We only parse config paths, and we convert these to -- * Cloudname coordinates to not confuse the user. -- */ -- if (m.matches()) { -- System.out.printf(""%s.%s.%s.%s\n"", -- m.group(4), m.group(3), m.group(2), m.group(1)); -- } -- } -- } catch (final CloudnameException e) { -- System.err.println(""Got error: "" + e.getMessage()); -- } catch (final InterruptedException e) { -- System.err.println(""Got error: "" + e.getMessage()); -- } -- } -- -- private static void handleFilepath() { -- final BufferedReader br; -- try { -- br = new BufferedReader(new FileReader(filePath)); -- } catch (FileNotFoundException e) { -- throw new RuntimeException(""File not found: "" + filePath, e); -- } -- String line; -- try { -- while ((line = br.readLine()) != null) { -- try { -- cloudname.createCoordinate(Coordinate.parse(line)); -- System.out.println(""Created "" + line); -- } catch (Exception e) { -- System.err.println(""Could not create: "" + line + ""Got error: "" + e.getMessage()); -- } -- } -- } catch (IOException e) { -- throw new RuntimeException(""Failed to read coordinate from file. "" + e.getMessage(), e); -- } finally { -- cloudname.close(); -- try { -- br.close(); -- } catch (IOException e) { -- System.err.println(""Failed while trying to close file reader. "" + e.getMessage()); -- } -- } -- } -- -- // Should not be instantiated. 
-- private ZkTool() {} --} -diff --git a/cn/src/test/java/org/cloudname/CoordinateTest.java b/cn/src/test/java/org/cloudname/CoordinateTest.java -deleted file mode 100644 -index 42ee09cc..00000000 ---- a/cn/src/test/java/org/cloudname/CoordinateTest.java -+++ /dev/null -@@ -1,74 +0,0 @@ --package org.cloudname; -- --import org.junit.*; --import static org.junit.Assert.*; -- --/** -- * Unit tests for Coordinate. -- * -- * @author borud -- */ --public class CoordinateTest { -- @Test -- public void testSimple() throws Exception { -- Coordinate c = Coordinate.parse(""1.service.user.cell""); -- assertNotNull(c); -- assertEquals(1, c.getInstance()); -- assertEquals(""service"", c.getService()); -- assertEquals(""user"", c.getUser()); -- assertEquals(""cell"", c.getCell()); -- } -- -- @Test (expected = IllegalArgumentException.class) -- public void testInvalidInstanceNumber() throws Exception { -- new Coordinate(-1, ""service"", ""user"", ""cell""); -- } -- -- @Test -- public void testEquals() throws Exception { -- assertEquals( -- new Coordinate(1,""foo"", ""bar"", ""baz""), -- new Coordinate(1, ""foo"", ""bar"", ""baz"") -- ); -- } -- -- @Test -- public void testSymmetry() throws Exception { -- String s = ""0.fooservice.baruser.bazcell""; -- assertEquals(s, Coordinate.parse(s).asString()); -- assertEquals(s, new Coordinate(0, -- ""fooservice"", -- ""baruser"", -- ""bazcell"").asString()); -- -- System.out.println(Coordinate.parse(s)); -- } -- -- @Test (expected = IllegalArgumentException.class) -- public void testInvalidInstance() throws Exception { -- Coordinate.parse(""invalid.service.user.cell""); -- } -- -- @Test (expected = IllegalArgumentException.class) -- public void testInvalidCharacters() throws Exception { -- Coordinate.parse(""0.ser!vice.user.cell""); -- } -- -- @Test -- public void testLegalCharacters() throws Exception { -- Coordinate.parse(""0.service-test.user.cell""); -- Coordinate.parse(""0.service_test.user.cell""); -- Coordinate.parse(""0.service.user-foo.cell""); -- Coordinate.parse(""0.service.user_foo.ce_ll""); -- } -- -- @Test (expected = IllegalArgumentException.class) -- public void testRequireStartsWithLetter() throws Exception { -- Coordinate.parse(""0._aaa._bbb._ccc""); -- } -- -- @Test (expected = IllegalArgumentException.class) -- public void testIllegalArgumentsConstructor() throws Exception { -- new Coordinate(1, ""service"", ""_user"", ""cell""); -- } --} -\ No newline at end of file -diff --git a/cn/src/test/java/org/cloudname/EndpointTest.java b/cn/src/test/java/org/cloudname/EndpointTest.java -deleted file mode 100644 -index 0769ac51..00000000 ---- a/cn/src/test/java/org/cloudname/EndpointTest.java -+++ /dev/null -@@ -1,31 +0,0 @@ --package org.cloudname; -- --import org.junit.*; --import static org.junit.Assert.*; -- --/** -- * Unit tests for Endpoint. 
-- * -- * @author borud -- */ --public class EndpointTest { -- @Test -- public void testSimple() throws Exception { -- Endpoint endpoint = new Endpoint(Coordinate.parse(""1.foo.bar.zot""), -- ""rest-api"", -- ""somehost"", -- 4711, -- ""http"", -- null); -- String json = endpoint.toJson(); -- Endpoint endpoint2 = Endpoint.fromJson(json); -- -- assertEquals(endpoint.getCoordinate(), endpoint2.getCoordinate()); -- assertEquals(endpoint.getName(), endpoint2.getName()); -- assertEquals(endpoint.getHost(), endpoint2.getHost()); -- assertEquals(endpoint.getPort(), endpoint2.getPort()); -- assertEquals(endpoint.getEndpointData(), endpoint2.getEndpointData()); -- -- System.out.println(json); -- } --} -\ No newline at end of file -diff --git a/cn/src/test/java/org/cloudname/ServiceStatusTest.java b/cn/src/test/java/org/cloudname/ServiceStatusTest.java -deleted file mode 100644 -index ec7fe5b3..00000000 ---- a/cn/src/test/java/org/cloudname/ServiceStatusTest.java -+++ /dev/null -@@ -1,24 +0,0 @@ --package org.cloudname; -- --import org.junit.*; --import static org.junit.Assert.*; -- --/** -- * Unit tests for ServiceStatus. -- * -- * @author borud -- */ --public class ServiceStatusTest { -- @Test -- public void testSimple() throws Exception { -- ServiceStatus status = new ServiceStatus(ServiceState.STARTING, -- ""Loading hamster into wheel""); -- String json = status.toJson(); -- assertNotNull(json); -- -- ServiceStatus status2 = ServiceStatus.fromJson(json); -- -- assertEquals(status.getMessage(), status2.getMessage()); -- assertSame(status.getState(), status2.getState()); -- } --} -\ No newline at end of file -diff --git a/cn/src/test/java/org/cloudname/StrategyAnyTest.java b/cn/src/test/java/org/cloudname/StrategyAnyTest.java -deleted file mode 100644 -index d6f56f54..00000000 ---- a/cn/src/test/java/org/cloudname/StrategyAnyTest.java -+++ /dev/null -@@ -1,88 +0,0 @@ --package org.cloudname; -- --import static org.hamcrest.Matchers.is; --import static org.hamcrest.Matchers.lessThan; --import org.junit.Before; --import org.junit.Test; -- --import java.util.ArrayList; --import java.util.List; -- --import static org.junit.Assert.assertThat; --import static org.junit.Assert.assertTrue; -- -- --/** -- * Unit tests for StrategyAny. -- * @author dybdahl -- */ --public class StrategyAnyTest { -- private List endpoints; -- -- /** -- * Adds a list endpoints with even instance number to the endpoints list. -- */ -- @Before -- public void setup() { -- endpoints = new ArrayList(); -- // Only even instance numbers. -- for (int i = 0; i < 100; i+= 2) { -- endpoints.add(new Endpoint(Coordinate.parse(String.valueOf(i) + "".foo.bar.zot""), -- ""rest-api"", -- ""somehost"", -- 4711, -- ""http"", -- null)); -- } -- } -- -- /** -- * Different clients should have different lists. -- */ -- @Test -- public void testDifferentLists() { -- StrategyAny strategyAny = new StrategyAny(); -- -- List sortedResult = strategyAny.order(new ArrayList(endpoints)); -- -- // Try with up tp 150 clients, if they all have the same first element, something is wrong. -- // In each iteration there is 1/50 probability for this. For 150 runs, the probability for -- // false negative is 1,42724769 × 10^-255 (e.g. zero). 
-- for (int z = 0; z < 150; z++) { -- StrategyAny strategyAny2 = new StrategyAny(); -- List sortedResult2 = strategyAny2.order(new ArrayList(endpoints)); -- if (sortedResult.get(0).getCoordinate().getInstance() != -- sortedResult2.get(0).getCoordinate().getInstance()) { -- return; -- } -- } -- assertTrue(false); -- } -- -- /** -- * Test that insertion does only create a new first element now and then. -- */ -- @Test -- public void testInsertions() { -- StrategyAny strategyAny = new StrategyAny(); -- -- List sortedResult = strategyAny.order(new ArrayList(endpoints)); -- int newFrontEndpoint = 0; -- for (int c = 1; c < 30; c +=2) { -- int headInstance = sortedResult.get(0).getCoordinate().getInstance(); -- sortedResult.add(new Endpoint(Coordinate.parse(String.valueOf(c) + "".foo.bar.zot""), -- ""rest-api"", -- ""somehost"", -- 4711, -- ""http"", -- null)); -- sortedResult = strategyAny.order(sortedResult); -- if (headInstance != sortedResult.get(0).getCoordinate().getInstance()) { -- ++newFrontEndpoint; -- } -- } -- // For each insertion it a probability of less than 1/50 that front element is changed. The probability -- // that more than 10 front elements are changed should be close to zero. -- assertThat(newFrontEndpoint, is(lessThan(10))); -- } --} -diff --git a/cn/src/test/java/org/cloudname/zk/ZkCloudnameTest.java b/cn/src/test/java/org/cloudname/zk/ZkCloudnameTest.java -deleted file mode 100644 -index 5bccc3f6..00000000 ---- a/cn/src/test/java/org/cloudname/zk/ZkCloudnameTest.java -+++ /dev/null -@@ -1,324 +0,0 @@ --package org.cloudname.zk; -- --import org.cloudname.*; -- --import org.apache.zookeeper.WatchedEvent; --import org.apache.zookeeper.Watcher; --import org.apache.zookeeper.ZooKeeper; -- --import java.io.IOException; --import java.util.ArrayList; --import java.util.List; --import java.util.concurrent.*; -- --import org.junit.*; --import org.junit.rules.TemporaryFolder; --import static org.junit.Assert.*; --import static org.junit.Assert.assertTrue; -- --import org.cloudname.testtools.Net; --import org.cloudname.testtools.zookeeper.EmbeddedZooKeeper; -- --import java.io.File; --import java.util.logging.Logger; -- --/** -- * Unit test for the ZkCloudname class. -- * -- * @author borud, dybdahl -- */ --public class ZkCloudnameTest { -- private static final Logger LOG = Logger.getLogger(ZkCloudnameTest.class.getName()); -- -- private ZooKeeper zk; -- private int zkport; -- -- @Rule public TemporaryFolder temp = new TemporaryFolder(); -- -- /** -- * Set up an embedded ZooKeeper instance backed by a temporary -- * directory. The setup procedure also allocates a port that is -- * free for the ZooKeeper server so that you should be able to run -- * multiple instances of this test. 
-- */ -- @Before -- public void setup() throws Exception { -- File rootDir = temp.newFolder(""zk-test""); -- zkport = Net.getFreePort(); -- -- LOG.info(""EmbeddedZooKeeper rootDir="" + rootDir.getCanonicalPath() + "", port="" + zkport -- ); -- -- // Set up and initialize the embedded ZooKeeper -- final EmbeddedZooKeeper ezk = new EmbeddedZooKeeper(rootDir, zkport); -- ezk.init(); -- -- // Set up a zookeeper client that we can use for inspection -- final CountDownLatch connectedLatch = new CountDownLatch(1); -- -- zk = new ZooKeeper(""localhost:"" + zkport, 1000, new Watcher() { -- public void process(WatchedEvent event) { -- if (event.getState() == Event.KeeperState.SyncConnected) { -- connectedLatch.countDown(); -- } -- } -- }); -- connectedLatch.await(); -- -- LOG.info(""ZooKeeper port is "" + zkport); -- } -- -- @After -- public void tearDown() throws Exception { -- zk.close(); -- } -- -- /** -- * Tests that the time-out mechanism on connecting to ZooKeeper works. -- */ -- @Test -- public void testTimeout() throws IOException, InterruptedException { -- int deadPort = Net.getFreePort(); -- try { -- new ZkCloudname.Builder().setConnectString(""localhost:"" + deadPort).build() -- .connectWithTimeout(1000, TimeUnit.NANOSECONDS); -- fail(""Expected time-out exception.""); -- } catch (CloudnameException e) { -- // Expected. -- } -- } -- -- /** -- * A relatively simple voyage through a typical lifecycle. -- */ -- @Test -- public void testSimple() throws Exception { -- final Coordinate c = Coordinate.parse(""1.service.user.cell""); -- final ZkCloudname cn = makeLocalZkCloudname(); -- -- assertFalse(pathExists(""/cn/cell/user/service/1"")); -- cn.createCoordinate(c); -- -- // Coordinate should exist, but no status node -- assertTrue(pathExists(""/cn/cell/user/service/1"")); -- assertTrue(pathExists(""/cn/cell/user/service/1/config"")); -- assertFalse(pathExists(""/cn/cell/user/service/1/status"")); -- -- // Claiming the coordinate creates the status node -- final ServiceHandle handle = cn.claim(c); -- assertTrue(handle.waitForCoordinateOkSeconds(3)); -- assertNotNull(handle); -- final CountDownLatch latch = new CountDownLatch(1); -- handle.registerCoordinateListener(new CoordinateListener() { -- -- @Override -- public void onCoordinateEvent(Event event, String message) { -- if (event == Event.COORDINATE_OK) { -- latch.countDown(); -- } -- } -- }); -- assertTrue(latch.await(2, TimeUnit.SECONDS)); -- -- final CountDownLatch configLatch1 = new CountDownLatch(1); -- final CountDownLatch configLatch2 = new CountDownLatch(2); -- final StringBuilder buffer = new StringBuilder(); -- handle.registerConfigListener(new ConfigListener() { -- @Override -- public void onConfigEvent(Event event, String data) { -- buffer.append(data); -- configLatch1.countDown(); -- configLatch2.countDown(); -- } -- }); -- assertTrue(configLatch1.await(5, TimeUnit.SECONDS)); -- assertEquals(buffer.toString(), """"); -- zk.setData(""/cn/cell/user/service/1/config"", ""hello"".getBytes(), -1); -- assertTrue(configLatch2.await(5, TimeUnit.SECONDS)); -- assertEquals(buffer.toString(), ""hello""); -- -- assertTrue(pathExists(""/cn/cell/user/service/1/status"")); -- -- List nodes = new ArrayList(); -- cn.listRecursively(nodes); -- assertEquals(2, nodes.size()); -- assertEquals(nodes.get(0), ""/cn/cell/user/service/1/config""); -- assertEquals(nodes.get(1), ""/cn/cell/user/service/1/status""); -- -- // Try to set the status to something else -- String msg = ""Hamster getting quite eager now""; -- handle.setStatus(new 
ServiceStatus(ServiceState.STARTING,msg)); -- ServiceStatus status = cn.getStatus(c); -- assertEquals(msg, status.getMessage()); -- assertSame(ServiceState.STARTING, status.getState()); -- -- // Publish two endpoints -- handle.putEndpoint(new Endpoint(c, ""foo"", ""localhost"", 1234, ""http"", null)); -- handle.putEndpoint(new Endpoint(c, ""bar"", ""localhost"", 1235, ""http"", null)); -- -- handle.setStatus(new ServiceStatus(ServiceState.RUNNING, msg)); -- -- // Remove one of them -- handle.removeEndpoint(""bar""); -- -- List endpointList = cn.getResolver().resolve(""bar.1.service.user.cell""); -- assertEquals(0, endpointList.size()); -- -- endpointList = cn.getResolver().resolve(""foo.1.service.user.cell""); -- assertEquals(1, endpointList.size()); -- Endpoint endpointFoo = endpointList.get(0); -- -- String fooData = endpointFoo.getName(); -- assertEquals(""foo"", fooData); -- assertEquals(""foo"", endpointFoo.getName()); -- assertEquals(""localhost"", endpointFoo.getHost()); -- assertEquals(1234, endpointFoo.getPort()); -- assertEquals(""http"", endpointFoo.getProtocol()); -- assertNull(endpointFoo.getEndpointData()); -- -- // Close handle just invalidates handle -- handle.close(); -- -- // These nodes are ephemeral and will be cleaned out when we -- // call cn.releaseClaim(), but calling handle.releaseClaim() explicitly -- // cleans out the ephemeral nodes. -- assertFalse(pathExists(""/cn/cell/user/service/1/status"")); -- -- // Closing Cloudname instance disconnects the zk client -- // connection and thus should kill all ephemeral nodes. -- cn.close(); -- -- // But the coordinate and its persistent subnodes should -- assertTrue(pathExists(""/cn/cell/user/service/1"")); -- assertFalse(pathExists(""/cn/cell/user/service/1/endpoints"")); -- assertTrue(pathExists(""/cn/cell/user/service/1/config"")); -- } -- -- /** -- * Claim non-existing coordinate -- */ -- @Test -- public void testCoordinateNotFound() throws CloudnameException, InterruptedException { -- final Coordinate c = Coordinate.parse(""3.service.user.cell""); -- final Cloudname cn = makeLocalZkCloudname(); -- -- final ExecutorService executor = Executors.newCachedThreadPool(); -- final Callable task = new Callable() { -- public Object call() throws InterruptedException { -- return cn.claim(c); -- } -- }; -- final Future future = executor.submit(task); -- try { -- future.get(300, TimeUnit.MILLISECONDS); -- } catch (TimeoutException ex) { -- // handle the timeout -- LOG.info(""Got time out, nice!""); -- } catch (InterruptedException e) { -- fail(""Interrupted""); -- } catch (ExecutionException e) { -- fail(""Some error "" + e.getMessage()); -- // handle other exceptions -- } finally { -- future.cancel(true); -- } -- } -- -- /** -- * Try to claim coordinate twice -- */ -- @Test -- public void testDoubleClaim() throws CloudnameException, InterruptedException { -- final Coordinate c = Coordinate.parse(""2.service.user.cell""); -- final CountDownLatch okCounter = new CountDownLatch(1); -- final CountDownLatch failCounter = new CountDownLatch(1); -- -- final CoordinateListener listener = new CoordinateListener() { -- @Override -- public void onCoordinateEvent(Event event, String message) { -- switch (event) { -- case COORDINATE_OK: -- okCounter.countDown(); -- break; -- case NOT_OWNER: -- failCounter.countDown(); -- default: //Any other Event is unexpected. 
-- assert(false); -- break; -- } -- } -- }; -- final Cloudname cn; -- try { -- cn = makeLocalZkCloudname(); -- } catch (CloudnameException e) { -- fail(""connecting to localhost failed.""); -- return; -- } -- -- try { -- cn.createCoordinate(c); -- } catch (CoordinateExistsException e) { -- fail(""should not happen.""); -- } -- final ServiceHandle handle1 = cn.claim(c); -- assert(handle1.waitForCoordinateOkSeconds(4)); -- handle1.registerCoordinateListener(listener); -- ServiceHandle handle2 = cn.claim(c); -- assertFalse(handle2.waitForCoordinateOkSeconds(1)); -- handle2.registerCoordinateListener(listener); -- assert(okCounter.await(4, TimeUnit.SECONDS)); -- assert(failCounter.await(2, TimeUnit.SECONDS)); -- } -- -- -- @Test -- public void testDestroyBasic() throws Exception { -- final Coordinate c = Coordinate.parse(""1.service.user.cell""); -- final Cloudname cn = makeLocalZkCloudname(); -- cn.createCoordinate(c); -- assertTrue(pathExists(""/cn/cell/user/service/1/config"")); -- cn.destroyCoordinate(c); -- assertFalse(pathExists(""/cn/cell/user/service"")); -- assertTrue(pathExists(""/cn/cell/user"")); -- } -- -- @Test -- public void testDestroyTwoInstances() throws Exception { -- final Coordinate c1 = Coordinate.parse(""1.service.user.cell""); -- final Coordinate c2 = Coordinate.parse(""2.service.user.cell""); -- final Cloudname cn = makeLocalZkCloudname(); -- cn.createCoordinate(c1); -- cn.createCoordinate(c2); -- assertTrue(pathExists(""/cn/cell/user/service/1/config"")); -- assertTrue(pathExists(""/cn/cell/user/service/2/config"")); -- cn.destroyCoordinate(c1); -- assertFalse(pathExists(""/cn/cell/user/service/1"")); -- assertTrue(pathExists(""/cn/cell/user/service/2/config"")); -- } -- -- @Test -- public void testDestroyClaimed() throws Exception { -- final Coordinate c = Coordinate.parse(""1.service.user.cell""); -- final Cloudname cn = makeLocalZkCloudname(); -- cn.createCoordinate(c); -- ServiceHandle handle = cn.claim(c); -- handle.waitForCoordinateOkSeconds(1); -- try { -- cn.destroyCoordinate(c); -- fail(""Expected exception to happen""); -- } catch (CoordinateException e) { -- } -- } -- -- private boolean pathExists(String path) throws Exception { -- return (null != zk.exists(path, false)); -- } -- -- /** -- * Makes a local ZkCloudname instance with the port given by zkPort. -- */ -- private ZkCloudname makeLocalZkCloudname() throws CloudnameException { -- return new ZkCloudname.Builder().setConnectString(""localhost:"" + zkport).build().connect(); -- } --} -diff --git a/cn/src/test/java/org/cloudname/zk/ZkCoordinatePathTest.java b/cn/src/test/java/org/cloudname/zk/ZkCoordinatePathTest.java -deleted file mode 100644 -index 9e9f6dcf..00000000 ---- a/cn/src/test/java/org/cloudname/zk/ZkCoordinatePathTest.java -+++ /dev/null -@@ -1,31 +0,0 @@ --package org.cloudname.zk; -- --import org.cloudname.Coordinate; --import org.junit.Test; -- --import static org.junit.Assert.assertEquals; -- --/** -- * Unit tests for class ZkCoordinatePathTest. 
-- * @author dybdahl -- */ --public class ZkCoordinatePathTest { -- @Test -- public void testSimple() throws Exception { -- final Coordinate coordinate = new Coordinate( -- 42 /*instance*/, ""service"", ""user"", ""cell"", false /*validate*/); -- assertEquals(""/cn/cell/user/service/42/config"", -- ZkCoordinatePath.getConfigPath(coordinate, null)); -- assertEquals(""/cn/cell/user/service/42/config/name"", -- ZkCoordinatePath.getConfigPath(coordinate, ""name"")); -- assertEquals(""/cn/cell/user/service/42"", -- ZkCoordinatePath.getCoordinateRoot(coordinate)); -- assertEquals(""/cn/cell/user/service/42/status"", -- ZkCoordinatePath.getStatusPath(coordinate)); -- assertEquals(""/cn/cell/user/service"", -- ZkCoordinatePath.coordinateWithoutInstanceAsPath( -- ""cell"", ""user"", ""service"")); -- assertEquals(""/cn/cell/user/service/42/status"", -- ZkCoordinatePath.getStatusPath(""cell"", ""user"", ""service"", 42)); -- } --} -diff --git a/cn/src/test/java/org/cloudname/zk/ZkResolverTest.java b/cn/src/test/java/org/cloudname/zk/ZkResolverTest.java -deleted file mode 100644 -index 3dd868f6..00000000 ---- a/cn/src/test/java/org/cloudname/zk/ZkResolverTest.java -+++ /dev/null -@@ -1,134 +0,0 @@ --package org.cloudname.zk; -- --import org.cloudname.*; --import org.junit.Before; --import org.junit.Rule; --import org.junit.Test; --import org.junit.rules.TemporaryFolder; -- --import static org.junit.Assert.*; -- -- --/** -- * This class contains the unit tests for the ZkResolver class. -- * -- * TODO(borud): add tests for when the input is a coordinate. -- * -- * @author borud -- */ --public class ZkResolverTest { -- private Resolver resolver; -- -- @Rule -- public TemporaryFolder temp = new TemporaryFolder(); -- -- /** -- * Set up an embedded ZooKeeper instance backed by a temporary -- * directory. The setup procedure also allocates a port that is -- * free for the ZooKeeper server so that you should be able to run -- * multiple instances of this test. -- */ -- @Before -- public void setup() throws Exception { -- resolver = new ZkResolver.Builder() -- .addStrategy(new StrategyAll()) -- .addStrategy(new StrategyAny()) -- .build(new ZkObjectHandler(null).getClient()); -- } -- -- // Valid endpoints. -- public static final String[] validEndpointPatterns = new String[] { -- ""http.1.service.user.cell"", -- ""foo-bar.3245.service.user.cell"", -- ""foo_bar.3245.service.user.cell"", -- }; -- -- // Valid strategy. -- public static final String[] validStrategyPatterns = new String[] { -- ""any.service.user.cell"", -- ""all.service.user.cell"", -- ""somestrategy.service.user.cell"", -- }; -- -- // Valid endpoint strategy. 
-- public static final String[] validEndpointStrategyPatterns = new String[] { -- ""http.any.service.user.cell"", -- ""thrift.all.service.user.cell"", -- ""some-endpoint.somestrategy.service.user.cell"", -- }; -- -- @Test(expected=IllegalArgumentException.class) -- public void testRegisterSameListenerTwice() throws Exception { -- Resolver.ResolverListener resolverListener = new Resolver.ResolverListener() { -- @Override -- public void endpointEvent(Event event, Endpoint endpoint) { -- -- } -- }; -- resolver.addResolverListener(""foo.all.service.user.cell"", resolverListener); -- resolver.addResolverListener(""bar.all.service.user.cell"", resolverListener); -- } -- -- @Test -- public void testEndpointPatterns() throws Exception { -- // Test input that should match -- for (String s : validEndpointPatterns) { -- assertTrue(""Didn't match '"" + s + ""'"", -- ZkResolver.endpointPattern.matcher(s).matches()); -- } -- -- // Test input that should not match -- for (String s : validStrategyPatterns) { -- assertFalse(""Matched '"" + s + ""'"", -- ZkResolver.endpointPattern.matcher(s).matches()); -- } -- -- // Test input that should not match -- for (String s : validEndpointStrategyPatterns) { -- assertFalse(""Matched '"" + s + ""'"", -- ZkResolver.endpointPattern.matcher(s).matches()); -- } -- } -- -- @Test -- public void testStrategyPatterns() throws Exception { -- // Test input that should match -- for (String s : validStrategyPatterns) { -- assertTrue(""Didn't match '"" + s + ""'"", -- ZkResolver.strategyPattern.matcher(s).matches()); -- } -- -- // Test input that should not match -- for (String s : validEndpointPatterns) { -- assertFalse(""Matched '"" + s + ""'"", -- ZkResolver.strategyPattern.matcher(s).matches()); -- } -- // Test input that should not match -- for (String s : validEndpointStrategyPatterns) { -- assertFalse(""Matched '"" + s + ""'"", -- ZkResolver.endpointPattern.matcher(s).matches()); -- } -- } -- -- @Test -- public void testEndpointStrategyPatterns() throws Exception { -- // Test input that should match -- for (String s : validEndpointStrategyPatterns) { -- assertTrue(""Didn't match '"" + s + ""'"", -- ZkResolver.endpointStrategyPattern.matcher(s).matches()); -- } -- -- // Test input that should not match -- for (String s : validStrategyPatterns) { -- assertFalse(""Matched '"" + s + ""'"", -- ZkResolver.endpointStrategyPattern.matcher(s).matches()); -- } -- -- -- // Test input that should not match -- for (String s : validEndpointPatterns) { -- assertFalse(""Matched '"" + s + ""'"", -- ZkResolver.endpointStrategyPattern.matcher(s).matches()); -- } -- } --} -\ No newline at end of file -diff --git a/flags/src/test/java/org/cloudname/flags/FlagsTest.java b/flags/src/test/java/org/cloudname/flags/FlagsTest.java -index 7db55632..4cc3aa75 100644 ---- a/flags/src/test/java/org/cloudname/flags/FlagsTest.java -+++ b/flags/src/test/java/org/cloudname/flags/FlagsTest.java -@@ -4,7 +4,7 @@ - import java.io.File; - import java.io.FileOutputStream; - import javax.annotation.PostConstruct; --import junit.framework.Assert; -+import org.junit.Assert; - import org.junit.Rule; - import org.junit.Test; - import org.junit.rules.ExpectedException; -diff --git a/pom.xml b/pom.xml -index bde06f53..19b35348 100644 ---- a/pom.xml -+++ b/pom.xml -@@ -39,7 +39,7 @@ - 2.9.0 - 2.1.1 - 4.11 -- 4.0.32.Final -+ 3.7.0.Final - 2.6.1 - 0.9.94 - 1.2.1 -@@ -47,13 +47,14 @@ - 0.3m - 2.1 - 1.16 -- src/integrationtest -- target/integrationtest-classes - - - - a3 -- cn -+ cn-core -+ cn-service -+ cn-memory -+ 
cn-zookeeper - testtools - log - timber -@@ -82,15 +83,6 @@ - false - - -- -- org.codehaus.mojo -- cobertura-maven-plugin -- 2.5.2 -- -- xml -- true -- -- - - - -@@ -108,7 +100,13 @@ - - - org.cloudname -- cn -+ cn-core -+ ${project.version} -+ -+ -+ -+ org.cloudname -+ cn-memory - ${project.version} - - -@@ -139,7 +137,7 @@ - - - io.netty -- netty-all -+ netty - ${cn.netty.version} - - -@@ -157,34 +155,6 @@ - ${cn.protobuf.version} - - -- -- -- org.apache.zookeeper -- zookeeper -- ${cn.zookeeper.version} -- -- -- com.sun.jmx -- jmxri -- -- -- com.sun.jdmk -- jmxtools -- -- -- javax.jms -- jms -- -- -- -- -- -- -- org.apache.curator -- curator-framework -- ${cn.curator.version} -- -- - - - com.fasterxml.jackson.core -@@ -254,135 +224,4 @@ - - - -- -- -- -- src/integrationtest -- -- it -- -- -- -- -- org.apache.maven.plugins -- maven-surefire-plugin -- 2.10 -- -- **/*.java -- -- -- -- org.apache.maven.plugins -- maven-antrun-plugin -- -- -- create-directory -- pre-integration-test -- -- run -- -- -- -- -- -- -- -- -- -- -- -- org.codehaus.mojo -- build-helper-maven-plugin -- 1.5 -- -- -- add-test-sources -- pre-integration-test -- -- add-test-source -- -- -- -- ${integrationSourceDirectory}/java -- -- -- -- -- add-test-resources -- pre-integration-test -- -- add-test-resource -- -- -- -- -- ${integrationSourceDirectory}/java -- ${integrationOutputDirectory} -- -- -- -- -- -- add-empty-directory -- pre-integration-test -- -- add-test-resource -- -- -- -- -- ${integrationSourceDirectory}/java -- ${integrationOutputDirectory} -- -- **/* -- -- -- -- -- -- -- -- -- org.apache.maven.plugins -- maven-compiler-plugin -- 2.3.2 -- -- -- pre-integration-test -- -- testCompile -- -- -- -- ${basedir}/${integrationOutputDirectory} -- -- -- -- -- -- -- maven-failsafe-plugin -- 2.8 -- -- ${integrationOutputDirectory} -- ${integrationOutputDirectory}/failsafe-reports -- **/*.java -- -- ${integrationSourceDirectory}/resources -- -- -- -- -- -- integration-test -- verify -- -- -- -- -- -- -- -- -- - -diff --git a/testtools/pom.xml b/testtools/pom.xml -index c50037e1..817a9a87 100644 ---- a/testtools/pom.xml -+++ b/testtools/pom.xml -@@ -15,17 +15,16 @@ - https://github.com/Cloudname/cloudname - - -- -- org.apache.curator -- curator-test -- ${cn.curator.version} -- - - - junit - junit -- test -+ compile - - -+ -+ org.cloudname -+ cn-core -+ - - -diff --git a/testtools/src/main/java/org/cloudname/testtools/backend/CoreBackendTest.java b/testtools/src/main/java/org/cloudname/testtools/backend/CoreBackendTest.java -new file mode 100644 -index 00000000..3525b4d6 ---- /dev/null -+++ b/testtools/src/main/java/org/cloudname/testtools/backend/CoreBackendTest.java -@@ -0,0 +1,635 @@ -+package org.cloudname.testtools.backend; -+ -+import org.cloudname.core.CloudnameBackend; -+import org.cloudname.core.CloudnamePath; -+import org.cloudname.core.LeaseHandle; -+import org.cloudname.core.LeaseListener; -+import org.junit.Test; -+ -+import java.util.ArrayList; -+import java.util.HashSet; -+import java.util.List; -+import java.util.Random; -+import java.util.Set; -+import java.util.concurrent.CountDownLatch; -+import java.util.concurrent.Executor; -+import java.util.concurrent.Executors; -+import java.util.concurrent.TimeUnit; -+import java.util.concurrent.atomic.AtomicInteger; -+ -+import static org.hamcrest.CoreMatchers.equalTo; -+import static org.hamcrest.CoreMatchers.is; -+import static org.hamcrest.CoreMatchers.notNullValue; -+import static org.hamcrest.CoreMatchers.nullValue; -+import static 
org.junit.Assert.assertFalse; -+import static org.junit.Assert.assertThat; -+import static org.junit.Assert.assertTrue; -+import static org.junit.Assert.fail; -+ -+/** -+ * Core backend tests. This ensures the backend implementation works as expected on the most -+ * basic level. Override this class in your backend implementation to test it. -+ * -+ * @author stalehd@gmail.com -+ */ -+public abstract class CoreBackendTest { -+ private final CloudnamePath serviceA = new CloudnamePath( -+ new String[] { ""local"", ""test"", ""service-a"" }); -+ private final CloudnamePath serviceB = new CloudnamePath( -+ new String[] { ""local"", ""test"", ""service-b"" }); -+ -+ private final Random random = new Random(); -+ -+ /** -+ * Max data propagation time (in ms) for notifications from the backend. Override if your -+ * backend implementation is slow. 5 ms is a lot of time though so do it carefully. -+ */ -+ protected int getBackendPropagationTime() { -+ return 5; -+ } -+ /** -+ * Ensure multiple clients can connect and that leases get an unique path for each client. -+ */ -+ @Test -+ public void temporaryLeaseCreation() throws Exception { -+ try (final CloudnameBackend backend = getBackend()) { -+ final String data = Long.toHexString(random.nextLong()); -+ final LeaseHandle lease = backend.createTemporaryLease(serviceA, data); -+ assertThat(""Expected lease to be not null"", lease, is(notNullValue())); -+ -+ assertTrue(""Expected lease path to be a subpath of the supplied lease ("" + serviceA -+ + "") but it is "" + lease.getLeasePath(), -+ serviceA.isSubpathOf(lease.getLeasePath())); -+ -+ assertThat(""The temporary lease data can be read"", -+ backend.readTemporaryLeaseData(lease.getLeasePath()), is(data)); -+ -+ final String newData = Long.toHexString(random.nextLong()); -+ assertThat(""Expected to be able to write lease data but didn't"", -+ lease.writeLeaseData(newData), is(true)); -+ -+ assertThat(""Expected to be able to read data back but didn't"", -+ backend.readTemporaryLeaseData(lease.getLeasePath()), is(newData)); -+ lease.close(); -+ -+ assertThat(""Expect the lease path to be null"", lease.getLeasePath(), is(nullValue())); -+ -+ assertFalse(""Did not expect to be able to write lease data for a closed lease"", -+ lease.writeLeaseData(Long.toHexString(random.nextLong()))); -+ assertThat(""The temporary lease data can not be read"", -+ backend.readTemporaryLeaseData(lease.getLeasePath()), is(nullValue())); -+ -+ -+ final int numberOfLeases = 50; -+ -+ final Set leasePaths = new HashSet<>(); -+ for (int i = 0; i < numberOfLeases; i++) { -+ final String randomData = Long.toHexString(random.nextLong()); -+ final LeaseHandle handle = backend.createTemporaryLease(serviceB, randomData); -+ leasePaths.add(handle.getLeasePath().join(':')); -+ handle.close(); -+ } -+ -+ assertThat(""Expected "" + numberOfLeases + "" unique paths but it was "" + leasePaths.size(), -+ leasePaths.size(), is(numberOfLeases)); -+ } -+ } -+ -+ /** -+ * A very simple single-threaded notification. Make sure this works before implementing -+ * the multiple notifications elsewhere in this test. 
-+ */ -+ @Test -+ public void simpleTemporaryNotification() throws Exception { -+ -+ try (final CloudnameBackend backend = getBackend()) { -+ -+ final CloudnamePath rootPath = new CloudnamePath(new String[]{""simple""}); -+ final CountDownLatch createCounter = new CountDownLatch(1); -+ final CountDownLatch removeCounter = new CountDownLatch(1); -+ final CountDownLatch dataCounter = new CountDownLatch(1); -+ -+ final String firstData = ""first data""; -+ final String lastData = ""last data""; -+ final LeaseListener listener = new LeaseListener() { -+ @Override -+ public void leaseCreated(CloudnamePath path, String data) { -+ createCounter.countDown(); -+ if (data.equals(lastData)) { -+ dataCounter.countDown(); -+ } -+ } -+ -+ @Override -+ public void leaseRemoved(CloudnamePath path) { -+ removeCounter.countDown(); -+ } -+ -+ @Override -+ public void dataChanged(CloudnamePath path, String data) { -+ dataCounter.countDown(); -+ } -+ }; -+ backend.addTemporaryLeaseListener(rootPath, listener); -+ final LeaseHandle handle = backend.createTemporaryLease(rootPath, firstData); -+ assertThat(handle, is(notNullValue())); -+ Thread.sleep(getBackendPropagationTime()); -+ -+ handle.writeLeaseData(lastData); -+ Thread.sleep(getBackendPropagationTime()); -+ -+ handle.close(); -+ -+ assertTrue(""Expected create notification but didn't get one"", -+ createCounter.await(getBackendPropagationTime(), TimeUnit.MILLISECONDS)); -+ assertTrue(""Expected remove notification but didn't get one"", -+ removeCounter.await(getBackendPropagationTime(), TimeUnit.MILLISECONDS)); -+ assertTrue(""Expected data notification but didn't get one"", -+ dataCounter.await(getBackendPropagationTime(), TimeUnit.MILLISECONDS)); -+ -+ backend.removeTemporaryLeaseListener(listener); -+ } -+ } -+ -+ /** -+ * Ensure permanent leases can be created and that they can't be overwritten by clients using -+ * the library. 
-+ */ -+ @Test -+ public void permanentLeaseCreation() throws Exception { -+ final CloudnamePath leasePath = new CloudnamePath(new String[]{""some"", ""path""}); -+ final String dataString = ""some data string""; -+ final String newDataString = ""new data string""; -+ -+ -+ try (final CloudnameBackend backend = getBackend()) { -+ backend.removePermanentLease(leasePath); -+ -+ assertThat(""Permanent lease can be created"", -+ backend.createPermanantLease(leasePath, dataString), is(true)); -+ -+ assertThat(""Permanent lease data can be read"", -+ backend.readPermanentLeaseData(leasePath), is(dataString)); -+ -+ assertThat(""Permanent lease can't be created twice"", -+ backend.createPermanantLease(leasePath, dataString), is(false)); -+ -+ assertThat(""Permanent lease can be updated"", -+ backend.writePermanentLeaseData(leasePath, newDataString), is(true)); -+ -+ assertThat(""Permanent lease data can be read after update"", -+ backend.readPermanentLeaseData(leasePath), is(newDataString)); -+ } -+ -+ try (final CloudnameBackend backend = getBackend()) { -+ assertThat(""Permanent lease data can be read from another backend"", -+ backend.readPermanentLeaseData(leasePath), is(newDataString)); -+ assertThat(""Permanent lease can be removed"", -+ backend.removePermanentLease(leasePath), is(true)); -+ assertThat(""Lease can't be removed twice"", -+ backend.removePermanentLease(leasePath), is(false)); -+ assertThat(""Lease data can't be read from deleted lease"", -+ backend.readPermanentLeaseData(leasePath), is(nullValue())); -+ } -+ } -+ -+ /** -+ * Ensure clients are notified of changes -+ */ -+ @Test -+ public void multipleTemporaryNotifications() throws Exception { -+ try (final CloudnameBackend backend = getBackend()) { -+ final CloudnamePath rootPath = new CloudnamePath(new String[]{""root"", ""lease""}); -+ final String clientData = ""client data here""; -+ -+ final LeaseHandle lease = backend.createTemporaryLease(rootPath, clientData); -+ assertThat(""Handle to lease is returned"", lease, is(notNullValue())); -+ assertThat(""Lease is a child of the root lease"", -+ rootPath.isSubpathOf(lease.getLeasePath()), is(true)); -+ -+ int numListeners = 10; -+ final int numUpdates = 10; -+ -+ // Add some listeners to the temporary lease. Each should be notified once on -+ // creation, once on removal and once every time the data is updated -+ final CountDownLatch createNotifications = new CountDownLatch(numListeners); -+ final CountDownLatch dataNotifications = new CountDownLatch(numListeners * numUpdates); -+ final CountDownLatch removeNotifications = new CountDownLatch(numListeners); -+ -+ final List listeners = new ArrayList<>(); -+ for (int i = 0; i < numListeners; i++) { -+ final LeaseListener listener = new LeaseListener() { -+ private AtomicInteger lastData = new AtomicInteger(-1); -+ -+ @Override -+ public void leaseCreated(final CloudnamePath path, final String data) { -+ createNotifications.countDown(); -+ } -+ -+ @Override -+ public void leaseRemoved(final CloudnamePath path) { -+ removeNotifications.countDown(); -+ } -+ -+ @Override -+ public void dataChanged(final CloudnamePath path, final String data) { -+ assertThat(lastData.incrementAndGet(), is(Integer.parseInt(data))); -+ dataNotifications.countDown(); -+ } -+ }; -+ listeners.add(listener); -+ backend.addTemporaryLeaseListener(rootPath, listener); -+ } -+ -+ // Change the data a few times. 
Every change should be propagated to the listeners -+ // in the same order they have changed -+ for (int i = 0; i < numUpdates; i++) { -+ lease.writeLeaseData(Integer.toString(i)); -+ Thread.sleep(getBackendPropagationTime()); -+ } -+ -+ // Remove the lease. Removal notifications will be sent to the clients -+ -+ assertThat(""All create notifications are received but "" + createNotifications.getCount() -+ + "" remains out of "" + numListeners, -+ createNotifications.await(getBackendPropagationTime(), TimeUnit.MICROSECONDS), is(true)); -+ -+ assertThat(""All data notifications are received but "" + dataNotifications.getCount() -+ + "" remains out of "" + (numListeners * numUpdates), -+ dataNotifications.await(getBackendPropagationTime(), TimeUnit.MILLISECONDS), is(true)); -+ -+ lease.close(); -+ assertThat(""All remove notifications are received but "" + removeNotifications.getCount() -+ + "" remains out of "" + numListeners, -+ removeNotifications.await(getBackendPropagationTime(), TimeUnit.MILLISECONDS), is(true)); -+ -+ // Remove the listeners -+ for (final LeaseListener listener : listeners) { -+ lease.close(); -+ backend.removeTemporaryLeaseListener(listener); -+ } -+ } -+ } -+ -+ /** -+ * Test a simple peer to peer scheme; all clients grabbing a lease and listening on other -+ * clients. -+ */ -+ @Test -+ public void multipleServicesWithMultipleClients() throws Exception { -+ try (final CloudnameBackend backend = getBackend()) { -+ -+ final CloudnamePath rootLease = new CloudnamePath(new String[]{""multi"", ""multi""}); -+ final int numberOfClients = 5; -+ -+ // All clients will be notified of all other clients (including themselves) -+ final CountDownLatch createNotifications -+ = new CountDownLatch(numberOfClients * numberOfClients); -+ // All clients will write one change each -+ final CountDownLatch dataNotifications = new CountDownLatch(numberOfClients); -+ // There will be 99 + 98 + 97 + 96 ... 
1 notifications, in all n (n + 1) / 2 -+ // remove notifications -+ final int n = numberOfClients - 1; -+ final CountDownLatch removeNotifications = new CountDownLatch(n * (n + 1) / 2); -+ -+ final Runnable clientProcess = new Runnable() { -+ @Override -+ public void run() { -+ final String myData = Long.toHexString(random.nextLong()); -+ final LeaseHandle handle = backend.createTemporaryLease(rootLease, myData); -+ assertThat(""Got a valid handle back"", handle, is(notNullValue())); -+ backend.addTemporaryLeaseListener(rootLease, new LeaseListener() { -+ @Override -+ public void leaseCreated(final CloudnamePath path, final String data) { -+ assertThat(""Notification belongs to root path"", -+ rootLease.isSubpathOf(path), is(true)); -+ createNotifications.countDown(); -+ } -+ -+ @Override -+ public void leaseRemoved(final CloudnamePath path) { -+ removeNotifications.countDown(); -+ } -+ -+ @Override -+ public void dataChanged(final CloudnamePath path, final String data) { -+ dataNotifications.countDown(); -+ } -+ }); -+ -+ try { -+ assertThat(createNotifications.await( -+ getBackendPropagationTime(), TimeUnit.MILLISECONDS), -+ is(true)); -+ } catch (InterruptedException ie) { -+ throw new RuntimeException(ie); -+ } -+ -+ // Change the data for my own lease, wait for it to propagate -+ assertThat(handle.writeLeaseData(Long.toHexString(random.nextLong())), -+ is(true)); -+ try { -+ Thread.sleep(getBackendPropagationTime()); -+ } catch (final InterruptedException ie) { -+ throw new RuntimeException(ie); -+ } -+ -+ try { -+ assertThat(dataNotifications.await( -+ getBackendPropagationTime(), TimeUnit.MILLISECONDS), -+ is(true)); -+ } catch (InterruptedException ie) { -+ throw new RuntimeException(ie); -+ } -+ -+ // ..and close my lease -+ try { -+ handle.close(); -+ } catch (Exception ex) { -+ throw new RuntimeException(ex); -+ } -+ } -+ }; -+ -+ final Executor executor = Executors.newCachedThreadPool(); -+ for (int i = 0; i < numberOfClients; i++) { -+ executor.execute(clientProcess); -+ } -+ -+ removeNotifications.await(getBackendPropagationTime(), TimeUnit.SECONDS); -+ } -+ } -+ -+ -+ /** -+ * Just make sure unknown listeners doesn't throw exceptions -+ */ -+ @Test -+ public void removeInvalidListener() throws Exception { -+ try (final CloudnameBackend backend = getBackend()) { -+ final LeaseListener unknownnListener = new LeaseListener() { -+ @Override -+ public void leaseCreated(final CloudnamePath path, final String data) { -+ } -+ -+ @Override -+ public void leaseRemoved(final CloudnamePath path) { -+ } -+ -+ @Override -+ public void dataChanged(final CloudnamePath path, final String data) { -+ } -+ }; -+ backend.removeTemporaryLeaseListener(unknownnListener); -+ } -+ } -+ -+ -+ /** -+ * Create a whole set of different listener pairs that runs in parallel. They won't -+ * receive notifications from any other lease - listener pairs. 
-+ */ -+ @Test -+ public void multipleIndependentListeners() throws Exception { -+ try (final CloudnameBackend backend = getBackend()) { -+ final int leasePairs = 10; -+ -+ class LeaseWorker { -+ private final String id; -+ private final CloudnamePath rootPath; -+ private final LeaseListener listener; -+ private final AtomicInteger createNotifications = new AtomicInteger(0); -+ private final AtomicInteger dataNotifications = new AtomicInteger(0); -+ private LeaseHandle handle; -+ -+ public LeaseWorker(final String id) { -+ this.id = id; -+ rootPath = new CloudnamePath(new String[]{""pair"", id}); -+ listener = new LeaseListener() { -+ -+ @Override -+ public void leaseCreated(final CloudnamePath path, final String data) { -+ createNotifications.incrementAndGet(); -+ } -+ -+ @Override -+ public void leaseRemoved(final CloudnamePath path) { -+ } -+ -+ @Override -+ public void dataChanged(final CloudnamePath path, final String data) { -+ dataNotifications.incrementAndGet(); -+ } -+ }; -+ } -+ -+ public void createLease() { -+ backend.addTemporaryLeaseListener(rootPath, listener); -+ try { -+ Thread.sleep(getBackendPropagationTime()); -+ } catch (final InterruptedException ie) { -+ throw new RuntimeException(ie); -+ } -+ handle = backend.createTemporaryLease(rootPath, id); -+ } -+ -+ public void writeData() { -+ handle.writeLeaseData(id); -+ } -+ -+ public void checkNumberOfNotifications() { -+ // There will be two notifications; one for this lease, one for the other -+ assertThat(""Expected 2 create notifications"", createNotifications.get(), is(2)); -+ // There will be two notifications; one for this lease, one for the other -+ assertThat(""Expected 2 data notifications"", dataNotifications.get(), is(2)); -+ } -+ -+ public void closeLease() { -+ try { -+ handle.close(); -+ } catch (Exception ex) { -+ throw new RuntimeException(ex); -+ } -+ } -+ } -+ -+ final List workers = new ArrayList<>(); -+ -+ for (int i = 0; i < leasePairs; i++) { -+ final String id = Long.toHexString(random.nextLong()); -+ final LeaseWorker leaseWorker1 = new LeaseWorker(id); -+ leaseWorker1.createLease(); -+ workers.add(leaseWorker1); -+ final LeaseWorker leaseWorker2 = new LeaseWorker(id); -+ leaseWorker2.createLease(); -+ workers.add(leaseWorker2); -+ } -+ -+ for (final LeaseWorker worker : workers) { -+ worker.writeData(); -+ } -+ Thread.sleep(getBackendPropagationTime()); -+ for (final LeaseWorker worker : workers) { -+ worker.checkNumberOfNotifications(); -+ } -+ for (final LeaseWorker worker : workers) { -+ worker.closeLease(); -+ } -+ } -+ } -+ -+ /** -+ * Ensure permanent leases distribute notifications as well -+ */ -+ @Test -+ public void permanentLeaseNotifications() throws Exception { -+ final CloudnamePath rootLease = new CloudnamePath(new String[] { ""permanent"", ""vacation"" }); -+ final String leaseData = ""the aero smiths""; -+ final String newLeaseData = ""popcultural reference""; -+ -+ try (final CloudnameBackend backend = getBackend()) { -+ backend.removePermanentLease(rootLease); -+ assertThat(""Can create permanent node"", -+ backend.createPermanantLease(rootLease, leaseData), is(true)); -+ } -+ -+ final AtomicInteger numberOfNotifications = new AtomicInteger(0); -+ final CountDownLatch createLatch = new CountDownLatch(1); -+ final CountDownLatch removeLatch = new CountDownLatch(1); -+ final CountDownLatch dataLatch = new CountDownLatch(1); -+ -+ final LeaseListener listener = new LeaseListener() { -+ @Override -+ public void leaseCreated(final CloudnamePath path, final String data) { -+ 
assertThat(path, is(equalTo(rootLease))); -+ assertThat(data, is(equalTo(leaseData))); -+ numberOfNotifications.incrementAndGet(); -+ createLatch.countDown(); -+ } -+ -+ @Override -+ public void leaseRemoved(final CloudnamePath path) { -+ assertThat(path, is(equalTo(rootLease))); -+ numberOfNotifications.incrementAndGet(); -+ removeLatch.countDown(); -+ } -+ -+ @Override -+ public void dataChanged(final CloudnamePath path, final String data) { -+ assertThat(path, is(equalTo(rootLease))); -+ assertThat(data, is(equalTo(newLeaseData))); -+ numberOfNotifications.incrementAndGet(); -+ dataLatch.countDown(); -+ } -+ }; -+ -+ try (final CloudnameBackend backend = getBackend()) { -+ -+ assertThat(""Lease still exists"", -+ backend.readPermanentLeaseData(rootLease), is(leaseData)); -+ -+ // Add the lease back -+ backend.addPermanentLeaseListener(rootLease, listener); -+ -+ assertThat(""New data can be written"", -+ backend.writePermanentLeaseData(rootLease, newLeaseData), is(true)); -+ -+ // Write new data -+ assertThat(""Lease can be removed"", backend.removePermanentLease(rootLease), is(true)); -+ -+ assertTrue(createLatch.await(getBackendPropagationTime(), TimeUnit.MILLISECONDS)); -+ assertTrue(dataLatch.await(getBackendPropagationTime(), TimeUnit.MILLISECONDS)); -+ assertTrue(removeLatch.await(getBackendPropagationTime(), TimeUnit.MILLISECONDS)); -+ // This includes one created, one data, one close -+ assertThat(""One notifications is expected but only got "" -+ + numberOfNotifications.get(), numberOfNotifications.get(), is(3)); -+ -+ backend.removePermanentLeaseListener(listener); -+ // just to be sure - this won't upset anything -+ backend.removePermanentLeaseListener(listener); -+ } -+ } -+ -+ -+ /** -+ * Set up two listeners listening to different permanent leases. There should be no crosstalk -+ * between the listeners. 
-+ */ -+ @Test -+ public void multiplePermanentListeners() throws Exception { -+ final CloudnamePath permanentA = new CloudnamePath(new String[] { ""primary"" }); -+ final CloudnamePath permanentB = new CloudnamePath(new String[] { ""secondary"" }); -+ final CloudnamePath permanentC = new CloudnamePath( -+ new String[] { ""tertiary"", ""permanent"", ""lease"" }); -+ -+ try (final CloudnameBackend backend = getBackend()) { -+ backend.addPermanentLeaseListener(permanentA, new LeaseListener() { -+ @Override -+ public void leaseCreated(final CloudnamePath path, final String data) { -+ assertThat(path, is(equalTo(permanentA))); -+ } -+ -+ @Override -+ public void leaseRemoved(final CloudnamePath path) { -+ assertThat(path, is(equalTo(permanentA))); -+ } -+ -+ @Override -+ public void dataChanged(final CloudnamePath path, final String data) { -+ assertThat(path, is(equalTo(permanentA))); -+ } -+ }); -+ -+ backend.addPermanentLeaseListener(permanentB, new LeaseListener() { -+ @Override -+ public void leaseCreated(final CloudnamePath path, final String data) { -+ assertThat(path, is(equalTo(permanentB))); -+ } -+ -+ @Override -+ public void leaseRemoved(final CloudnamePath path) { -+ assertThat(path, is(equalTo(permanentB))); -+ } -+ -+ @Override -+ public void dataChanged(final CloudnamePath path, final String data) { -+ assertThat(path, is(equalTo(permanentB))); -+ } -+ }); -+ -+ backend.addPermanentLeaseListener(permanentC, new LeaseListener() { -+ @Override -+ public void leaseCreated(final CloudnamePath path, final String data) { -+ fail(""Did not expect any leases to be created at "" + permanentC); -+ } -+ -+ @Override -+ public void leaseRemoved(final CloudnamePath path) { -+ fail(""Did not expect any leases to be created at "" + permanentC); -+ } -+ -+ @Override -+ public void dataChanged(final CloudnamePath path, final String data) { -+ fail(""Did not expect any leases to be created at "" + permanentC); -+ } -+ }); -+ -+ backend.createPermanantLease(permanentA, ""Some data that belongs to A""); -+ backend.createPermanantLease(permanentB, ""Some data that belongs to B""); -+ -+ // Some might say this is a dirty trick but permanent and temporary leases should not -+ // interfere with eachother. -+ final LeaseHandle handle = backend.createTemporaryLease( -+ permanentC, ""Some data that belongs to C""); -+ assertThat(handle, is(notNullValue())); -+ handle.writeLeaseData(""Some other data that belongs to C""); -+ try { -+ handle.close(); -+ } catch (Exception ex) { -+ fail(ex.getMessage()); -+ } -+ } -+ } -+ -+ protected abstract CloudnameBackend getBackend(); -+} -diff --git a/testtools/src/main/java/org/cloudname/testtools/network/ClientThread.java b/testtools/src/main/java/org/cloudname/testtools/network/ClientThread.java -deleted file mode 100644 -index cadbb470..00000000 ---- a/testtools/src/main/java/org/cloudname/testtools/network/ClientThread.java -+++ /dev/null -@@ -1,121 +0,0 @@ --package org.cloudname.testtools.network; -- --import java.io.IOException; --import java.io.InputStream; --import java.io.OutputStream; --import java.net.Socket; --import java.util.logging.Level; --import java.util.logging.Logger; -- --/** -- * ClientThread forwards communication for one pair of sockets. 
-- * TODO(borud): this class lacks unit tests -- * -- * @author dybdahl -- */ --class ClientThread { -- private final static Logger log = Logger.getLogger(ClientThread.class.getName()); -- -- private Socket serverSocket = null; -- private Socket clientSocket = null; -- private Object threadMonitor = new Object(); -- -- -- /** -- * Constructor -- * @param clientSocket socket crated for incomming call -- * @param hostName destination host name -- * @param hostPort destination host port -- */ -- public ClientThread(final Socket clientSocket, final String hostName, final int hostPort) { -- this.clientSocket = clientSocket; -- Runnable myRunnable = new Runnable() { -- @Override -- public void run() { -- final InputStream clientIn, serverIn; -- final OutputStream clientOut, serverOut; -- -- try { -- synchronized (threadMonitor) { -- serverSocket = new Socket(hostName, hostPort); -- } -- clientIn = clientSocket.getInputStream(); -- clientOut = clientSocket.getOutputStream(); -- serverIn = serverSocket.getInputStream(); -- serverOut = serverSocket.getOutputStream(); -- } catch (IOException ioe) { -- log.severe(""Portforwarder: Can not connect to "" + hostName + "":"" + hostPort); -- try { -- if (serverSocket != null) { -- serverSocket.close(); -- } -- } catch (IOException e) { -- log.severe(""Could not close server socket""); -- } -- return; -- } -- synchronized (threadMonitor) { -- startForwarderThread(clientIn, serverOut); -- startForwarderThread(serverIn, clientOut); -- } -- } -- }; -- Thread fireAndForget = new Thread(myRunnable); -- fireAndForget.start(); -- } -- -- /** -- * Closes sockets, which again closes the running threads. -- */ -- public void close() { -- synchronized (threadMonitor) { -- try { -- if (serverSocket != null) { -- serverSocket.close(); -- serverSocket = null; -- } -- } catch (Exception e) { -- log.log(Level.SEVERE, ""Error while closing server socket"", e); -- } -- try { -- if (clientSocket != null) { -- clientSocket.close(); -- clientSocket = null; -- } -- } catch (Exception e) { -- log.log(Level.SEVERE, ""Error while closing client socket"", e); -- } -- } -- } -- -- private Thread startForwarderThread( -- final InputStream inputStream, final OutputStream outputStream) { -- final int BUFFER_SIZE = 4096; -- Runnable myRunnable = new Runnable() { -- @Override -- public void run() { -- byte[] buffer = new byte[BUFFER_SIZE]; -- try { -- while (true) { -- int bytesRead = inputStream.read(buffer); -- -- if (bytesRead == -1) -- // End of stream is reached --> exit -- break; -- -- outputStream.write(buffer, 0, bytesRead); -- outputStream.flush(); -- } -- } catch (IOException e) { -- // Read/write failed --> connection is broken -- log.log(Level.SEVERE, ""Forwarding in loop died.""); -- } -- // Notify parent thread that the connection is broken -- close(); -- } -- }; -- Thread forwarder = new Thread(myRunnable); -- forwarder.start(); -- return forwarder; -- } --} -diff --git a/testtools/src/main/java/org/cloudname/testtools/network/PortForwarder.java b/testtools/src/main/java/org/cloudname/testtools/network/PortForwarder.java -deleted file mode 100644 -index 35c2ec5e..00000000 ---- a/testtools/src/main/java/org/cloudname/testtools/network/PortForwarder.java -+++ /dev/null -@@ -1,145 +0,0 @@ --package org.cloudname.testtools.network; -- --import java.io.IOException; --import java.net.InetSocketAddress; --import java.net.ServerSocket; --import java.net.Socket; --import java.util.ArrayList; --import java.util.List; --import java.util.concurrent.atomic.AtomicBoolean; --import 
java.util.logging.Level; --import java.util.logging.Logger; -- --/** -- * Simple class for setting up port forwarding in unit tests. This -- * enables killing the connection. -- * -- * TODO(stalehd): Remove? Replace with TestCluster class. Makes for better integration tests. -- * TODO(borud): this class lacks unit tests -- * -- * @author dybdahl -- */ --public class PortForwarder { -- private final static Logger log = Logger.getLogger(PortForwarder.class.getName()); -- -- private final int myPort; -- private final AtomicBoolean isAlive = new AtomicBoolean(true); -- private ServerSocket serverSocket = null; -- -- private Thread portThread; -- private final Object threadMonitor = new Object(); -- -- private final List clientThreadList = new ArrayList(); -- private final AtomicBoolean pause = new AtomicBoolean(false); -- -- private final String hostName; -- private final int hostPort; -- -- /** -- * Constructor for port-forwarder. Does stat the forwarder. -- * @param myPort client port -- * @param hostName name of host to forward to. -- * @param hostPort port of host to forward to. -- * @throws IOException if unable to open server socket -- */ -- public PortForwarder(final int myPort, final String hostName, final int hostPort) throws IOException { -- this.myPort = myPort; -- this.hostName = hostName; -- this.hostPort = hostPort; -- log.info(""Starting port forwarder "" + myPort + "" -> "" + hostPort); -- startServerSocketThread(); -- } -- -- private void startServerSocketThread() -- throws IOException { -- openServerSocket(); -- Runnable myRunnable = new Runnable() { -- @Override -- public void run() { -- log.info(""Forwarder running""); -- while (isAlive.get() && !pause.get()) { -- try { -- final Socket clientSocket = serverSocket.accept(); -- synchronized (threadMonitor) { -- if (isAlive.get() && !pause.get()) { -- clientThreadList.add(new ClientThread(clientSocket, hostName, hostPort)); -- } else { -- clientSocket.close(); -- } -- } -- } catch (IOException e) { -- log.log(Level.SEVERE, ""Got exception in forwarder"", e); -- // Keep going, maybe later connections will succeed. -- } -- } -- log.info(""Forwarder stopped""); -- } -- }; -- portThread = new Thread(myRunnable); -- // Make this a daemon thread, so it won't keep the VM running at shutdown. -- portThread.setDaemon(true); -- portThread.start(); -- } -- -- private void openServerSocket() throws IOException { -- serverSocket = new ServerSocket(); -- serverSocket.setReuseAddress(true); -- serverSocket.bind(new InetSocketAddress(""localhost"", myPort)); -- } -- -- /** -- * Forces client to loose connection and refuses to create new (closing attempts to connect). -- * @throws IOException -- * @throws InterruptedException -- */ -- public void pause() throws IOException, InterruptedException { -- final Thread currentServerThread; -- synchronized (threadMonitor) { -- if (!pause.compareAndSet(false, true)) { -- return; -- } -- for (ClientThread clientThread: clientThreadList) { -- clientThread.close(); -- -- } -- clientThreadList.clear(); -- serverSocket.close(); -- /* -- * Make a copy of the server socket thread, so we can wait for it -- * to complete outside any monitor. -- */ -- currentServerThread = portThread; -- } -- currentServerThread.join(); -- } -- -- /** -- * Lets client start connecting again. -- * @throws IOException -- */ -- public void unpause() throws IOException { -- synchronized (threadMonitor) { -- if (pause.compareAndSet(true, false)) { -- startServerSocketThread(); -- } -- } -- } -- -- /** -- * Shuts down the forwarder. 
-- */ -- public void close() { -- isAlive.set(false); -- try { -- pause(); -- } catch (final IOException e) { -- // Ignore this -- log.severe(""Could not close server socket.""); -- } catch (InterruptedException e) { -- log.severe(""Interrupted while waiting for server thread to finish.""); -- // Reassert interrupt. -- Thread.currentThread().interrupt(); -- } -- } --} -- -diff --git a/testtools/src/main/java/org/cloudname/testtools/zookeeper/EmbeddedZooKeeper.java b/testtools/src/main/java/org/cloudname/testtools/zookeeper/EmbeddedZooKeeper.java -deleted file mode 100644 -index c081434f..00000000 ---- a/testtools/src/main/java/org/cloudname/testtools/zookeeper/EmbeddedZooKeeper.java -+++ /dev/null -@@ -1,88 +0,0 @@ --package org.cloudname.testtools.zookeeper; -- --import org.apache.curator.test.TestingServer; -- --import java.io.File; --import java.io.IOException; -- --/** -- * Utility class to fire up an embedded ZooKeeper server in the -- * current JVM for testing purposes. -- * -- * @author borud -- * @author stalehd -- */ --public final class EmbeddedZooKeeper { -- private final File rootDir; -- private final int port; -- private TestingServer server; -- -- /** -- * @param rootDir the root directory of where the ZooKeeper -- * instance will keep its files. If null, a temporary directory is created -- * @param port the port where ZooKeeper will listen for client -- * connections. -- */ -- public EmbeddedZooKeeper(File rootDir, int port) { -- this.rootDir = rootDir; -- this.port = port; -- } -- -- private void delDir(File path) throws IOException { -- for(File f : path.listFiles()) -- { -- if(f.isDirectory()) { -- delDir(f); -- } else { -- if (!f.delete() && f.exists()) { -- throw new IOException(""Failed to delete file "" + f); -- } -- } -- } -- if (!path.delete() && path.exists()) { -- throw new IOException(""Failed to delete directory "" + path); -- } -- -- } -- -- /** -- * Delete all data owned by the ZooKeeper instance. -- * @throws IOException if some file could not be deleted -- */ -- public void del() throws IOException { -- File path = new File(rootDir, ""data""); -- delDir(path); -- } -- -- -- /** -- * Set up the ZooKeeper instance. -- */ -- public void init() throws Exception { -- this.server = new TestingServer(this.port, this.rootDir); -- // Create the data directory -- File dataDir = new File(rootDir, ""data""); -- dataDir.mkdir(); -- -- this.server.start(); -- } -- -- /** -- * Shut the ZooKeeper instance down. -- * @throws IOException if shutdown encountered I/O errors -- */ -- public void shutdown() throws IOException { -- this.server.stop(); -- del(); -- } -- -- /** -- * Get the client connection string for the ZooKeeper instance. -- * -- * @return a String containing a comma-separated list of host:port -- * entries for use as a parameter to the ZooKeeper client class. 
-- */ -- public String getClientConnectionString() { -- return ""127.0.0.1:"" + port; -- } --} -diff --git a/timber/pom.xml b/timber/pom.xml -index 3ed79030..6dca5385 100644 ---- a/timber/pom.xml -+++ b/timber/pom.xml -@@ -62,7 +62,7 @@ - - - io.netty -- netty-all -+ netty - - - -@@ -75,11 +75,13 @@ - junit - test - -+ - - org.cloudname - idgen - test - -+ - - joda-time - joda-time" -3990e8b478b1d958479c173c74946e38360cfd17,hadoop,Merge r1503933 from trunk to branch-2 for YARN-513.- Create common proxy client for communicating with RM (Xuan Gong & Jian He via- bikas)--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1503935 13f79535-47bb-0310-9956-ffa450edef68-,a,https://github.com/apache/hadoop,"diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt -index 65d19bff9d839..4d6cb00b23eca 100644 ---- a/hadoop-yarn-project/CHANGES.txt -+++ b/hadoop-yarn-project/CHANGES.txt -@@ -465,6 +465,9 @@ Release 2.1.0-beta - 2013-07-02 - YARN-521. Augment AM - RM client module to be able to request containers - only at specific locations (Sandy Ryza via bikas) - -+ YARN-513. Create common proxy client for communicating with RM. (Xuan Gong -+ & Jian He via bikas) -+ - OPTIMIZATIONS - - YARN-512. Log aggregation root directory check is more expensive than it -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java -index 44c35c3d58b28..b14e65225200d 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java -@@ -655,17 +655,17 @@ public class YarnConfiguration extends Configuration { - public static final long DEFAULT_NM_PROCESS_KILL_WAIT_MS = - 2000; - -- /** Max time to wait to establish a connection to RM when NM starts -+ /** Max time to wait to establish a connection to RM - */ -- public static final String RESOURCEMANAGER_CONNECT_WAIT_SECS = -- NM_PREFIX + ""resourcemanager.connect.wait.secs""; -- public static final int DEFAULT_RESOURCEMANAGER_CONNECT_WAIT_SECS = -+ public static final String RESOURCEMANAGER_CONNECT_MAX_WAIT_SECS = -+ RM_PREFIX + ""resourcemanager.connect.max.wait.secs""; -+ public static final int DEFAULT_RESOURCEMANAGER_CONNECT_MAX_WAIT_SECS = - 15*60; - -- /** Time interval between each NM attempt to connect to RM -+ /** Time interval between each attempt to connect to RM - */ - public static final String RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS = -- NM_PREFIX + ""resourcemanager.connect.retry_interval.secs""; -+ RM_PREFIX + ""resourcemanager.connect.retry_interval.secs""; - public static final long DEFAULT_RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS - = 30; - -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/ClientRMProxy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/ClientRMProxy.java -new file mode 100644 -index 0000000000000..f70b44ce3a8db ---- /dev/null -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/ClientRMProxy.java -@@ -0,0 +1,65 @@ -+/** -+* Licensed to the Apache Software Foundation (ASF) under one -+* or more contributor license agreements. 
See the NOTICE file -+* distributed with this work for additional information -+* regarding copyright ownership. The ASF licenses this file -+* to you under the Apache License, Version 2.0 (the -+* ""License""); you may not use this file except in compliance -+* with the License. You may obtain a copy of the License at -+* -+* http://www.apache.org/licenses/LICENSE-2.0 -+* -+* Unless required by applicable law or agreed to in writing, software -+* distributed under the License is distributed on an ""AS IS"" BASIS, -+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -+* See the License for the specific language governing permissions and -+* limitations under the License. -+*/ -+ -+package org.apache.hadoop.yarn.client; -+ -+import java.io.IOException; -+import java.net.InetSocketAddress; -+ -+import org.apache.commons.logging.Log; -+import org.apache.commons.logging.LogFactory; -+import org.apache.hadoop.conf.Configuration; -+import org.apache.hadoop.yarn.api.ApplicationClientProtocol; -+import org.apache.hadoop.yarn.api.ApplicationMasterProtocol; -+import org.apache.hadoop.yarn.conf.YarnConfiguration; -+import org.apache.hadoop.yarn.server.api.ResourceManagerAdministrationProtocol; -+ -+public class ClientRMProxy extends RMProxy{ -+ -+ private static final Log LOG = LogFactory.getLog(ClientRMProxy.class); -+ -+ public static T createRMProxy(final Configuration conf, -+ final Class protocol) throws IOException { -+ InetSocketAddress rmAddress = getRMAddress(conf, protocol); -+ return createRMProxy(conf, protocol, rmAddress); -+ } -+ -+ private static InetSocketAddress getRMAddress(Configuration conf, Class protocol) { -+ if (protocol == ApplicationClientProtocol.class) { -+ return conf.getSocketAddr(YarnConfiguration.RM_ADDRESS, -+ YarnConfiguration.DEFAULT_RM_ADDRESS, -+ YarnConfiguration.DEFAULT_RM_PORT); -+ } else if (protocol == ResourceManagerAdministrationProtocol.class) { -+ return conf.getSocketAddr( -+ YarnConfiguration.RM_ADMIN_ADDRESS, -+ YarnConfiguration.DEFAULT_RM_ADMIN_ADDRESS, -+ YarnConfiguration.DEFAULT_RM_ADMIN_PORT); -+ } else if (protocol == ApplicationMasterProtocol.class) { -+ return conf.getSocketAddr( -+ YarnConfiguration.RM_SCHEDULER_ADDRESS, -+ YarnConfiguration.DEFAULT_RM_SCHEDULER_ADDRESS, -+ YarnConfiguration.DEFAULT_RM_SCHEDULER_PORT); -+ } else { -+ String message = ""Unsupported protocol found when creating the proxy "" + -+ ""connection to ResourceManager: "" + -+ ((protocol != null) ? protocol.getClass().getName() : ""null""); -+ LOG.error(message); -+ throw new IllegalStateException(message); -+ } -+ } -+} -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/YarnClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/YarnClient.java -index e8dca61d32a0b..22d80c6e8d90b 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/YarnClient.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/YarnClient.java -@@ -19,7 +19,6 @@ - package org.apache.hadoop.yarn.client.api; - - import java.io.IOException; --import java.net.InetSocketAddress; - import java.util.List; - import java.util.Set; - -@@ -54,25 +53,6 @@ public static YarnClient createYarnClient() { - return client; - } - -- /** -- * Create a new instance of YarnClient. 
-- */ -- @Public -- public static YarnClient createYarnClient(InetSocketAddress rmAddress) { -- YarnClient client = new YarnClientImpl(rmAddress); -- return client; -- } -- -- /** -- * Create a new instance of YarnClient. -- */ -- @Public -- public static YarnClient createYarnClient(String name, -- InetSocketAddress rmAddress) { -- YarnClient client = new YarnClientImpl(name, rmAddress); -- return client; -- } -- - @Private - protected YarnClient(String name) { - super(name); -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/AMRMClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/AMRMClientImpl.java -index 0f088a0604b6e..4119a0cb1de7e 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/AMRMClientImpl.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/AMRMClientImpl.java -@@ -19,8 +19,6 @@ - package org.apache.hadoop.yarn.client.api.impl; - - import java.io.IOException; --import java.net.InetSocketAddress; --import java.security.PrivilegedAction; - import java.util.ArrayList; - import java.util.Collection; - import java.util.Collections; -@@ -42,7 +40,6 @@ - import org.apache.hadoop.classification.InterfaceStability.Unstable; - import org.apache.hadoop.conf.Configuration; - import org.apache.hadoop.ipc.RPC; --import org.apache.hadoop.security.UserGroupInformation; - import org.apache.hadoop.yarn.api.ApplicationMasterProtocol; - import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest; - import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse; -@@ -56,16 +53,16 @@ - import org.apache.hadoop.yarn.api.records.Priority; - import org.apache.hadoop.yarn.api.records.Resource; - import org.apache.hadoop.yarn.api.records.ResourceRequest; -+import org.apache.hadoop.yarn.client.ClientRMProxy; - import org.apache.hadoop.yarn.client.api.AMRMClient; -+import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest; - import org.apache.hadoop.yarn.client.api.InvalidContainerRequestException; - import org.apache.hadoop.yarn.client.api.NMTokenCache; --import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest; - import org.apache.hadoop.yarn.conf.YarnConfiguration; - import org.apache.hadoop.yarn.exceptions.YarnException; - import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; - import org.apache.hadoop.yarn.factories.RecordFactory; - import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; --import org.apache.hadoop.yarn.ipc.YarnRPC; - import org.apache.hadoop.yarn.util.RackResolver; - - import com.google.common.annotations.VisibleForTesting; -@@ -171,28 +168,11 @@ protected void serviceInit(Configuration conf) throws Exception { - @Override - protected void serviceStart() throws Exception { - final YarnConfiguration conf = new YarnConfiguration(getConfig()); -- final YarnRPC rpc = YarnRPC.create(conf); -- final InetSocketAddress rmAddress = conf.getSocketAddr( -- YarnConfiguration.RM_SCHEDULER_ADDRESS, -- YarnConfiguration.DEFAULT_RM_SCHEDULER_ADDRESS, -- YarnConfiguration.DEFAULT_RM_SCHEDULER_PORT); -- -- UserGroupInformation currentUser; - try { -- currentUser = UserGroupInformation.getCurrentUser(); -+ rmClient = ClientRMProxy.createRMProxy(conf, ApplicationMasterProtocol.class); - } catch (IOException e) { - throw new YarnRuntimeException(e); - } -- -- // CurrentUser should 
already have AMToken loaded. -- rmClient = currentUser.doAs(new PrivilegedAction() { -- @Override -- public ApplicationMasterProtocol run() { -- return (ApplicationMasterProtocol) rpc.getProxy(ApplicationMasterProtocol.class, rmAddress, -- conf); -- } -- }); -- LOG.debug(""Connecting to ResourceManager at "" + rmAddress); - super.serviceStart(); - } - -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/YarnClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/YarnClientImpl.java -index b3b8bdf4316bb..4398359862b06 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/YarnClientImpl.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/YarnClientImpl.java -@@ -59,11 +59,12 @@ - import org.apache.hadoop.yarn.api.records.Token; - import org.apache.hadoop.yarn.api.records.YarnApplicationState; - import org.apache.hadoop.yarn.api.records.YarnClusterMetrics; -+import org.apache.hadoop.yarn.client.ClientRMProxy; - import org.apache.hadoop.yarn.client.api.YarnClient; - import org.apache.hadoop.yarn.client.api.YarnClientApplication; - import org.apache.hadoop.yarn.conf.YarnConfiguration; - import org.apache.hadoop.yarn.exceptions.YarnException; --import org.apache.hadoop.yarn.ipc.YarnRPC; -+import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; - import org.apache.hadoop.yarn.util.Records; - - import com.google.common.annotations.VisibleForTesting; -@@ -81,16 +82,7 @@ public class YarnClientImpl extends YarnClient { - private static final String ROOT = ""root""; - - public YarnClientImpl() { -- this(null); -- } -- -- public YarnClientImpl(InetSocketAddress rmAddress) { -- this(YarnClientImpl.class.getName(), rmAddress); -- } -- -- public YarnClientImpl(String name, InetSocketAddress rmAddress) { -- super(name); -- this.rmAddress = rmAddress; -+ super(YarnClientImpl.class.getName()); - } - - private static InetSocketAddress getRmAddress(Configuration conf) { -@@ -100,9 +92,7 @@ private static InetSocketAddress getRmAddress(Configuration conf) { - - @Override - protected void serviceInit(Configuration conf) throws Exception { -- if (this.rmAddress == null) { -- this.rmAddress = getRmAddress(conf); -- } -+ this.rmAddress = getRmAddress(conf); - statePollIntervalMillis = conf.getLong( - YarnConfiguration.YARN_CLIENT_APP_SUBMISSION_POLL_INTERVAL_MS, - YarnConfiguration.DEFAULT_YARN_CLIENT_APP_SUBMISSION_POLL_INTERVAL_MS); -@@ -111,12 +101,11 @@ protected void serviceInit(Configuration conf) throws Exception { - - @Override - protected void serviceStart() throws Exception { -- YarnRPC rpc = YarnRPC.create(getConfig()); -- -- this.rmClient = (ApplicationClientProtocol) rpc.getProxy( -- ApplicationClientProtocol.class, rmAddress, getConfig()); -- if (LOG.isDebugEnabled()) { -- LOG.debug(""Connecting to ResourceManager at "" + rmAddress); -+ try { -+ rmClient = ClientRMProxy.createRMProxy(getConfig(), -+ ApplicationClientProtocol.class); -+ } catch (IOException e) { -+ throw new YarnRuntimeException(e); - } - super.serviceStart(); - } -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java -index 6426fe9dbc77e..11335c0d8f68d 100644 ---- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java -@@ -19,8 +19,6 @@ - package org.apache.hadoop.yarn.client.cli; - - import java.io.IOException; --import java.net.InetSocketAddress; --import java.security.PrivilegedAction; - import java.util.Arrays; - - import org.apache.hadoop.classification.InterfaceAudience.Private; -@@ -31,11 +29,11 @@ - import org.apache.hadoop.security.UserGroupInformation; - import org.apache.hadoop.util.Tool; - import org.apache.hadoop.util.ToolRunner; -+import org.apache.hadoop.yarn.client.ClientRMProxy; - import org.apache.hadoop.yarn.conf.YarnConfiguration; - import org.apache.hadoop.yarn.exceptions.YarnException; - import org.apache.hadoop.yarn.factories.RecordFactory; - import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; --import org.apache.hadoop.yarn.ipc.YarnRPC; - import org.apache.hadoop.yarn.server.api.ResourceManagerAdministrationProtocol; - import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshAdminAclsRequest; - import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshNodesRequest; -@@ -164,32 +162,10 @@ private static void printUsage(String cmd) { - } - } - -- private static UserGroupInformation getUGI(Configuration conf -- ) throws IOException { -- return UserGroupInformation.getCurrentUser(); -- } -- - private ResourceManagerAdministrationProtocol createAdminProtocol() throws IOException { - // Get the current configuration - final YarnConfiguration conf = new YarnConfiguration(getConf()); -- -- // Create the client -- final InetSocketAddress addr = conf.getSocketAddr( -- YarnConfiguration.RM_ADMIN_ADDRESS, -- YarnConfiguration.DEFAULT_RM_ADMIN_ADDRESS, -- YarnConfiguration.DEFAULT_RM_ADMIN_PORT); -- final YarnRPC rpc = YarnRPC.create(conf); -- -- ResourceManagerAdministrationProtocol adminProtocol = -- getUGI(conf).doAs(new PrivilegedAction() { -- @Override -- public ResourceManagerAdministrationProtocol run() { -- return (ResourceManagerAdministrationProtocol) rpc.getProxy(ResourceManagerAdministrationProtocol.class, -- addr, conf); -- } -- }); -- -- return adminProtocol; -+ return ClientRMProxy.createRMProxy(conf, ResourceManagerAdministrationProtocol.class); - } - - private int refreshQueues() throws IOException, YarnException { -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/RMProxy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/RMProxy.java -new file mode 100644 -index 0000000000000..e4493b5a469b9 ---- /dev/null -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/RMProxy.java -@@ -0,0 +1,125 @@ -+/** -+ * Licensed to the Apache Software Foundation (ASF) under one -+ * or more contributor license agreements. See the NOTICE file -+ * distributed with this work for additional information -+ * regarding copyright ownership. The ASF licenses this file -+ * to you under the Apache License, Version 2.0 (the -+ * ""License""); you may not use this file except in compliance -+ * with the License. 
You may obtain a copy of the License at -+ * -+ * http://www.apache.org/licenses/LICENSE-2.0 -+ * -+ * Unless required by applicable law or agreed to in writing, software -+ * distributed under the License is distributed on an ""AS IS"" BASIS, -+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -+ * See the License for the specific language governing permissions and -+ * limitations under the License. -+ */ -+ -+package org.apache.hadoop.yarn.client; -+ -+import java.io.IOException; -+import java.net.ConnectException; -+import java.net.InetSocketAddress; -+import java.security.PrivilegedAction; -+import java.util.HashMap; -+import java.util.Map; -+import java.util.concurrent.TimeUnit; -+ -+import org.apache.commons.logging.Log; -+import org.apache.commons.logging.LogFactory; -+import org.apache.hadoop.classification.InterfaceAudience; -+import org.apache.hadoop.classification.InterfaceStability; -+import org.apache.hadoop.conf.Configuration; -+import org.apache.hadoop.io.retry.RetryPolicies; -+import org.apache.hadoop.io.retry.RetryPolicy; -+import org.apache.hadoop.io.retry.RetryProxy; -+import org.apache.hadoop.security.UserGroupInformation; -+import org.apache.hadoop.yarn.conf.YarnConfiguration; -+import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; -+import org.apache.hadoop.yarn.ipc.YarnRPC; -+ -+@InterfaceAudience.Public -+@InterfaceStability.Evolving -+public class RMProxy { -+ -+ private static final Log LOG = LogFactory.getLog(RMProxy.class); -+ -+ @SuppressWarnings(""unchecked"") -+ public static T createRMProxy(final Configuration conf, -+ final Class protocol, InetSocketAddress rmAddress) throws IOException { -+ RetryPolicy retryPolicy = createRetryPolicy(conf); -+ T proxy = RMProxy.getProxy(conf, protocol, rmAddress); -+ LOG.info(""Connecting to ResourceManager at "" + rmAddress); -+ return (T) RetryProxy.create(protocol, proxy, retryPolicy); -+ } -+ -+ @SuppressWarnings(""unchecked"") -+ protected static T getProxy(final Configuration conf, -+ final Class protocol, final InetSocketAddress rmAddress) -+ throws IOException { -+ return (T) UserGroupInformation.getCurrentUser().doAs( -+ new PrivilegedAction() { -+ -+ @Override -+ public T run() { -+ return (T) YarnRPC.create(conf).getProxy(protocol, rmAddress, conf); -+ } -+ }); -+ } -+ -+ public static RetryPolicy createRetryPolicy(Configuration conf) { -+ long rmConnectWaitMS = -+ conf.getInt( -+ YarnConfiguration.RESOURCEMANAGER_CONNECT_MAX_WAIT_SECS, -+ YarnConfiguration.DEFAULT_RESOURCEMANAGER_CONNECT_MAX_WAIT_SECS) -+ * 1000; -+ long rmConnectionRetryIntervalMS = -+ conf.getLong( -+ YarnConfiguration.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS, -+ YarnConfiguration -+ .DEFAULT_RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS) -+ * 1000; -+ -+ if (rmConnectionRetryIntervalMS < 0) { -+ throw new YarnRuntimeException(""Invalid Configuration. "" + -+ YarnConfiguration.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS + -+ "" should not be negative.""); -+ } -+ -+ boolean waitForEver = (rmConnectWaitMS == -1000); -+ -+ if (waitForEver) { -+ return RetryPolicies.RETRY_FOREVER; -+ } else { -+ if (rmConnectWaitMS < 0) { -+ throw new YarnRuntimeException(""Invalid Configuration. 
"" -+ + YarnConfiguration.RESOURCEMANAGER_CONNECT_MAX_WAIT_SECS -+ + "" can be -1, but can not be other negative numbers""); -+ } -+ -+ // try connect once -+ if (rmConnectWaitMS < rmConnectionRetryIntervalMS) { -+ LOG.warn(YarnConfiguration.RESOURCEMANAGER_CONNECT_MAX_WAIT_SECS -+ + "" is smaller than "" -+ + YarnConfiguration.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS -+ + "". Only try connect once.""); -+ rmConnectWaitMS = 0; -+ } -+ } -+ -+ RetryPolicy retryPolicy = -+ RetryPolicies.retryUpToMaximumTimeWithFixedSleep(rmConnectWaitMS, -+ rmConnectionRetryIntervalMS, -+ TimeUnit.MILLISECONDS); -+ -+ Map, RetryPolicy> exceptionToPolicyMap = -+ new HashMap, RetryPolicy>(); -+ exceptionToPolicyMap.put(ConnectException.class, retryPolicy); -+ //TO DO: after HADOOP-9576, IOException can be changed to EOFException -+ exceptionToPolicyMap.put(IOException.class, retryPolicy); -+ -+ return RetryPolicies.retryByException(RetryPolicies.TRY_ONCE_THEN_FAIL, -+ exceptionToPolicyMap); -+ } -+} -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/ServerRMProxy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/ServerRMProxy.java -new file mode 100644 -index 0000000000000..ef9154fde1b5f ---- /dev/null -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/ServerRMProxy.java -@@ -0,0 +1,55 @@ -+/** -+* Licensed to the Apache Software Foundation (ASF) under one -+* or more contributor license agreements. See the NOTICE file -+* distributed with this work for additional information -+* regarding copyright ownership. The ASF licenses this file -+* to you under the Apache License, Version 2.0 (the -+* ""License""); you may not use this file except in compliance -+* with the License. You may obtain a copy of the License at -+* -+* http://www.apache.org/licenses/LICENSE-2.0 -+* -+* Unless required by applicable law or agreed to in writing, software -+* distributed under the License is distributed on an ""AS IS"" BASIS, -+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -+* See the License for the specific language governing permissions and -+* limitations under the License. 
-+*/ -+ -+package org.apache.hadoop.yarn.server.api; -+ -+import java.io.IOException; -+import java.net.InetSocketAddress; -+ -+import org.apache.commons.logging.Log; -+import org.apache.commons.logging.LogFactory; -+import org.apache.hadoop.conf.Configuration; -+import org.apache.hadoop.yarn.client.RMProxy; -+import org.apache.hadoop.yarn.conf.YarnConfiguration; -+ -+public class ServerRMProxy extends RMProxy{ -+ -+ private static final Log LOG = LogFactory.getLog(ServerRMProxy.class); -+ -+ public static T createRMProxy(final Configuration conf, -+ final Class protocol) throws IOException { -+ InetSocketAddress rmAddress = getRMAddress(conf, protocol); -+ return createRMProxy(conf, protocol, rmAddress); -+ } -+ -+ private static InetSocketAddress getRMAddress(Configuration conf, Class protocol) { -+ if (protocol == ResourceTracker.class) { -+ return conf.getSocketAddr( -+ YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS, -+ YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_ADDRESS, -+ YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_PORT); -+ } -+ else { -+ String message = ""Unsupported protocol found when creating the proxy "" + -+ ""connection to ResourceManager: "" + -+ ((protocol != null) ? protocol.getClass().getName() : ""null""); -+ LOG.error(message); -+ throw new IllegalStateException(message); -+ } -+ } -+} -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceTrackerPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceTrackerPBClientImpl.java -index 396204cf2dbdb..40f6874623fdf 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceTrackerPBClientImpl.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceTrackerPBClientImpl.java -@@ -18,6 +18,7 @@ - - package org.apache.hadoop.yarn.server.api.impl.pb.client; - -+import java.io.Closeable; - import java.io.IOException; - import java.net.InetSocketAddress; - -@@ -41,7 +42,7 @@ - - import com.google.protobuf.ServiceException; - --public class ResourceTrackerPBClientImpl implements ResourceTracker { -+public class ResourceTrackerPBClientImpl implements ResourceTracker, Closeable { - - private ResourceTrackerPB proxy; - -@@ -50,7 +51,14 @@ public ResourceTrackerPBClientImpl(long clientVersion, InetSocketAddress addr, C - proxy = (ResourceTrackerPB)RPC.getProxy( - ResourceTrackerPB.class, clientVersion, addr, conf); - } -- -+ -+ @Override -+ public void close() { -+ if(this.proxy != null) { -+ RPC.stopProxy(this.proxy); -+ } -+ } -+ - @Override - public RegisterNodeManagerResponse registerNodeManager( - RegisterNodeManagerRequest request) throws YarnException, -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java -index 550cdc5a98f4f..b0e71e915633e 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java -+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java -@@ -19,7 +19,7 @@ - package org.apache.hadoop.yarn.server.nodemanager; - - import java.io.IOException; --import java.net.InetSocketAddress; -+import java.net.ConnectException; - import java.util.ArrayList; - import java.util.Collections; - import java.util.HashMap; -@@ -33,6 +33,7 @@ - import org.apache.commons.logging.LogFactory; - import org.apache.hadoop.classification.InterfaceAudience.Private; - import org.apache.hadoop.conf.Configuration; -+import org.apache.hadoop.ipc.RPC; - import org.apache.hadoop.security.UserGroupInformation; - import org.apache.hadoop.service.AbstractService; - import org.apache.hadoop.yarn.api.records.ApplicationId; -@@ -47,9 +48,9 @@ - import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; - import org.apache.hadoop.yarn.factories.RecordFactory; - import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; --import org.apache.hadoop.yarn.ipc.YarnRPC; - import org.apache.hadoop.yarn.server.api.ResourceManagerConstants; - import org.apache.hadoop.yarn.server.api.ResourceTracker; -+import org.apache.hadoop.yarn.server.api.ServerRMProxy; - import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatRequest; - import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatResponse; - import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerRequest; -@@ -77,7 +78,6 @@ public class NodeStatusUpdaterImpl extends AbstractService implements - private NodeId nodeId; - private long nextHeartBeatInterval; - private ResourceTracker resourceTracker; -- private InetSocketAddress rmAddress; - private Resource totalResource; - private int httpPort; - private volatile boolean isStopped; -@@ -91,9 +91,6 @@ public class NodeStatusUpdaterImpl extends AbstractService implements - - private final NodeHealthCheckerService healthChecker; - private final NodeManagerMetrics metrics; -- private long rmConnectWaitMS; -- private long rmConnectionRetryIntervalMS; -- private boolean waitForEver; - - private Runnable statusUpdaterRunnable; - private Thread statusUpdater; -@@ -110,11 +107,6 @@ public NodeStatusUpdaterImpl(Context context, Dispatcher dispatcher, - - @Override - protected void serviceInit(Configuration conf) throws Exception { -- this.rmAddress = conf.getSocketAddr( -- YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS, -- YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_ADDRESS, -- YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_PORT); -- - int memoryMb = - conf.getInt( - YarnConfiguration.NM_PMEM_MB, YarnConfiguration.DEFAULT_NM_PMEM_MB); -@@ -153,6 +145,7 @@ protected void serviceStart() throws Exception { - try { - // Registration has to be in start so that ContainerManager can get the - // perNM tokens needed to authenticate ContainerTokens. -+ this.resourceTracker = getRMClient(); - registerWithRM(); - super.serviceStart(); - startStatusUpdater(); -@@ -167,6 +160,7 @@ protected void serviceStart() throws Exception { - protected void serviceStop() throws Exception { - // Interrupt the updater. 
- this.isStopped = true; -+ stopRMProxy(); - super.serviceStop(); - } - -@@ -188,6 +182,13 @@ protected void rebootNodeStatusUpdater() { - } - } - -+ @VisibleForTesting -+ protected void stopRMProxy() { -+ if(this.resourceTracker != null) { -+ RPC.stopProxy(this.resourceTracker); -+ } -+ } -+ - @Private - protected boolean isTokenKeepAliveEnabled(Configuration conf) { - return conf.getBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, -@@ -195,93 +196,22 @@ protected boolean isTokenKeepAliveEnabled(Configuration conf) { - && UserGroupInformation.isSecurityEnabled(); - } - -- protected ResourceTracker getRMClient() { -+ @VisibleForTesting -+ protected ResourceTracker getRMClient() throws IOException { - Configuration conf = getConfig(); -- YarnRPC rpc = YarnRPC.create(conf); -- return (ResourceTracker) rpc.getProxy(ResourceTracker.class, rmAddress, -- conf); -+ return ServerRMProxy.createRMProxy(conf, ResourceTracker.class); - } - - @VisibleForTesting - protected void registerWithRM() throws YarnException, IOException { -- Configuration conf = getConfig(); -- rmConnectWaitMS = -- conf.getInt( -- YarnConfiguration.RESOURCEMANAGER_CONNECT_WAIT_SECS, -- YarnConfiguration.DEFAULT_RESOURCEMANAGER_CONNECT_WAIT_SECS) -- * 1000; -- rmConnectionRetryIntervalMS = -- conf.getLong( -- YarnConfiguration.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS, -- YarnConfiguration -- .DEFAULT_RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS) -- * 1000; -- -- if(rmConnectionRetryIntervalMS < 0) { -- throw new YarnRuntimeException(""Invalid Configuration. "" + -- YarnConfiguration.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS + -- "" should not be negative.""); -- } -- -- waitForEver = (rmConnectWaitMS == -1000); -- -- if(! waitForEver) { -- if(rmConnectWaitMS < 0) { -- throw new YarnRuntimeException(""Invalid Configuration. "" + -- YarnConfiguration.RESOURCEMANAGER_CONNECT_WAIT_SECS + -- "" can be -1, but can not be other negative numbers""); -- } -- -- //try connect once -- if(rmConnectWaitMS < rmConnectionRetryIntervalMS) { -- LOG.warn(YarnConfiguration.RESOURCEMANAGER_CONNECT_WAIT_SECS -- + "" is smaller than "" -- + YarnConfiguration.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS -- + "". Only try connect once.""); -- rmConnectWaitMS = 0; -- } -- } -- -- int rmRetryCount = 0; -- long waitStartTime = System.currentTimeMillis(); -- - RegisterNodeManagerRequest request = - recordFactory.newRecordInstance(RegisterNodeManagerRequest.class); - request.setHttpPort(this.httpPort); - request.setResource(this.totalResource); - request.setNodeId(this.nodeId); -- RegisterNodeManagerResponse regNMResponse; -- -- while(true) { -- try { -- rmRetryCount++; -- LOG.info(""Connecting to ResourceManager at "" + this.rmAddress -- + "". current no. of attempts is "" + rmRetryCount); -- this.resourceTracker = getRMClient(); -- regNMResponse = -- this.resourceTracker.registerNodeManager(request); -- this.rmIdentifier = regNMResponse.getRMIdentifier(); -- break; -- } catch(Throwable e) { -- LOG.warn(""Trying to connect to ResourceManager, "" + -- ""current no. of failed attempts is ""+rmRetryCount); -- if(System.currentTimeMillis() - waitStartTime < rmConnectWaitMS -- || waitForEver) { -- try { -- LOG.info(""Sleeping for "" + rmConnectionRetryIntervalMS/1000 -- + "" seconds before next connection retry to RM""); -- Thread.sleep(rmConnectionRetryIntervalMS); -- } catch(InterruptedException ex) { -- //done nothing -- } -- } else { -- String errorMessage = ""Failed to Connect to RM, "" + -- ""no. 
of failed attempts is ""+rmRetryCount; -- LOG.error(errorMessage,e); -- throw new YarnRuntimeException(errorMessage,e); -- } -- } -- } -+ RegisterNodeManagerResponse regNMResponse = -+ resourceTracker.registerNodeManager(request); -+ this.rmIdentifier = regNMResponse.getRMIdentifier(); - // if the Resourcemanager instructs NM to shutdown. - if (NodeAction.SHUTDOWN.equals(regNMResponse.getNodeAction())) { - String message = -@@ -426,8 +356,6 @@ public void run() { - // Send heartbeat - try { - NodeHeartbeatResponse response = null; -- int rmRetryCount = 0; -- long waitStartTime = System.currentTimeMillis(); - NodeStatus nodeStatus = getNodeStatusAndUpdateContainersInContext(); - nodeStatus.setResponseId(lastHeartBeatID); - -@@ -440,31 +368,7 @@ public void run() { - request - .setLastKnownNMTokenMasterKey(NodeStatusUpdaterImpl.this.context - .getNMTokenSecretManager().getCurrentKey()); -- while (!isStopped) { -- try { -- rmRetryCount++; -- response = resourceTracker.nodeHeartbeat(request); -- break; -- } catch (Throwable e) { -- LOG.warn(""Trying to heartbeat to ResourceManager, "" -- + ""current no. of failed attempts is "" + rmRetryCount); -- if(System.currentTimeMillis() - waitStartTime < rmConnectWaitMS -- || waitForEver) { -- try { -- LOG.info(""Sleeping for "" + rmConnectionRetryIntervalMS/1000 -- + "" seconds before next heartbeat to RM""); -- Thread.sleep(rmConnectionRetryIntervalMS); -- } catch(InterruptedException ex) { -- //done nothing -- } -- } else { -- String errorMessage = ""Failed to heartbeat to RM, "" + -- ""no. of failed attempts is ""+rmRetryCount; -- LOG.error(errorMessage,e); -- throw new YarnRuntimeException(errorMessage,e); -- } -- } -- } -+ response = resourceTracker.nodeHeartbeat(request); - //get next heartbeat interval from response - nextHeartBeatInterval = response.getNextHeartBeatInterval(); - updateMasterKeys(response); -@@ -508,11 +412,11 @@ public void run() { - dispatcher.getEventHandler().handle( - new CMgrCompletedAppsEvent(appsToCleanup)); - } -- } catch (YarnRuntimeException e) { -+ } catch (ConnectException e) { - //catch and throw the exception if tried MAX wait time to connect RM - dispatcher.getEventHandler().handle( - new NodeManagerEvent(NodeManagerEventType.SHUTDOWN)); -- throw e; -+ throw new YarnRuntimeException(e); - } catch (Throwable e) { - // TODO Better error handling. Thread can die with the rest of the - // NM still running. 
-diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/MockNodeStatusUpdater.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/MockNodeStatusUpdater.java -index e93778e2987ef..a3e1faf310e54 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/MockNodeStatusUpdater.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/MockNodeStatusUpdater.java -@@ -61,6 +61,10 @@ public MockNodeStatusUpdater(Context context, Dispatcher dispatcher, - protected ResourceTracker getRMClient() { - return resourceTracker; - } -+ @Override -+ protected void stopRMProxy() { -+ return; -+ } - - private static class MockResourceTracker implements ResourceTracker { - private int heartBeatID; -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java -index 668b85b6511bd..294c93ed3b84a 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java -@@ -107,6 +107,11 @@ protected ResourceTracker getRMClient() { - return new LocalRMInterface(); - }; - -+ @Override -+ protected void stopRMProxy() { -+ return; -+ } -+ - @Override - protected void startStatusUpdater() { - return; // Don't start any updating thread. 
-diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java -index e17131fd3a1dc..2a3e3d579ca03 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java -@@ -41,6 +41,8 @@ - import org.apache.hadoop.conf.Configuration; - import org.apache.hadoop.fs.FileContext; - import org.apache.hadoop.fs.Path; -+import org.apache.hadoop.io.retry.RetryPolicy; -+import org.apache.hadoop.io.retry.RetryProxy; - import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; - import org.apache.hadoop.net.NetUtils; - import org.apache.hadoop.service.ServiceOperations; -@@ -53,6 +55,7 @@ - import org.apache.hadoop.yarn.api.records.ContainerStatus; - import org.apache.hadoop.yarn.api.records.NodeId; - import org.apache.hadoop.yarn.api.records.Resource; -+import org.apache.hadoop.yarn.client.RMProxy; - import org.apache.hadoop.yarn.conf.YarnConfiguration; - import org.apache.hadoop.yarn.event.Dispatcher; - import org.apache.hadoop.yarn.event.EventHandler; -@@ -60,9 +63,9 @@ - import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; - import org.apache.hadoop.yarn.factories.RecordFactory; - import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; --import org.apache.hadoop.yarn.ipc.RPCUtil; - import org.apache.hadoop.yarn.security.ContainerTokenIdentifier; - import org.apache.hadoop.yarn.server.api.ResourceTracker; -+import org.apache.hadoop.yarn.server.api.ServerRMProxy; - import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatRequest; - import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatResponse; - import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerRequest; -@@ -103,11 +106,17 @@ public class TestNodeStatusUpdater { - volatile int heartBeatID = 0; - volatile Throwable nmStartError = null; - private final List registeredNodes = new ArrayList(); -- private final Configuration conf = createNMConfig(); -+ private boolean triggered = false; -+ private Configuration conf; - private NodeManager nm; - private boolean containerStatusBackupSuccessfully = true; - private List completedContainerStatusList = new ArrayList(); - -+ @Before -+ public void setUp() { -+ conf = createNMConfig(); -+ } -+ - @After - public void tearDown() { - this.registeredNodes.clear(); -@@ -274,6 +283,11 @@ public MyNodeStatusUpdater(Context context, Dispatcher dispatcher, - protected ResourceTracker getRMClient() { - return resourceTracker; - } -+ -+ @Override -+ protected void stopRMProxy() { -+ return; -+ } - } - - private class MyNodeStatusUpdater2 extends NodeStatusUpdaterImpl { -@@ -290,6 +304,10 @@ protected ResourceTracker getRMClient() { - return resourceTracker; - } - -+ @Override -+ protected void stopRMProxy() { -+ return; -+ } - } - - private class MyNodeStatusUpdater3 extends NodeStatusUpdaterImpl { -@@ -307,7 +325,12 @@ public MyNodeStatusUpdater3(Context context, Dispatcher dispatcher, - protected ResourceTracker getRMClient() { - return resourceTracker; - } -- -+ -+ @Override -+ 
protected void stopRMProxy() { -+ return; -+ } -+ - @Override - protected boolean isTokenKeepAliveEnabled(Configuration conf) { - return true; -@@ -315,21 +338,16 @@ protected boolean isTokenKeepAliveEnabled(Configuration conf) { - } - - private class MyNodeStatusUpdater4 extends NodeStatusUpdaterImpl { -- public ResourceTracker resourceTracker = -- new MyResourceTracker(this.context); -+ - private Context context; -- private long waitStartTime; - private final long rmStartIntervalMS; - private final boolean rmNeverStart; -- private volatile boolean triggered = false; -- private long durationWhenTriggered = -1; -- -+ public ResourceTracker resourceTracker; - public MyNodeStatusUpdater4(Context context, Dispatcher dispatcher, - NodeHealthCheckerService healthChecker, NodeManagerMetrics metrics, - long rmStartIntervalMS, boolean rmNeverStart) { - super(context, dispatcher, healthChecker, metrics); - this.context = context; -- this.waitStartTime = System.currentTimeMillis(); - this.rmStartIntervalMS = rmStartIntervalMS; - this.rmNeverStart = rmNeverStart; - } -@@ -337,25 +355,16 @@ public MyNodeStatusUpdater4(Context context, Dispatcher dispatcher, - @Override - protected void serviceStart() throws Exception { - //record the startup time -- this.waitStartTime = System.currentTimeMillis(); - super.serviceStart(); - } - - @Override -- protected ResourceTracker getRMClient() { -- if (!triggered) { -- long t = System.currentTimeMillis(); -- long duration = t - waitStartTime; -- if (duration <= rmStartIntervalMS -- || rmNeverStart) { -- throw new YarnRuntimeException(""Faking RM start failure as start "" + -- ""delay timer has not expired.""); -- } else { -- //triggering -- triggered = true; -- durationWhenTriggered = duration; -- } -- } -+ protected ResourceTracker getRMClient() throws IOException { -+ RetryPolicy retryPolicy = RMProxy.createRetryPolicy(conf); -+ resourceTracker = -+ (ResourceTracker) RetryProxy.create(ResourceTracker.class, -+ new MyResourceTracker6(this.context, rmStartIntervalMS, -+ rmNeverStart), retryPolicy); - return resourceTracker; - } - -@@ -363,37 +372,35 @@ private boolean isTriggered() { - return triggered; - } - -- private long getWaitStartTime() { -- return waitStartTime; -- } -- -- private long getDurationWhenTriggered() { -- return durationWhenTriggered; -- } -- - @Override -- public String toString() { -- return ""MyNodeStatusUpdater4{"" + -- ""rmNeverStart="" + rmNeverStart + -- "", triggered="" + triggered + -- "", duration="" + durationWhenTriggered + -- "", rmStartIntervalMS="" + rmStartIntervalMS + -- '}'; -+ protected void stopRMProxy() { -+ return; - } - } - -+ -+ - private class MyNodeStatusUpdater5 extends NodeStatusUpdaterImpl { - private ResourceTracker resourceTracker; -+ private Configuration conf; - - public MyNodeStatusUpdater5(Context context, Dispatcher dispatcher, -- NodeHealthCheckerService healthChecker, NodeManagerMetrics metrics) { -+ NodeHealthCheckerService healthChecker, NodeManagerMetrics metrics, Configuration conf) { - super(context, dispatcher, healthChecker, metrics); - resourceTracker = new MyResourceTracker5(); -+ this.conf = conf; - } - - @Override - protected ResourceTracker getRMClient() { -- return resourceTracker; -+ RetryPolicy retryPolicy = RMProxy.createRetryPolicy(conf); -+ return (ResourceTracker) RetryProxy.create(ResourceTracker.class, -+ resourceTracker, retryPolicy); -+ } -+ -+ @Override -+ protected void stopRMProxy() { -+ return; - } - } - -@@ -417,15 +424,18 @@ private class MyNodeManager2 extends NodeManager { - 
public boolean isStopped = false; - private NodeStatusUpdater nodeStatusUpdater; - private CyclicBarrier syncBarrier; -- public MyNodeManager2 (CyclicBarrier syncBarrier) { -+ private Configuration conf; -+ -+ public MyNodeManager2 (CyclicBarrier syncBarrier, Configuration conf) { - this.syncBarrier = syncBarrier; -+ this.conf = conf; - } - @Override - protected NodeStatusUpdater createNodeStatusUpdater(Context context, - Dispatcher dispatcher, NodeHealthCheckerService healthChecker) { - nodeStatusUpdater = - new MyNodeStatusUpdater5(context, dispatcher, healthChecker, -- metrics); -+ metrics, conf); - return nodeStatusUpdater; - } - -@@ -577,7 +587,7 @@ public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) - .get(4).getState() == ContainerState.RUNNING - && request.getNodeStatus().getContainersStatuses().get(4) - .getContainerId().getId() == 5); -- throw new YarnRuntimeException(""Lost the heartbeat response""); -+ throw new java.net.ConnectException(""Lost the heartbeat response""); - } else if (heartBeatID == 2) { - Assert.assertEquals(request.getNodeStatus().getContainersStatuses() - .size(), 7); -@@ -646,7 +656,63 @@ public RegisterNodeManagerResponse registerNodeManager( - public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) - throws YarnException, IOException { - heartBeatID++; -- throw RPCUtil.getRemoteException(""NodeHeartbeat exception""); -+ throw new java.net.ConnectException( -+ ""NodeHeartbeat exception""); -+ } -+ } -+ -+ private class MyResourceTracker6 implements ResourceTracker { -+ -+ private final Context context; -+ private long rmStartIntervalMS; -+ private boolean rmNeverStart; -+ private final long waitStartTime; -+ -+ public MyResourceTracker6(Context context, long rmStartIntervalMS, -+ boolean rmNeverStart) { -+ this.context = context; -+ this.rmStartIntervalMS = rmStartIntervalMS; -+ this.rmNeverStart = rmNeverStart; -+ this.waitStartTime = System.currentTimeMillis(); -+ } -+ -+ @Override -+ public RegisterNodeManagerResponse registerNodeManager( -+ RegisterNodeManagerRequest request) throws YarnException, IOException, -+ IOException { -+ if (System.currentTimeMillis() - waitStartTime <= rmStartIntervalMS -+ || rmNeverStart) { -+ throw new java.net.ConnectException(""Faking RM start failure as start "" -+ + ""delay timer has not expired.""); -+ } else { -+ NodeId nodeId = request.getNodeId(); -+ Resource resource = request.getResource(); -+ LOG.info(""Registering "" + nodeId.toString()); -+ // NOTE: this really should be checking against the config value -+ InetSocketAddress expected = NetUtils.getConnectAddress( -+ conf.getSocketAddr(YarnConfiguration.NM_ADDRESS, null, -1)); -+ Assert.assertEquals(NetUtils.getHostPortString(expected), -+ nodeId.toString()); -+ Assert.assertEquals(5 * 1024, resource.getMemory()); -+ registeredNodes.add(nodeId); -+ -+ RegisterNodeManagerResponse response = recordFactory -+ .newRecordInstance(RegisterNodeManagerResponse.class); -+ triggered = true; -+ return response; -+ } -+ } -+ -+ @Override -+ public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) -+ throws YarnException, IOException { -+ NodeStatus nodeStatus = request.getNodeStatus(); -+ nodeStatus.setResponseId(heartBeatID++); -+ -+ NodeHeartbeatResponse nhResponse = YarnServerBuilderUtils. 
-+ newNodeHeartbeatResponse(heartBeatID, NodeAction.NORMAL, null, -+ null, null, null, 1000L); -+ return nhResponse; - } - } - -@@ -843,8 +909,7 @@ public void testNMConnectionToRM() throws Exception { - final long connectionRetryIntervalSecs = 1; - //Waiting for rmStartIntervalMS, RM will be started - final long rmStartIntervalMS = 2*1000; -- YarnConfiguration conf = createNMConfig(); -- conf.setLong(YarnConfiguration.RESOURCEMANAGER_CONNECT_WAIT_SECS, -+ conf.setLong(YarnConfiguration.RESOURCEMANAGER_CONNECT_MAX_WAIT_SECS, - connectionWaitSecs); - conf.setLong(YarnConfiguration - .RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS, -@@ -907,8 +972,6 @@ protected NodeStatusUpdater createUpdater(Context context, - } - long duration = System.currentTimeMillis() - waitStartTime; - MyNodeStatusUpdater4 myUpdater = (MyNodeStatusUpdater4) updater; -- Assert.assertTrue(""Updater was never started"", -- myUpdater.getWaitStartTime()>0); - Assert.assertTrue(""NM started before updater triggered"", - myUpdater.isTriggered()); - Assert.assertTrue(""NM should have connected to RM after "" -@@ -1037,13 +1100,13 @@ public void testNodeStatusUpdaterRetryAndNMShutdown() - final long connectionWaitSecs = 1; - final long connectionRetryIntervalSecs = 1; - YarnConfiguration conf = createNMConfig(); -- conf.setLong(YarnConfiguration.RESOURCEMANAGER_CONNECT_WAIT_SECS, -+ conf.setLong(YarnConfiguration.RESOURCEMANAGER_CONNECT_MAX_WAIT_SECS, - connectionWaitSecs); - conf.setLong(YarnConfiguration - .RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS, - connectionRetryIntervalSecs); - CyclicBarrier syncBarrier = new CyclicBarrier(2); -- nm = new MyNodeManager2(syncBarrier); -+ nm = new MyNodeManager2(syncBarrier, conf); - nm.init(conf); - nm.start(); - try { -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/BaseContainerManagerTest.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/BaseContainerManagerTest.java -index 83d21e1640721..cfcf7f6445e63 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/BaseContainerManagerTest.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/BaseContainerManagerTest.java -@@ -117,6 +117,11 @@ protected ResourceTracker getRMClient() { - return new LocalRMInterface(); - }; - -+ @Override -+ protected void stopRMProxy() { -+ return; -+ } -+ - @Override - protected void startStatusUpdater() { - return; // Don't start any updating thread. 
-diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java -index cc529739dea79..144b111f83072 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java -@@ -390,6 +390,11 @@ public RegisterNodeManagerResponse registerNodeManager( - } - }; - }; -+ -+ @Override -+ protected void stopRMProxy() { -+ return; -+ } - }; - }; - }" -367a2b8bf66addf6fd731c54b8436ffdd8ed9061,hadoop,HDFS-2465. Add HDFS support for fadvise readahead- and drop-behind. Contributed by Todd Lipcon.--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1190625 13f79535-47bb-0310-9956-ffa450edef68-,a,https://github.com/apache/hadoop,"diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt -index 08a78a07e6a31..8bd74374ac678 100644 ---- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt -+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt -@@ -772,6 +772,8 @@ Release 0.23.0 - Unreleased - HDFS-2500. Avoid file system operations in BPOfferService thread while - processing deletes. (todd) - -+ HDFS-2465. Add HDFS support for fadvise readahead and drop-behind. (todd) -+ - BUG FIXES - - HDFS-2344. Fix the TestOfflineEditsViewer test failure in 0.23 branch. -diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java -index 9c53bc08796cd..6c10d0e8473bf 100644 ---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java -+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java -@@ -54,6 +54,15 @@ public class DFSConfigKeys extends CommonConfigurationKeys { - public static final String DFS_NAMENODE_BACKUP_SERVICE_RPC_ADDRESS_KEY = ""dfs.namenode.backup.dnrpc-address""; - public static final String DFS_DATANODE_BALANCE_BANDWIDTHPERSEC_KEY = ""dfs.datanode.balance.bandwidthPerSec""; - public static final long DFS_DATANODE_BALANCE_BANDWIDTHPERSEC_DEFAULT = 1024*1024; -+ public static final String DFS_DATANODE_READAHEAD_BYTES_KEY = ""dfs.datanode.readahead.bytes""; -+ public static final long DFS_DATANODE_READAHEAD_BYTES_DEFAULT = 0; -+ public static final String DFS_DATANODE_DROP_CACHE_BEHIND_WRITES_KEY = ""dfs.datanode.drop.cache.behind.writes""; -+ public static final boolean DFS_DATANODE_DROP_CACHE_BEHIND_WRITES_DEFAULT = false; -+ public static final String DFS_DATANODE_SYNC_BEHIND_WRITES_KEY = ""dfs.datanode.sync.behind.writes""; -+ public static final boolean DFS_DATANODE_SYNC_BEHIND_WRITES_DEFAULT = false; -+ public static final String DFS_DATANODE_DROP_CACHE_BEHIND_READS_KEY = ""dfs.datanode.drop.cache.behind.reads""; -+ public static final boolean DFS_DATANODE_DROP_CACHE_BEHIND_READS_DEFAULT = false; -+ - public static final String DFS_NAMENODE_HTTP_ADDRESS_KEY = ""dfs.namenode.http-address""; - public static final String DFS_NAMENODE_HTTP_ADDRESS_DEFAULT = ""0.0.0.0:50070""; - public static final String DFS_NAMENODE_RPC_ADDRESS_KEY = ""dfs.namenode.rpc-address""; -diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java -index 50e118aaa0093..b935aafd412fb 100644 ---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java -+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java -@@ -24,6 +24,7 @@ - import java.io.DataInputStream; - import java.io.DataOutputStream; - import java.io.EOFException; -+import java.io.FileDescriptor; - import java.io.FileOutputStream; - import java.io.IOException; - import java.io.OutputStream; -@@ -46,6 +47,7 @@ - import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration; - import org.apache.hadoop.hdfs.util.DataTransferThrottler; - import org.apache.hadoop.io.IOUtils; -+import org.apache.hadoop.io.nativeio.NativeIO; - import org.apache.hadoop.util.Daemon; - import org.apache.hadoop.util.DataChecksum; - import org.apache.hadoop.util.PureJavaCrc32; -@@ -57,10 +59,13 @@ - class BlockReceiver implements Closeable { - public static final Log LOG = DataNode.LOG; - static final Log ClientTraceLog = DataNode.ClientTraceLog; -+ -+ private static final long CACHE_DROP_LAG_BYTES = 8 * 1024 * 1024; - - private DataInputStream in = null; // from where data are read - private DataChecksum checksum; // from where chunks of a block can be read - private OutputStream out = null; // to block file at local disk -+ private FileDescriptor outFd; - private OutputStream cout = null; // output stream for cehcksum file - private DataOutputStream checksumOut = null; // to crc file at local disk - private int bytesPerChecksum; -@@ -80,6 +85,11 @@ class BlockReceiver implements Closeable { - private final DataNode datanode; - volatile private boolean mirrorError; - -+ // Cache management state -+ private boolean dropCacheBehindWrites; -+ private boolean syncBehindWrites; -+ private long lastCacheDropOffset = 0; -+ - /** The client name. 
It is empty if a datanode is the client */ - private final String clientname; - private final boolean isClient; -@@ -170,6 +180,8 @@ class BlockReceiver implements Closeable { - this.checksum = DataChecksum.newDataChecksum(in); - this.bytesPerChecksum = checksum.getBytesPerChecksum(); - this.checksumSize = checksum.getChecksumSize(); -+ this.dropCacheBehindWrites = datanode.shouldDropCacheBehindWrites(); -+ this.syncBehindWrites = datanode.shouldSyncBehindWrites(); - - final boolean isCreate = isDatanode || isTransfer - || stage == BlockConstructionStage.PIPELINE_SETUP_CREATE; -@@ -177,6 +189,12 @@ class BlockReceiver implements Closeable { - this.bytesPerChecksum, this.checksumSize); - if (streams != null) { - this.out = streams.dataOut; -+ if (out instanceof FileOutputStream) { -+ this.outFd = ((FileOutputStream)out).getFD(); -+ } else { -+ LOG.warn(""Could not get file descriptor for outputstream of class "" + -+ out.getClass()); -+ } - this.cout = streams.checksumOut; - this.checksumOut = new DataOutputStream(new BufferedOutputStream( - streams.checksumOut, HdfsConstants.SMALL_BUFFER_SIZE)); -@@ -631,6 +649,8 @@ private int receivePacket(long offsetInBlock, long seqno, - ); - - datanode.metrics.incrBytesWritten(len); -+ -+ dropOsCacheBehindWriter(offsetInBlock); - } - } catch (IOException iex) { - datanode.checkDiskError(iex); -@@ -645,6 +665,28 @@ private int receivePacket(long offsetInBlock, long seqno, - return lastPacketInBlock?-1:len; - } - -+ private void dropOsCacheBehindWriter(long offsetInBlock) throws IOException { -+ try { -+ if (outFd != null && -+ offsetInBlock > lastCacheDropOffset + CACHE_DROP_LAG_BYTES) { -+ long twoWindowsAgo = lastCacheDropOffset - CACHE_DROP_LAG_BYTES; -+ if (twoWindowsAgo > 0 && dropCacheBehindWrites) { -+ NativeIO.posixFadviseIfPossible(outFd, 0, lastCacheDropOffset, -+ NativeIO.POSIX_FADV_DONTNEED); -+ } -+ -+ if (syncBehindWrites) { -+ NativeIO.syncFileRangeIfPossible(outFd, lastCacheDropOffset, CACHE_DROP_LAG_BYTES, -+ NativeIO.SYNC_FILE_RANGE_WRITE); -+ } -+ -+ lastCacheDropOffset += CACHE_DROP_LAG_BYTES; -+ } -+ } catch (Throwable t) { -+ LOG.warn(""Couldn't drop os cache behind writer for "" + block, t); -+ } -+ } -+ - void writeChecksumHeader(DataOutputStream mirrorOut) throws IOException { - checksum.writeHeader(mirrorOut); - } -diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java -index 84b38b37e9a14..ca9765ce3ea0f 100644 ---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java -+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java -@@ -20,6 +20,7 @@ - import java.io.BufferedInputStream; - import java.io.DataInputStream; - import java.io.DataOutputStream; -+import java.io.FileDescriptor; - import java.io.FileInputStream; - import java.io.IOException; - import java.io.InputStream; -@@ -36,6 +37,9 @@ - import org.apache.hadoop.hdfs.protocol.datatransfer.PacketHeader; - import org.apache.hadoop.hdfs.util.DataTransferThrottler; - import org.apache.hadoop.io.IOUtils; -+import org.apache.hadoop.io.ReadaheadPool; -+import org.apache.hadoop.io.ReadaheadPool.ReadaheadRequest; -+import org.apache.hadoop.io.nativeio.NativeIO; - import org.apache.hadoop.net.SocketOutputStream; - import org.apache.hadoop.util.DataChecksum; - -@@ -118,7 +122,9 @@ class BlockSender implements 
java.io.Closeable { - private DataInputStream checksumIn; - /** Checksum utility */ - private final DataChecksum checksum; -- /** Starting position to read */ -+ /** Initial position to read */ -+ private long initialOffset; -+ /** Current position of read */ - private long offset; - /** Position of last byte to read from block file */ - private final long endOffset; -@@ -142,6 +148,24 @@ class BlockSender implements java.io.Closeable { - private final String clientTraceFmt; - private volatile ChunkChecksum lastChunkChecksum = null; - -+ /** The file descriptor of the block being sent */ -+ private FileDescriptor blockInFd; -+ -+ // Cache-management related fields -+ private final long readaheadLength; -+ private boolean shouldDropCacheBehindRead; -+ private ReadaheadRequest curReadahead; -+ private long lastCacheDropOffset; -+ private static final long CACHE_DROP_INTERVAL_BYTES = 1024 * 1024; // 1MB -+ /** -+ * Minimum length of read below which management of the OS -+ * buffer cache is disabled. -+ */ -+ private static final long LONG_READ_THRESHOLD_BYTES = 256 * 1024; -+ -+ private static ReadaheadPool readaheadPool = -+ ReadaheadPool.getInstance(); -+ - /** - * Constructor - * -@@ -165,6 +189,8 @@ class BlockSender implements java.io.Closeable { - this.corruptChecksumOk = corruptChecksumOk; - this.verifyChecksum = verifyChecksum; - this.clientTraceFmt = clientTraceFmt; -+ this.readaheadLength = datanode.getReadaheadLength(); -+ this.shouldDropCacheBehindRead = datanode.shouldDropCacheBehindReads(); - - synchronized(datanode.data) { - this.replica = getReplica(block, datanode); -@@ -277,6 +303,11 @@ class BlockSender implements java.io.Closeable { - DataNode.LOG.debug(""replica="" + replica); - } - blockIn = datanode.data.getBlockInputStream(block, offset); // seek to offset -+ if (blockIn instanceof FileInputStream) { -+ blockInFd = ((FileInputStream)blockIn).getFD(); -+ } else { -+ blockInFd = null; -+ } - } catch (IOException ioe) { - IOUtils.closeStream(this); - IOUtils.closeStream(blockIn); -@@ -288,6 +319,20 @@ class BlockSender implements java.io.Closeable { - * close opened files. - */ - public void close() throws IOException { -+ if (blockInFd != null && shouldDropCacheBehindRead) { -+ // drop the last few MB of the file from cache -+ try { -+ NativeIO.posixFadviseIfPossible( -+ blockInFd, lastCacheDropOffset, offset - lastCacheDropOffset, -+ NativeIO.POSIX_FADV_DONTNEED); -+ } catch (Exception e) { -+ LOG.warn(""Unable to drop cache on file close"", e); -+ } -+ } -+ if (curReadahead != null) { -+ curReadahead.cancel(); -+ } -+ - IOException ioe = null; - if(checksumIn!=null) { - try { -@@ -304,6 +349,7 @@ public void close() throws IOException { - ioe = e; - } - blockIn = null; -+ blockInFd = null; - } - // throw IOException if there is any - if(ioe!= null) { -@@ -538,10 +584,20 @@ long sendBlock(DataOutputStream out, OutputStream baseStream, - if (out == null) { - throw new IOException( ""out stream is null"" ); - } -- final long initialOffset = offset; -+ initialOffset = offset; - long totalRead = 0; - OutputStream streamForSendChunks = out; - -+ lastCacheDropOffset = initialOffset; -+ -+ if (isLongRead() && blockInFd != null) { -+ // Advise that this file descriptor will be accessed sequentially. -+ NativeIO.posixFadviseIfPossible(blockInFd, 0, 0, NativeIO.POSIX_FADV_SEQUENTIAL); -+ } -+ -+ // Trigger readahead of beginning of file if configured. -+ manageOsCache(); -+ - final long startTime = ClientTraceLog.isInfoEnabled() ? 
System.nanoTime() : 0; - try { - writeChecksumHeader(out); -@@ -569,6 +625,7 @@ long sendBlock(DataOutputStream out, OutputStream baseStream, - ByteBuffer pktBuf = ByteBuffer.allocate(pktSize); - - while (endOffset > offset) { -+ manageOsCache(); - long len = sendPacket(pktBuf, maxChunksPerPacket, streamForSendChunks, - transferTo, throttler); - offset += len; -@@ -595,6 +652,45 @@ long sendBlock(DataOutputStream out, OutputStream baseStream, - } - return totalRead; - } -+ -+ /** -+ * Manage the OS buffer cache by performing read-ahead -+ * and drop-behind. -+ */ -+ private void manageOsCache() throws IOException { -+ if (!isLongRead() || blockInFd == null) { -+ // don't manage cache manually for short-reads, like -+ // HBase random read workloads. -+ return; -+ } -+ -+ // Perform readahead if necessary -+ if (readaheadLength > 0 && readaheadPool != null) { -+ curReadahead = readaheadPool.readaheadStream( -+ clientTraceFmt, blockInFd, -+ offset, readaheadLength, Long.MAX_VALUE, -+ curReadahead); -+ } -+ -+ // Drop what we've just read from cache, since we aren't -+ // likely to need it again -+ long nextCacheDropOffset = lastCacheDropOffset + CACHE_DROP_INTERVAL_BYTES; -+ if (shouldDropCacheBehindRead && -+ offset >= nextCacheDropOffset) { -+ long dropLength = offset - lastCacheDropOffset; -+ if (dropLength >= 1024) { -+ NativeIO.posixFadviseIfPossible(blockInFd, -+ lastCacheDropOffset, dropLength, -+ NativeIO.POSIX_FADV_DONTNEED); -+ } -+ lastCacheDropOffset += CACHE_DROP_INTERVAL_BYTES; -+ } -+ } -+ -+ private boolean isLongRead() { -+ return (endOffset - offset) > LONG_READ_THRESHOLD_BYTES; -+ } -+ - - /** - * Write checksum header to the output stream -diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java -index 3f7733608999c..5be82dd59d8cf 100644 ---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java -+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java -@@ -104,6 +104,7 @@ - import org.apache.hadoop.fs.LocalFileSystem; - import org.apache.hadoop.fs.Path; - import org.apache.hadoop.fs.permission.FsPermission; -+import org.apache.hadoop.hdfs.DFSConfigKeys; - import org.apache.hadoop.hdfs.DFSUtil; - import org.apache.hadoop.hdfs.HDFSPolicyProvider; - import org.apache.hadoop.hdfs.HdfsConfiguration; -@@ -410,6 +411,11 @@ void refreshNamenodes(Configuration conf) - int socketTimeout; - int socketWriteTimeout = 0; - boolean transferToAllowed = true; -+ private boolean dropCacheBehindWrites = false; -+ private boolean syncBehindWrites = false; -+ private boolean dropCacheBehindReads = false; -+ private long readaheadLength = 0; -+ - int writePacketSize = 0; - boolean isBlockTokenEnabled; - BlockPoolTokenSecretManager blockPoolTokenSecretManager; -@@ -493,6 +499,20 @@ private void initConfig(Configuration conf) { - DFS_DATANODE_TRANSFERTO_ALLOWED_DEFAULT); - this.writePacketSize = conf.getInt(DFS_CLIENT_WRITE_PACKET_SIZE_KEY, - DFS_CLIENT_WRITE_PACKET_SIZE_DEFAULT); -+ -+ this.readaheadLength = conf.getLong( -+ DFSConfigKeys.DFS_DATANODE_READAHEAD_BYTES_KEY, -+ DFSConfigKeys.DFS_DATANODE_READAHEAD_BYTES_DEFAULT); -+ this.dropCacheBehindWrites = conf.getBoolean( -+ DFSConfigKeys.DFS_DATANODE_DROP_CACHE_BEHIND_WRITES_KEY, -+ DFSConfigKeys.DFS_DATANODE_DROP_CACHE_BEHIND_WRITES_DEFAULT); -+ this.syncBehindWrites = conf.getBoolean( -+ 
DFSConfigKeys.DFS_DATANODE_SYNC_BEHIND_WRITES_KEY, -+ DFSConfigKeys.DFS_DATANODE_SYNC_BEHIND_WRITES_DEFAULT); -+ this.dropCacheBehindReads = conf.getBoolean( -+ DFSConfigKeys.DFS_DATANODE_DROP_CACHE_BEHIND_READS_KEY, -+ DFSConfigKeys.DFS_DATANODE_DROP_CACHE_BEHIND_READS_DEFAULT); -+ - this.blockReportInterval = conf.getLong(DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, - DFS_BLOCKREPORT_INTERVAL_MSEC_DEFAULT); - this.initialBlockReportDelay = conf.getLong( -@@ -2859,4 +2879,20 @@ public Long getBalancerBandwidth() { - (DataXceiverServer) this.dataXceiverServer.getRunnable(); - return dxcs.balanceThrottler.getBandwidth(); - } -+ -+ long getReadaheadLength() { -+ return readaheadLength; -+ } -+ -+ boolean shouldDropCacheBehindWrites() { -+ return dropCacheBehindWrites; -+ } -+ -+ boolean shouldDropCacheBehindReads() { -+ return dropCacheBehindReads; -+ } -+ -+ boolean shouldSyncBehindWrites() { -+ return syncBehindWrites; -+ } - }" -e15bf1a95abeec1a39f212d65887d88a19f9f68e,drools,-fixed MVEL parser context naming issue.--git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@23995 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-,c,https://github.com/kiegroup/drools,"diff --git a/drools-compiler/src/main/java/org/drools/rule/builder/dialect/mvel/MVELDialect.java b/drools-compiler/src/main/java/org/drools/rule/builder/dialect/mvel/MVELDialect.java -index 8206e2099a2..4ddff78583d 100644 ---- a/drools-compiler/src/main/java/org/drools/rule/builder/dialect/mvel/MVELDialect.java -+++ b/drools-compiler/src/main/java/org/drools/rule/builder/dialect/mvel/MVELDialect.java -@@ -134,9 +134,7 @@ public class MVELDialect - - private boolean strictMode; - private int languageLevel; -- public static final Object COMPILER_LOCK = new Object(); -- -- private static AtomicInteger nameCounter = new AtomicInteger(); -+ public static final Object COMPILER_LOCK = new Object(); - - public MVELDialect(PackageBuilder builder, - PackageRegistry pkgRegistry, -@@ -666,13 +664,17 @@ public ParserContext getParserContext(final Dialect.AnalysisResult analysis, - // @todo proper source file name - String name; - if ( context != null && context.getPkg() != null & context.getPkg().getName() != null ) { -- name = context.getPkg().getName(); -+ if ( context instanceof RuleBuildContext ) { -+ name = context.getPkg().getName() + ""."" + ((RuleBuildContext)context).getRuleDescr().getClassName(); -+ } else { -+ name = context.getPkg().getName() + "".Unknown""; -+ } - } else { -- name = """"; -+ name = ""Unknown""; - } - final ParserContext parserContext = new ParserContext( this.imports, - null, -- name + ""_"" + nameCounter.getAndIncrement() ); -+ name ); - // getRuleDescr().getClassName() ); - - for ( Iterator it = this.packageImports.values().iterator(); it.hasNext(); ) {" -a09d266b247b185054d0ab0dfdb6e8dc2e8898bc,orientdb,Minor: removed some warnings--,p,https://github.com/JetBrains/intellij-community,⚠️ HTTP 404: Not Found -ca849f196990eec942468efaef3719f829c265eb,orientdb,Improved management of distributed cluster nodes--,p,https://github.com/JetBrains/intellij-community,⚠️ HTTP 404: Not Found -10c4f93ae28a5c0746783dd4cbec6bf20a11bad8,evllabs$jgaap,"slight optimizations to distances -",p,https://github.com/evllabs/jgaap,"diff --git a/src/com/jgaap/distances/BrayCurtisDistance.java b/src/com/jgaap/distances/BrayCurtisDistance.java -index 6021e5466..14de61bc5 100644 ---- a/src/com/jgaap/distances/BrayCurtisDistance.java -+++ b/src/com/jgaap/distances/BrayCurtisDistance.java -@@ -3,7 +3,6 @@ - import java.util.HashSet; - import 
java.util.Set; - --import com.jgaap.generics.DistanceCalculationException; - import com.jgaap.generics.DistanceFunction; - import com.jgaap.generics.Event; - import com.jgaap.generics.EventMap; -@@ -34,8 +33,7 @@ public boolean showInGUI() { - } - - @Override -- public double distance(EventMap unknownEventMap, EventMap knownEventMap) -- throws DistanceCalculationException { -+ public double distance(EventMap unknownEventMap, EventMap knownEventMap) { - - Set events = new HashSet(unknownEventMap.uniqueEvents()); - events.addAll(knownEventMap.uniqueEvents()); -@@ -43,8 +41,10 @@ public double distance(EventMap unknownEventMap, EventMap knownEventMap) - double distance = 0.0, sumNumer = 0.0, sumDenom = 0.0; - - for(Event event: events){ -- sumNumer += Math.abs(unknownEventMap.relativeFrequency(event) - knownEventMap.relativeFrequency(event)); -- sumDenom += unknownEventMap.relativeFrequency(event) + knownEventMap.relativeFrequency(event); -+ double known = knownEventMap.relativeFrequency(event); -+ double unknown = unknownEventMap.relativeFrequency(event); -+ sumNumer += Math.abs(unknown - known); -+ sumDenom += unknown + known; - } - distance = sumNumer / sumDenom; - return distance; -diff --git a/src/com/jgaap/distances/SoergleDistance.java b/src/com/jgaap/distances/SoergleDistance.java -index 8cfef0782..432182454 100644 ---- a/src/com/jgaap/distances/SoergleDistance.java -+++ b/src/com/jgaap/distances/SoergleDistance.java -@@ -43,8 +43,10 @@ public double distance(EventMap unknownEventMap, EventMap knownEventMap) - double distance = 0.0, sumNumer = 0.0, sumDenom = 0.0; - - for(Event event : events){ -- sumNumer += Math.abs(unknownEventMap.relativeFrequency(event) - knownEventMap.relativeFrequency(event)); -- sumDenom += Math.max(unknownEventMap.relativeFrequency(event), knownEventMap.relativeFrequency(event)); -+ double known = knownEventMap.relativeFrequency(event); -+ double unknown = unknownEventMap.relativeFrequency(event); -+ sumNumer += Math.abs(unknown - known); -+ sumDenom += Math.max(unknown, known); - } - distance = sumNumer / sumDenom;" -741dd3b80dc20cf513cf374ad938a1a4fe965887,Vala,"Change many static delegates to has_target = false -",a,https://github.com/GNOME/vala/,⚠️ Could not parse repo info -3d53bdc3d320973bcb0ca67047a59e0e58cee0b3,Mylyn Reviews,"cleanup warnings - -* Fix build path issue -* Minor Cleanup for Action - -Change-Id: I4e4bda10624e16d4f69545a953ae80d84939c1dd -",p,https://github.com/eclipse-mylyn/org.eclipse.mylyn.reviews,"diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/build.properties b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/build.properties -index e10dcceb..cc91072e 100644 ---- a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/build.properties -+++ b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/build.properties -@@ -1,5 +1,4 @@ --source.. = src/,\ -- src-gen/ -+source.. 
= src/ - bin.includes = META-INF/,\ - .,\ -- plugin.xml -\ No newline at end of file -+ plugin.xml -diff --git a/tbr/org.eclipse.mylyn.versions.context.ui/src/org/eclipse/mylyn/versions/tasks/context/ImportAsContextAction.java b/tbr/org.eclipse.mylyn.versions.context.ui/src/org/eclipse/mylyn/versions/tasks/context/ImportAsContextAction.java -index 059158ce..68ea94c6 100644 ---- a/tbr/org.eclipse.mylyn.versions.context.ui/src/org/eclipse/mylyn/versions/tasks/context/ImportAsContextAction.java -+++ b/tbr/org.eclipse.mylyn.versions.context.ui/src/org/eclipse/mylyn/versions/tasks/context/ImportAsContextAction.java -@@ -41,7 +41,7 @@ public ImportAsContextAction() { - } - - public void run() { -- -+ throw new java.lang.UnsupportedOperationException(); - } - - private String formatHandleString(Change c) { -@@ -71,11 +71,9 @@ public void run(ITaskVersionsModel model) { - if (elementNotDeleted(c)) { - InteractionEvent interactionEvent = new InteractionEvent( - Kind.SELECTION, null, formatHandleString(c), ORIGIN); -+ ContextCore.getContextManager().processInteractionEvent(interactionEvent); - -- ContextCore.getContextManager().processInteractionEvent( -- interactionEvent); -- MonitorUiPlugin.getDefault().notifyInteractionObserved( -- interactionEvent); -+ MonitorUiPlugin.getDefault().notifyInteractionObserved(interactionEvent); - } - } - } -diff --git a/tbr/org.eclipse.mylyn.versions.tasks.mapper.tests/src/org/eclipse/mylyn/versions/tasks/mapper/generic/GenericTaskChangesetMapperTest.java b/tbr/org.eclipse.mylyn.versions.tasks.mapper.tests/src/org/eclipse/mylyn/versions/tasks/mapper/generic/GenericTaskChangesetMapperTest.java -index 18c1d070..bc4cb83d 100644 ---- a/tbr/org.eclipse.mylyn.versions.tasks.mapper.tests/src/org/eclipse/mylyn/versions/tasks/mapper/generic/GenericTaskChangesetMapperTest.java -+++ b/tbr/org.eclipse.mylyn.versions.tasks.mapper.tests/src/org/eclipse/mylyn/versions/tasks/mapper/generic/GenericTaskChangesetMapperTest.java -@@ -1,3 +1,13 @@ -+/******************************************************************************* -+ * Copyright (c) 2012 Research Group for Industrial Software (INSO), Vienna University of Technology. -+ * All rights reserved. 
This program and the accompanying materials -+ * are made available under the terms of the Eclipse Public License v1.0 -+ * which accompanies this distribution, and is available at -+ * http://www.eclipse.org/legal/epl-v10.html -+ * -+ * Contributors: -+ * Research Group for Industrial Software (INSO), Vienna University of Technology - initial API and implementation -+ *******************************************************************************/ - package org.eclipse.mylyn.versions.tasks.mapper.generic; - - import static org.junit.Assert.*; -@@ -16,7 +26,11 @@ - import org.junit.Test; - import org.mockito.invocation.InvocationOnMock; - import org.mockito.stubbing.Answer; -- -+/** -+ * -+ * @author Kilian Matt -+ * -+ */ - public class GenericTaskChangesetMapperTest { - - private GenericTaskChangesetMapper mapper; -diff --git a/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/internal/versions/tasks/ui/ChangesetPart.java b/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/internal/versions/tasks/ui/ChangesetPart.java -index 49563f3c..bcfffdae 100644 ---- a/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/internal/versions/tasks/ui/ChangesetPart.java -+++ b/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/internal/versions/tasks/ui/ChangesetPart.java -@@ -45,14 +45,13 @@ - import org.eclipse.ui.forms.widgets.Section; - - /** -- * - * @author Kilian Matt -- * - */ - @SuppressWarnings(""restriction"") - public class ChangesetPart extends AbstractTaskEditorPart { - private TableViewer table; -- private ChangesetModel model = new ChangesetModel(); -+ -+ private final ChangesetModel model = new ChangesetModel(); - - public ChangesetPart() { - setPartName(""Changeset""); -@@ -67,8 +66,7 @@ public void createControl(Composite parent, FormToolkit toolkit) { - createTable(composite); - } - -- private Composite createContentComposite(FormToolkit toolkit, -- Section createSection) { -+ private Composite createContentComposite(FormToolkit toolkit, Section createSection) { - Composite composite = toolkit.createComposite(createSection); - createSection.setClient(composite); - composite.setLayout(new FillLayout()); -@@ -107,10 +105,10 @@ protected void fillToolBar(ToolBarManager toolBarManager) { - super.fillToolBar(toolBarManager); - toolBarManager.add(new IncludeSubTasksAction(model)); - List contributions = InternalExtensionPointLoader.loadActionContributions(); -- for(final ITaskVersionsContributionAction action : contributions) { -+ for (final ITaskVersionsContributionAction action : contributions) { - toolBarManager.add(new ActionDelegate(action) { -- @Override -- public void runWithEvent(Event event) { -+ @Override -+ public void runWithEvent(Event event) { - action.run(model); - } - }); -@@ -120,37 +118,33 @@ public void runWithEvent(Event event) { - private void registerContextMenu(TableViewer table) { - MenuManager menuManager = new MenuManager(); - menuManager.setRemoveAllWhenShown(true); -- getTaskEditorPage().getEditorSite().registerContextMenu( -- ""org.eclipse.mylyn.versions.changesets"", menuManager, table, -- true); -+ getTaskEditorPage().getEditorSite().registerContextMenu(""org.eclipse.mylyn.versions.changesets"", menuManager, -+ table, true); - Menu menu = menuManager.createContextMenu(table.getControl()); - table.getTable().setMenu(menu); - } - - private void addColumn(TableViewer table, String name) { -- TableViewerColumn tableViewerColumn = new TableViewerColumn(table, -- SWT.LEFT); -+ TableViewerColumn tableViewerColumn = new 
TableViewerColumn(table, SWT.LEFT); - tableViewerColumn.getColumn().setText(name); - tableViewerColumn.getColumn().setWidth(100); - } - -- private AbstractChangesetMappingProvider determineBestProvider( -- final ITask task) { -+ private AbstractChangesetMappingProvider determineBestProvider(final ITask task) { - AbstractChangesetMappingProvider bestProvider = new NullProvider(); - int score = Integer.MIN_VALUE; -- for (AbstractChangesetMappingProvider mappingProvider : TaskChangesetUtil -- .getMappingProviders()) { -+ for (AbstractChangesetMappingProvider mappingProvider : TaskChangesetUtil.getMappingProviders()) { - if (score < mappingProvider.getScoreFor(task)) { - bestProvider = mappingProvider; - } - } - return bestProvider; - } -- private static class NullProvider extends AbstractChangesetMappingProvider{ -+ -+ private static class NullProvider extends AbstractChangesetMappingProvider { - - @Override -- public void getChangesetsForTask(IChangeSetMapping mapping, -- IProgressMonitor monitor) throws CoreException { -+ public void getChangesetsForTask(IChangeSetMapping mapping, IProgressMonitor monitor) throws CoreException { - } - - @Override -@@ -160,8 +154,7 @@ public int getScoreFor(ITask task) { - - } - -- private IChangeSetMapping createChangeSetMapping(final ITask task, -- final List changesets) { -+ private IChangeSetMapping createChangeSetMapping(final ITask task, final List changesets) { - return new IChangeSetMapping() { - - public ITask getTask() { -@@ -207,8 +200,7 @@ public List getInput() { - if (task instanceof ITaskContainer) { - ITaskContainer taskContainer = (ITaskContainer) task; - for (ITask subTask : taskContainer.getChildren()) { -- changesetsMapping.add(createChangeSetMapping(subTask, -- changesets)); -+ changesetsMapping.add(createChangeSetMapping(subTask, changesets)); - } - } - } -@@ -221,7 +213,8 @@ public void run() { - provider.getChangesetsForTask(csm, new NullProgressMonitor()); - } - } catch (CoreException e) { -- getTaskEditorPage().getTaskEditor().setMessage(""An exception occurred "" + e.getMessage(), IMessageProvider.ERROR); -+ getTaskEditorPage().getTaskEditor().setMessage(""An exception occurred "" + e.getMessage(), -+ IMessageProvider.ERROR); - } - }" -770ee735efa935a21b9992090eb063732e826ba5,ReactiveX-RxJava,Add unit tests for recursive scheduler usage--These tests came from @mairbek at https://github.com/Netflix/RxJava/pull/229-issuecomment-16115941-,p,https://github.com/ReactiveX/RxJava,"diff --git a/rxjava-core/src/test/java/rx/concurrency/TestSchedulers.java b/rxjava-core/src/test/java/rx/concurrency/TestSchedulers.java -index ec247d0b95..a4760ff65e 100644 ---- a/rxjava-core/src/test/java/rx/concurrency/TestSchedulers.java -+++ b/rxjava-core/src/test/java/rx/concurrency/TestSchedulers.java -@@ -16,20 +16,24 @@ - package rx.concurrency; - - import static org.junit.Assert.*; -+import static org.mockito.Mockito.*; - - import java.util.concurrent.CountDownLatch; - import java.util.concurrent.TimeUnit; -+import java.util.concurrent.atomic.AtomicBoolean; - import java.util.concurrent.atomic.AtomicInteger; --import java.util.concurrent.atomic.AtomicReference; - - import org.junit.Test; - - import rx.Observable; - import rx.Observer; -+import rx.Scheduler; - import rx.Subscription; -+import rx.subscriptions.BooleanSubscription; - import rx.subscriptions.Subscriptions; - import rx.util.functions.Action1; - import rx.util.functions.Func1; -+import rx.util.functions.Func2; - - public class TestSchedulers { - -@@ -245,4 +249,114 @@ public void 
call(Integer t) { - assertEquals(5, count.get()); - } - -+ @Test -+ public void testRecursiveScheduler1() { -+ Observable obs = Observable.create(new Func1, Subscription>() { -+ @Override -+ public Subscription call(final Observer observer) { -+ return Schedulers.currentThread().schedule(0, new Func2() { -+ @Override -+ public Subscription call(Scheduler scheduler, Integer i) { -+ if (i > 42) { -+ observer.onCompleted(); -+ return Subscriptions.empty(); -+ } -+ -+ observer.onNext(i); -+ -+ return scheduler.schedule(i + 1, this); -+ } -+ }); -+ } -+ }); -+ -+ final AtomicInteger lastValue = new AtomicInteger(); -+ obs.forEach(new Action1() { -+ -+ @Override -+ public void call(Integer v) { -+ System.out.println(""Value: "" + v); -+ lastValue.set(v); -+ } -+ }); -+ -+ assertEquals(42, lastValue.get()); -+ } -+ -+ @Test -+ public void testRecursiveScheduler2() throws InterruptedException { -+ // use latches instead of Thread.sleep -+ final CountDownLatch latch = new CountDownLatch(10); -+ final CountDownLatch completionLatch = new CountDownLatch(1); -+ -+ Observable obs = Observable.create(new Func1, Subscription>() { -+ @Override -+ public Subscription call(final Observer observer) { -+ -+ return Schedulers.threadPoolForComputation().schedule(new BooleanSubscription(), new Func2() { -+ @Override -+ public Subscription call(Scheduler scheduler, BooleanSubscription cancel) { -+ if (cancel.isUnsubscribed()) { -+ observer.onCompleted(); -+ completionLatch.countDown(); -+ return Subscriptions.empty(); -+ } -+ -+ observer.onNext(42); -+ latch.countDown(); -+ -+ try { -+ Thread.sleep(1); -+ } catch (InterruptedException e) { -+ e.printStackTrace(); -+ } -+ -+ scheduler.schedule(cancel, this); -+ -+ return cancel; -+ } -+ }); -+ } -+ }); -+ -+ @SuppressWarnings(""unchecked"") -+ Observer o = mock(Observer.class); -+ -+ final AtomicInteger count = new AtomicInteger(); -+ final AtomicBoolean completed = new AtomicBoolean(false); -+ Subscription subscribe = obs.subscribe(new Observer() { -+ @Override -+ public void onCompleted() { -+ System.out.println(""Completed""); -+ completed.set(true); -+ } -+ -+ @Override -+ public void onError(Exception e) { -+ System.out.println(""Error""); -+ } -+ -+ @Override -+ public void onNext(Integer args) { -+ count.incrementAndGet(); -+ System.out.println(args); -+ } -+ }); -+ -+ if (!latch.await(5000, TimeUnit.MILLISECONDS)) { -+ fail(""Timed out waiting on onNext latch""); -+ } -+ -+ // now unsubscribe and ensure it stops the recursive loop -+ subscribe.unsubscribe(); -+ System.out.println(""unsubscribe""); -+ -+ if (!completionLatch.await(5000, TimeUnit.MILLISECONDS)) { -+ fail(""Timed out waiting on completion latch""); -+ } -+ -+ assertEquals(10, count.get()); // wondering if this could be 11 in a race condition (which would be okay due to how unsubscribe works ... 
just it would make this test non-deterministic) -+ assertTrue(completed.get()); -+ } -+ - }" -1257196d255cf3697ab869a86eb6f84034232f78,orientdb,Fixed problem of ConcurrentModificationException- reported by Bayoda: the problem should be due to a wrong isolation on Map in- cache: used exclusive lock even for keys() and get().--,c,https://github.com/JetBrains/intellij-community,⚠️ HTTP 404: Not Found -3a5efd71396f7febeee17b268071b5a07cba27c3,camel,CAMEL-4071 clean up the camel OSGi integration- test and load the karaf spring feature first--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@1133394 13f79535-47bb-0310-9956-ffa450edef68-,p,https://github.com/apache/camel,"diff --git a/tests/camel-itest-osgi/pom.xml b/tests/camel-itest-osgi/pom.xml -index a1352a1716b4f..f6b8abc19f577 100644 ---- a/tests/camel-itest-osgi/pom.xml -+++ b/tests/camel-itest-osgi/pom.xml -@@ -345,6 +345,7 @@ - - - ${spring-version} -+ ${karaf-version} - - - -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/OSGiIntegrationTestSupport.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/OSGiIntegrationTestSupport.java -index 8c9c2a2932c97..692426aa17002 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/OSGiIntegrationTestSupport.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/OSGiIntegrationTestSupport.java -@@ -72,39 +72,46 @@ protected void setThreadContextClassLoader() { - } - - public static UrlReference getCamelKarafFeatureUrl() { -- String springVersion = System.getProperty(""springVersion""); -- System.out.println(""*** The spring version is "" + springVersion + "" ***""); -- -+ - String type = ""xml/features""; - return mavenBundle().groupId(""org.apache.camel.karaf""). - artifactId(""apache-camel"").versionAsInProject().type(type); - } - - public static UrlReference getKarafFeatureUrl() { -- String karafVersion = ""2.2.1""; -+ String karafVersion = System.getProperty(""karafVersion""); - System.out.println(""*** The karaf version is "" + karafVersion + "" ***""); - - String type = ""xml/features""; - return mavenBundle().groupId(""org.apache.karaf.assemblies.features""). 
- artifactId(""standard"").version(karafVersion).type(type); - } -- -- @Configuration -- public static Option[] configure() throws Exception { -+ -+ public static Option[] getDefaultCamelKarafOptions() { - Option[] options = combine( - // Default karaf environment - Helper.getDefaultOptions( - // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -+ Helper.setLogLevel(""WARN"")), -+ -+ // install the spring, http features first -+ scanFeatures(getKarafFeatureUrl(), ""spring"", ""spring-dm"", ""jetty""), - - // using the features to install the camel components - scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test""), -- -+ ""camel-core"", ""camel-spring"", ""camel-test""), -+ - workingDirectory(""target/paxrunner/""), - - equinox(), - felix()); -+ return options; -+ } -+ -+ @Configuration -+ public static Option[] configure() throws Exception { -+ Option[] options = combine( -+ getDefaultCamelKarafOptions()); - - // for remote debugging - // vmOption(""-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=5008""), -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/ahc/AhcTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/ahc/AhcTest.java -index 9cc421830c948..d6b6da8a274ba 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/ahc/AhcTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/ahc/AhcTest.java -@@ -18,7 +18,6 @@ - - import org.apache.camel.builder.RouteBuilder; - import org.apache.camel.itest.osgi.OSGiIntegrationTestSupport; --import org.apache.karaf.testing.Helper; - import org.junit.Ignore; - import org.junit.Test; - import org.junit.runner.RunWith; -@@ -26,11 +25,8 @@ - import org.ops4j.pax.exam.junit.Configuration; - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; - - /** - * -@@ -70,17 +66,11 @@ public void configure() { - @Configuration - public static Option[] configure() throws Exception { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- // using the features to install the camel components -+ getDefaultCamelKarafOptions(), -+ -+ // using the features to install other camel components - scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-test"", ""camel-jetty"", ""camel-ahc""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -+ ""camel-jetty"", ""camel-ahc"")); - - return options; - } -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/aws/AwsS3IntegrationTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/aws/AwsS3IntegrationTest.java -index 48a5708219c28..6be307e76766e 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/aws/AwsS3IntegrationTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/aws/AwsS3IntegrationTest.java -@@ -25,25 +25,15 @@ - import org.apache.camel.Processor; - import 
org.apache.camel.component.aws.s3.S3Constants; - import org.apache.camel.component.mock.MockEndpoint; --import org.apache.camel.itest.osgi.OSGiIntegrationSpringTestSupport; --import org.apache.karaf.testing.Helper; - import org.junit.Ignore; - import org.junit.Test; - import org.junit.runner.RunWith; --import org.ops4j.pax.exam.Option; --import org.ops4j.pax.exam.junit.Configuration; - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - import org.springframework.osgi.context.support.OsgiBundleXmlApplicationContext; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; --import static org.ops4j.pax.exam.OptionUtils.combine; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; -- - @RunWith(JUnit4TestRunner.class) - @Ignore(""Test fails"") --public class AwsS3IntegrationTest extends OSGiIntegrationSpringTestSupport { -+public class AwsS3IntegrationTest extends AwsTestSupport { - - @EndpointInject(uri = ""mock:result"") - private MockEndpoint result; -@@ -109,22 +99,4 @@ private void assertResponseMessage(Message message) { - assertEquals(""3a5c8b1ad448bca04584ecb55b836264"", message.getHeader(S3Constants.E_TAG)); - assertNull(message.getHeader(S3Constants.VERSION_ID)); - } -- -- @Configuration -- public static Option[] configure() { -- Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-aws""), -- workingDirectory(""target/paxrunner/""), -- equinox(), -- felix()); -- -- return options; -- } - } -\ No newline at end of file -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/aws/AwsS3Test.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/aws/AwsS3Test.java -index 7a5ec944a0cfc..c6adb7c93c858 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/aws/AwsS3Test.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/aws/AwsS3Test.java -@@ -41,7 +41,7 @@ - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; - - @RunWith(JUnit4TestRunner.class) --public class AwsS3Test extends OSGiIntegrationSpringTestSupport { -+public class AwsS3Test extends AwsTestSupport { - - @EndpointInject(uri = ""mock:result-s3"") - private MockEndpoint result; -@@ -108,21 +108,4 @@ private void assertResponseMessage(Message message) { - assertNull(message.getHeader(S3Constants.VERSION_ID)); - } - -- @Configuration -- public static Option[] configure() { -- Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-aws""), -- workingDirectory(""target/paxrunner/""), -- equinox(), -- felix()); -- -- return options; -- } - } -\ No newline at end of file -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/aws/AwsSnsIntegrationTest.java 
b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/aws/AwsSnsIntegrationTest.java -index 3be6631b863e8..68c9439be01ae 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/aws/AwsSnsIntegrationTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/aws/AwsSnsIntegrationTest.java -@@ -20,25 +20,15 @@ - import org.apache.camel.ExchangePattern; - import org.apache.camel.Processor; - import org.apache.camel.component.aws.sns.SnsConstants; --import org.apache.camel.itest.osgi.OSGiIntegrationSpringTestSupport; --import org.apache.karaf.testing.Helper; - import org.junit.Ignore; - import org.junit.Test; - import org.junit.runner.RunWith; --import org.ops4j.pax.exam.Option; --import org.ops4j.pax.exam.junit.Configuration; - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - import org.springframework.osgi.context.support.OsgiBundleXmlApplicationContext; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; --import static org.ops4j.pax.exam.OptionUtils.combine; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; -- - @RunWith(JUnit4TestRunner.class) - @Ignore(""Must be manually tested. Provide your own accessKey and secretKey in CamelIntegrationContext.xml!"") --public class AwsSnsIntegrationTest extends OSGiIntegrationSpringTestSupport { -+public class AwsSnsIntegrationTest extends AwsTestSupport { - - @Override - protected OsgiBundleXmlApplicationContext createApplicationContext() { -@@ -69,21 +59,5 @@ public void process(Exchange exchange) throws Exception { - assertNotNull(exchange.getOut().getHeader(SnsConstants.MESSAGE_ID)); - } - -- @Configuration -- public static Option[] configure() { -- Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-aws""), -- workingDirectory(""target/paxrunner/""), -- equinox(), -- felix()); -- -- return options; -- } -+ - } -\ No newline at end of file -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/aws/AwsSnsTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/aws/AwsSnsTest.java -index a9f08d891612d..1c601d647842e 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/aws/AwsSnsTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/aws/AwsSnsTest.java -@@ -36,7 +36,7 @@ - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; - - @RunWith(JUnit4TestRunner.class) --public class AwsSnsTest extends OSGiIntegrationSpringTestSupport { -+public class AwsSnsTest extends AwsTestSupport { - - @Override - protected OsgiBundleXmlApplicationContext createApplicationContext() { -@@ -66,22 +66,5 @@ public void process(Exchange exchange) throws Exception { - - assertEquals(""dcc8ce7a-7f18-4385-bedd-b97984b4363c"", exchange.getOut().getHeader(SnsConstants.MESSAGE_ID)); - } -- -- @Configuration -- public static Option[] configure() { -- Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging 
(logProfile) -- Helper.setLogLevel(""WARN"")), -- -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-aws""), -- workingDirectory(""target/paxrunner/""), -- equinox(), -- felix()); -- -- return options; -- } -+ - } -\ No newline at end of file -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/aws/AwsSqsIntegrationTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/aws/AwsSqsIntegrationTest.java -index a6938b3829655..3b9288a88da0d 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/aws/AwsSqsIntegrationTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/aws/AwsSqsIntegrationTest.java -@@ -22,25 +22,15 @@ - import org.apache.camel.Processor; - import org.apache.camel.component.aws.sqs.SqsConstants; - import org.apache.camel.component.mock.MockEndpoint; --import org.apache.camel.itest.osgi.OSGiIntegrationSpringTestSupport; --import org.apache.karaf.testing.Helper; - import org.junit.Ignore; - import org.junit.Test; - import org.junit.runner.RunWith; --import org.ops4j.pax.exam.Option; --import org.ops4j.pax.exam.junit.Configuration; - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - import org.springframework.osgi.context.support.OsgiBundleXmlApplicationContext; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; --import static org.ops4j.pax.exam.OptionUtils.combine; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; -- - @RunWith(JUnit4TestRunner.class) - @Ignore(""Must be manually tested. 
Provide your own accessKey and secretKey in CamelIntegrationContext.xml!"") --public class AwsSqsIntegrationTest extends OSGiIntegrationSpringTestSupport { -+public class AwsSqsIntegrationTest extends AwsTestSupport { - - @EndpointInject(uri = ""mock:result"") - private MockEndpoint result; -@@ -95,22 +85,4 @@ public void process(Exchange exchange) throws Exception { - assertNotNull(exchange.getOut().getHeader(SqsConstants.MESSAGE_ID)); - assertEquals(""6a1559560f67c5e7a7d5d838bf0272ee"", exchange.getOut().getHeader(SqsConstants.MD5_OF_BODY)); - } -- -- @Configuration -- public static Option[] configure() { -- Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-aws""), -- workingDirectory(""target/paxrunner/""), -- equinox(), -- felix()); -- -- return options; -- } - } -\ No newline at end of file -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/aws/AwsSqsTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/aws/AwsSqsTest.java -index 1247a68cd05c6..9a8c5086f6f49 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/aws/AwsSqsTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/aws/AwsSqsTest.java -@@ -22,23 +22,14 @@ - import org.apache.camel.Processor; - import org.apache.camel.component.aws.sqs.SqsConstants; - import org.apache.camel.component.mock.MockEndpoint; --import org.apache.camel.itest.osgi.OSGiIntegrationSpringTestSupport; --import org.apache.karaf.testing.Helper; - import org.junit.Test; - import org.junit.runner.RunWith; --import org.ops4j.pax.exam.Option; --import org.ops4j.pax.exam.junit.Configuration; - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - import org.springframework.osgi.context.support.OsgiBundleXmlApplicationContext; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; --import static org.ops4j.pax.exam.OptionUtils.combine; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; - - @RunWith(JUnit4TestRunner.class) --public class AwsSqsTest extends OSGiIntegrationSpringTestSupport { -+public class AwsSqsTest extends AwsTestSupport { - - @EndpointInject(uri = ""mock:result"") - private MockEndpoint result; -@@ -94,21 +85,5 @@ public void process(Exchange exchange) throws Exception { - assertEquals(""6a1559560f67c5e7a7d5d838bf0272ee"", exchange.getOut().getHeader(SqsConstants.MD5_OF_BODY)); - } - -- @Configuration -- public static Option[] configure() { -- Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-aws""), -- workingDirectory(""target/paxrunner/""), -- equinox(), -- felix()); -- -- return options; -- } -+ - } -\ No newline at end of file -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/aws/AwsTestSupport.java 
b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/aws/AwsTestSupport.java -new file mode 100644 -index 0000000000000..629f41db6d05f ---- /dev/null -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/aws/AwsTestSupport.java -@@ -0,0 +1,42 @@ -+/** -+ * Licensed to the Apache Software Foundation (ASF) under one or more -+ * contributor license agreements. See the NOTICE file distributed with -+ * this work for additional information regarding copyright ownership. -+ * The ASF licenses this file to You under the Apache License, Version 2.0 -+ * (the ""License""); you may not use this file except in compliance with -+ * the License. You may obtain a copy of the License at -+ * -+ * http://www.apache.org/licenses/LICENSE-2.0 -+ * -+ * Unless required by applicable law or agreed to in writing, software -+ * distributed under the License is distributed on an ""AS IS"" BASIS, -+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -+ * See the License for the specific language governing permissions and -+ * limitations under the License. -+ */ -+ -+package org.apache.camel.itest.osgi.aws; -+ -+import org.apache.camel.itest.osgi.OSGiIntegrationSpringTestSupport; -+import org.ops4j.pax.exam.Option; -+import org.ops4j.pax.exam.junit.Configuration; -+import org.springframework.osgi.context.support.OsgiBundleXmlApplicationContext; -+ -+import static org.ops4j.pax.exam.OptionUtils.combine; -+import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; -+ -+ -+public abstract class AwsTestSupport extends OSGiIntegrationSpringTestSupport { -+ -+ -+ @Configuration -+ public static Option[] configure() { -+ Option[] options = combine( -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-aws"")); -+ -+ return options; -+ } -+ -+} -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/bean/validator/BeanValidatorTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/bean/validator/BeanValidatorTest.java -index bb50885b4eafd..4f90ea4175f3a 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/bean/validator/BeanValidatorTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/bean/validator/BeanValidatorTest.java -@@ -20,21 +20,16 @@ - import org.apache.camel.Exchange; - import org.apache.camel.Processor; - import org.apache.camel.itest.osgi.OSGiIntegrationTestSupport; --import org.apache.karaf.testing.Helper; - import org.junit.Test; - import org.junit.runner.RunWith; - import org.ops4j.pax.exam.Option; - import org.ops4j.pax.exam.junit.Configuration; - import org.ops4j.pax.exam.junit.JUnit4TestRunner; --import org.ops4j.pax.swissbox.tinybundles.dp.Constants; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; -+ - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; --import static org.ops4j.pax.swissbox.tinybundles.core.TinyBundles.newBundle; --import static org.ops4j.pax.swissbox.tinybundles.core.TinyBundles.withBnd; -+ - - @RunWith(JUnit4TestRunner.class) - public class BeanValidatorTest extends OSGiIntegrationTestSupport { -@@ -64,23 +59,13 @@ public void process(Exchange exchange) throws Exception { - Car 
createCar(String manufacturer, String licencePlate) { - return new CarWithAnnotations(manufacturer, licencePlate); - } -- -+ - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-bean-validator""), -- -- workingDirectory(""target/paxrunner/""), -- -- equinox(), -- felix()); -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-bean-validator"")); - - return options; - } -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/blueprint/BlueprintExplicitPropertiesRouteTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/blueprint/BlueprintExplicitPropertiesRouteTest.java -index 9b4e4d4e74242..75a20aa3bb782 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/blueprint/BlueprintExplicitPropertiesRouteTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/blueprint/BlueprintExplicitPropertiesRouteTest.java -@@ -91,6 +91,9 @@ public static Option[] configure() throws Exception { - .set(Constants.BUNDLE_SYMBOLICNAME, BlueprintExplicitPropertiesRouteTest.class.getName()) - .build()).noStart(), - -+ // install the spring, http features first -+ scanFeatures(getKarafFeatureUrl(), ""spring"", ""spring-dm"", ""jetty""), -+ - // using the features to install the camel components - scanFeatures(getCamelKarafFeatureUrl(), - ""camel-core"", ""camel-blueprint"", ""camel-test""), -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/blueprint/BlueprintPropertiesRouteTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/blueprint/BlueprintPropertiesRouteTest.java -index f2f7dfc9722ca..378875caa6467 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/blueprint/BlueprintPropertiesRouteTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/blueprint/BlueprintPropertiesRouteTest.java -@@ -90,6 +90,8 @@ public static Option[] configure() throws Exception { - // install blueprint requirements - mavenBundle(""org.apache.felix"", ""org.apache.felix.configadmin""), - -+ // install the spring, http features first -+ scanFeatures(getKarafFeatureUrl(), ""spring"", ""spring-dm"", ""jetty""), - // using the features to install the camel components - scanFeatures(getCamelKarafFeatureUrl(), - ""camel-core"", ""camel-blueprint"", ""camel-test""), -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/blueprint/CamelBlueprint2Test.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/blueprint/CamelBlueprint2Test.java -index 6287094266775..b557b1f666b04 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/blueprint/CamelBlueprint2Test.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/blueprint/CamelBlueprint2Test.java -@@ -188,6 +188,8 @@ public static Option[] configure() throws Exception { - .set(Constants.DYNAMICIMPORT_PACKAGE, ""*"") - .build()).noStart(), - -+ // install the spring, http features first -+ 
scanFeatures(getKarafFeatureUrl(), ""jetty""), - // using the features to install the camel components - scanFeatures(getCamelKarafFeatureUrl(), - ""camel-core"", ""camel-blueprint"", ""camel-test"", ""camel-mail"", ""camel-jaxb"", ""camel-jms""), -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/blueprint/CamelBlueprint3Test.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/blueprint/CamelBlueprint3Test.java -index 0e6799657cfcf..1f6a27908d399 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/blueprint/CamelBlueprint3Test.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/blueprint/CamelBlueprint3Test.java -@@ -112,6 +112,9 @@ public static Option[] configure() throws Exception { - .set(Constants.BUNDLE_SYMBOLICNAME, ""CamelBlueprintTestBundle9"") - .set(Constants.DYNAMICIMPORT_PACKAGE, ""*"") - .build()).noStart(), -+ -+ // install the spring, http features first -+ scanFeatures(getKarafFeatureUrl(), ""spring"", ""jetty""), - - // using the features to install the camel components - scanFeatures(getCamelKarafFeatureUrl(), -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/blueprint/CamelBlueprint4Test.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/blueprint/CamelBlueprint4Test.java -index dacff6254d5d7..ba5d36c80c1c6 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/blueprint/CamelBlueprint4Test.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/blueprint/CamelBlueprint4Test.java -@@ -91,6 +91,9 @@ public static Option[] configure() throws Exception { - .add(""org/apache/camel/itest/osgi/blueprint/example.vm"", OSGiBlueprintTestSupport.class.getResource(""example.vm"")) - .set(Constants.BUNDLE_SYMBOLICNAME, ""CamelBlueprintTestBundle20"") - .build()).noStart(), -+ -+ // install the spring, http features first -+ scanFeatures(getKarafFeatureUrl(), ""spring"", ""jetty""), - - // using the features to install the camel components - scanFeatures(getCamelKarafFeatureUrl(), -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/blueprint/CamelBlueprintTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/blueprint/CamelBlueprintTest.java -index a2e39070aa0f3..1600985f58966 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/blueprint/CamelBlueprintTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/blueprint/CamelBlueprintTest.java -@@ -146,6 +146,9 @@ public static Option[] configure() throws Exception { - .set(Constants.BUNDLE_SYMBOLICNAME, ""CamelBlueprintTestBundle5"") - .set(Constants.DYNAMICIMPORT_PACKAGE, ""*"") - .build()).noStart(), -+ -+ // install the spring, http features first -+ scanFeatures(getKarafFeatureUrl(), ""spring"", ""spring-dm"", ""jetty""), - - // using the features to install the camel components - scanFeatures(getCamelKarafFeatureUrl(), -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/blueprint/OSGiBlueprintHelloWorldTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/blueprint/OSGiBlueprintHelloWorldTest.java -index d1def0e19acf9..c1961004d98ee 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/blueprint/OSGiBlueprintHelloWorldTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/blueprint/OSGiBlueprintHelloWorldTest.java -@@ -86,6 +86,9 @@ 
public static Option[] configure() throws Exception { - .add(""OSGI-INF/blueprint/test.xml"", OSGiBlueprintTestSupport.class.getResource(""blueprint-13.xml"")) - .set(Constants.BUNDLE_SYMBOLICNAME, OSGiBlueprintHelloWorldTest.class.getName()) - .build()).noStart(), -+ -+ // install the spring, http features first -+ scanFeatures(getKarafFeatureUrl(), ""spring"", ""spring-dm"", ""jetty""), - - // using the features to install the camel components - scanFeatures(getCamelKarafFeatureUrl(), -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/cache/CacheManagerFactoryRefTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/cache/CacheManagerFactoryRefTest.java -index ddcd5c1a6d2f3..47b6e9539b32e 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/cache/CacheManagerFactoryRefTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/cache/CacheManagerFactoryRefTest.java -@@ -26,18 +26,14 @@ - import org.apache.camel.component.cache.CacheEndpoint; - import org.apache.camel.component.cache.CacheManagerFactory; - import org.apache.camel.itest.osgi.OSGiIntegrationTestSupport; --import org.apache.karaf.testing.Helper; - import org.junit.Test; - import org.junit.runner.RunWith; - import org.ops4j.pax.exam.Option; - import org.ops4j.pax.exam.junit.Configuration; - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; - - @RunWith(JUnit4TestRunner.class) - public class CacheManagerFactoryRefTest extends OSGiIntegrationTestSupport { -@@ -78,25 +74,16 @@ public void configure() { - } - }; - } -- -+ -+ -+ - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax -- // logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- -- // using the features to install the camel components -- scanFeatures( -- getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-cache""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -- -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-cache"")); -+ - return options; - } - -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/cache/CacheRoutesManagementTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/cache/CacheRoutesManagementTest.java -index 12a06c702fe40..4a82d52ffa909 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/cache/CacheRoutesManagementTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/cache/CacheRoutesManagementTest.java -@@ -26,18 +26,15 @@ - import org.apache.camel.component.cache.CacheConstants; - import org.apache.camel.component.cache.CacheManagerFactory; - import org.apache.camel.itest.osgi.OSGiIntegrationTestSupport; --import org.apache.karaf.testing.Helper; - import org.junit.Test; - import org.junit.runner.RunWith; - import org.ops4j.pax.exam.Option; - import 
org.ops4j.pax.exam.junit.Configuration; - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; -+ - - @RunWith(JUnit4TestRunner.class) - public class CacheRoutesManagementTest extends OSGiIntegrationTestSupport { -@@ -100,24 +97,14 @@ public void configure() { - }; - } - -+ - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax -- // logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- -- // using the features to install the camel components -- scanFeatures( -- getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-cache""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -- -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-cache"")); -+ - return options; - } - -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/cache/CacheTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/cache/CacheTest.java -index 240ea6204226a..aef41d4f7bbdf 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/cache/CacheTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/cache/CacheTest.java -@@ -19,18 +19,14 @@ - import org.apache.camel.builder.RouteBuilder; - import org.apache.camel.component.cache.CacheConstants; - import org.apache.camel.itest.osgi.OSGiIntegrationTestSupport; --import org.apache.karaf.testing.Helper; - import org.junit.Test; - import org.junit.runner.RunWith; - import org.ops4j.pax.exam.Option; - import org.ops4j.pax.exam.junit.Configuration; - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; - - @RunWith(JUnit4TestRunner.class) - public class CacheTest extends OSGiIntegrationTestSupport { -@@ -66,19 +62,11 @@ public void configure() { - - - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-cache""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-cache"")); - - return options; - } -diff --git 
a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/cache/replication/CacheReplicationTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/cache/replication/CacheReplicationTest.java -index 8fac6b558cee8..ef3df9dfef17a 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/cache/replication/CacheReplicationTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/cache/replication/CacheReplicationTest.java -@@ -20,6 +20,7 @@ - import org.apache.karaf.testing.Helper; - import org.junit.Test; - import org.junit.runner.RunWith; -+ - import org.ops4j.pax.exam.Option; - import org.ops4j.pax.exam.junit.Configuration; - import org.ops4j.pax.exam.junit.JUnit4TestRunner; -@@ -70,6 +71,9 @@ public static Option[] configure() throws Exception { - // this is how you set the default log level when using pax - // logging (logProfile) - Helper.setLogLevel(""WARN"")), -+ -+ // install the spring, http features first -+ scanFeatures(getKarafFeatureUrl(), ""spring"", ""spring-dm"", ""jetty""), - - // using the features to install AMQ - scanFeatures(""mvn:org.apache.activemq/activemq-karaf/5.5.0/xml/features"", -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/cxf/CxfProxyExampleTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/cxf/CxfProxyExampleTest.java -index 98a749a79f76d..69f41bca0b920 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/cxf/CxfProxyExampleTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/cxf/CxfProxyExampleTest.java -@@ -88,6 +88,9 @@ public static Option[] configure() throws Exception { - Helper.getDefaultOptions( - // this is how you set the default log level when using pax logging (logProfile) - Helper.setLogLevel(""WARN"")), -+ -+ // install the spring, http features first -+ scanFeatures(getKarafFeatureUrl(), ""spring"", ""spring-dm"", ""jetty""), - - // using the features to install the camel components - scanFeatures(getCamelKarafFeatureUrl(), -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/dozer/DozerTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/dozer/DozerTest.java -index 5f2c37a493f26..14e805cf3f88d 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/dozer/DozerTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/dozer/DozerTest.java -@@ -27,11 +27,10 @@ - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - import org.springframework.osgi.context.support.OsgiBundleXmlApplicationContext; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; -+ - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; -+ - - /** - * @version -@@ -66,23 +65,14 @@ public void testDozer() throws Exception { - - assertMockEndpointsSatisfied(); - } -- -+ - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- -- // using the features to install the camel components -- 
scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-dozer""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -- -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-dozer"")); -+ - return options; - } - -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/exec/ExecTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/exec/ExecTest.java -index 7f22127e58a67..bd6bcbbb8bac1 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/exec/ExecTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/exec/ExecTest.java -@@ -18,7 +18,6 @@ - - import org.apache.camel.builder.RouteBuilder; - import org.apache.camel.itest.osgi.OSGiIntegrationTestSupport; --import org.apache.karaf.testing.Helper; - import org.junit.Ignore; - import org.junit.Test; - import org.junit.runner.RunWith; -@@ -26,11 +25,8 @@ - import org.ops4j.pax.exam.junit.Configuration; - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; - - @RunWith(JUnit4TestRunner.class) - @Ignore(""We need a test which runs on all platforms"") -@@ -52,23 +48,15 @@ public void configure() { - }; - } - -- - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-exec""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-exec"")); - - return options; - } - -+ - } -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/freemarker/FreemarkerTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/freemarker/FreemarkerTest.java -index d14d06ed1b218..bde59afb9f502 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/freemarker/FreemarkerTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/freemarker/FreemarkerTest.java -@@ -22,18 +22,14 @@ - import org.apache.camel.Processor; - import org.apache.camel.builder.RouteBuilder; - import org.apache.camel.itest.osgi.OSGiIntegrationTestSupport; --import org.apache.karaf.testing.Helper; - import org.junit.Test; - import org.junit.runner.RunWith; - import org.ops4j.pax.exam.Option; - import org.ops4j.pax.exam.junit.Configuration; - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static 
org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; - - @RunWith(JUnit4TestRunner.class) - public class FreemarkerTest extends OSGiIntegrationTestSupport { -@@ -64,20 +60,14 @@ public void configure() { - } - - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-freemarker""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-freemarker"")); - - return options; - } -+ -+ - } -\ No newline at end of file -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/ftp/FtpTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/ftp/FtpTest.java -index b4f48c1400b36..7a5a1b460e19f 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/ftp/FtpTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/ftp/FtpTest.java -@@ -62,6 +62,9 @@ public static Option[] configure() throws Exception { - mavenBundle().groupId(""org.apache.ftpserver"").artifactId(""ftpserver-core"").version(""1.0.5""), - mavenBundle().groupId(""org.apache.ftpserver"").artifactId(""ftplet-api"").version(""1.0.5""), - -+ // install the spring, http features first -+ scanFeatures(getKarafFeatureUrl(), ""spring"", ""spring-dm"", ""jetty""), -+ - // using the features to install the camel components - scanFeatures(getCamelKarafFeatureUrl(), - ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-ftp""), -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/groovy/GroovyTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/groovy/GroovyTest.java -index d575d3771b278..39e9346b42a16 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/groovy/GroovyTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/groovy/GroovyTest.java -@@ -19,18 +19,15 @@ - import org.apache.camel.builder.RouteBuilder; - import org.apache.camel.component.mock.MockEndpoint; - import org.apache.camel.itest.osgi.OSGiIntegrationTestSupport; --import org.apache.karaf.testing.Helper; - import org.junit.Test; - import org.junit.runner.RunWith; - import org.ops4j.pax.exam.Option; - import org.ops4j.pax.exam.junit.Configuration; - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; -+ - - @RunWith(JUnit4TestRunner.class) - public class GroovyTest extends OSGiIntegrationTestSupport { -@@ -52,19 +49,11 @@ public void configure() { - } - - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how 
you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-test"", ""camel-groovy""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-groovy"")); - - return options; - } -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/hawtdb/HawtDBAggregateRouteTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/hawtdb/HawtDBAggregateRouteTest.java -index 8d25c4e8825c6..ff151bc9c603c 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/hawtdb/HawtDBAggregateRouteTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/hawtdb/HawtDBAggregateRouteTest.java -@@ -24,18 +24,14 @@ - import org.apache.camel.component.mock.MockEndpoint; - import org.apache.camel.itest.osgi.OSGiIntegrationTestSupport; - import org.apache.camel.processor.aggregate.AggregationStrategy; --import org.apache.karaf.testing.Helper; - import org.junit.Test; - import org.junit.runner.RunWith; - import org.ops4j.pax.exam.Option; - import org.ops4j.pax.exam.junit.Configuration; - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; - - @RunWith(JUnit4TestRunner.class) - public class HawtDBAggregateRouteTest extends OSGiIntegrationTestSupport { -@@ -91,21 +87,13 @@ public Exchange aggregate(Exchange oldExchange, Exchange newExchange) { - return oldExchange; - } - } -- -+ - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-hawtdb""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-hawtdb"")); - - return options; - } -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/hl7/HL7DataFormatTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/hl7/HL7DataFormatTest.java -index afe4473efd4bd..2c110c8a0a09d 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/hl7/HL7DataFormatTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/hl7/HL7DataFormatTest.java -@@ -29,11 +29,8 @@ - import org.ops4j.pax.exam.junit.Configuration; - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; - import static org.ops4j.pax.exam.OptionUtils.combine; - import static 
org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; - - @RunWith(JUnit4TestRunner.class) - public class HL7DataFormatTest extends OSGiIntegrationTestSupport { -@@ -105,22 +102,15 @@ private static String createHL7AsString() { - body.append(line8); - return body.toString(); - } -- -+ - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-hl7""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -- -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-hl7"")); -+ - return options; - } -+ - } -\ No newline at end of file -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/hl7/HL7MLLPCodecTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/hl7/HL7MLLPCodecTest.java -index 61ba7e7c9dc94..a799df9982bdd 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/hl7/HL7MLLPCodecTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/hl7/HL7MLLPCodecTest.java -@@ -19,7 +19,6 @@ - import org.apache.camel.Exchange; - import org.apache.camel.Processor; - import org.apache.camel.itest.osgi.OSGiIntegrationSpringTestSupport; --import org.apache.karaf.testing.Helper; - import org.junit.Test; - import org.junit.runner.RunWith; - import org.ops4j.pax.exam.Option; -@@ -27,12 +26,8 @@ - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - import org.springframework.osgi.context.support.OsgiBundleXmlApplicationContext; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; --import static org.ops4j.pax.exam.CoreOptions.mavenBundle; - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; - - @RunWith(JUnit4TestRunner.class) - public class HL7MLLPCodecTest extends OSGiIntegrationSpringTestSupport implements Processor { -@@ -63,25 +58,15 @@ public void process(Exchange exchange) throws Exception { - String out = ""MSH|^~\\&|MYSENDER||||200701011539||ADR^A19||||123\rMSA|AA|123\n""; - exchange.getOut().setBody(out); - } -- -+ - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-mina"", ""camel-hl7""), -- -- // add hl7 osgi bundle -- mavenBundle().groupId(""http://hl7api.sourceforge.net/m2/!ca.uhn.hapi"").artifactId(""hapi-osgi-base"").version(""1.0.1""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -- -+ 
getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-hl7"", ""camel-mina"")); -+ - return options; - } -+ - } -\ No newline at end of file -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/http/HttpTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/http/HttpTest.java -index 988592314a132..49fa76f9a6daf 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/http/HttpTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/http/HttpTest.java -@@ -18,7 +18,6 @@ - - import org.apache.camel.builder.RouteBuilder; - import org.apache.camel.itest.osgi.OSGiIntegrationTestSupport; --import org.apache.karaf.testing.Helper; - import org.junit.Ignore; - import org.junit.Test; - import org.junit.runner.RunWith; -@@ -26,11 +25,8 @@ - import org.ops4j.pax.exam.junit.Configuration; - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; - - /** - * -@@ -66,22 +62,14 @@ public void configure() { - } - }; - } -- -+ - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-test"", ""camel-jetty"", ""camel-http""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -- -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-jetty"", ""camel-http"")); -+ - return options; - } - -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/http4/Http4Test.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/http4/Http4Test.java -index 9cb2dd9fb2d02..42c9835650bb1 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/http4/Http4Test.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/http4/Http4Test.java -@@ -18,7 +18,6 @@ - - import org.apache.camel.builder.RouteBuilder; - import org.apache.camel.itest.osgi.OSGiIntegrationTestSupport; --import org.apache.karaf.testing.Helper; - import org.junit.Ignore; - import org.junit.Test; - import org.junit.runner.RunWith; -@@ -26,11 +25,8 @@ - import org.ops4j.pax.exam.junit.Configuration; - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; - - /** - * -@@ -66,23 +62,16 @@ public void configure() { - } - }; - } -- -+ - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = 
combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-test"", ""camel-jetty"", ""camel-http4""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -- -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-http4"", ""camel-jetty"")); -+ - return options; - } - -+ - } -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/jasypt/JasyptTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/jasypt/JasyptTest.java -index 9eec49769c45c..e6b5e38cd6fea 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/jasypt/JasyptTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/jasypt/JasyptTest.java -@@ -20,18 +20,14 @@ - import org.apache.camel.component.jasypt.JasyptPropertiesParser; - import org.apache.camel.component.properties.PropertiesComponent; - import org.apache.camel.itest.osgi.OSGiIntegrationTestSupport; --import org.apache.karaf.testing.Helper; - import org.junit.Test; - import org.junit.runner.RunWith; - import org.ops4j.pax.exam.Option; - import org.ops4j.pax.exam.junit.Configuration; - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; - - @RunWith(JUnit4TestRunner.class) - public class JasyptTest extends OSGiIntegrationTestSupport { -@@ -68,20 +64,13 @@ public void configure() { - } - - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-test"", ""camel-jasypt""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-jasypt"")); - - return options; - } -+ - } -\ No newline at end of file -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/jaxb/JaxbDataFormatTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/jaxb/JaxbDataFormatTest.java -index 75285698d0c89..2fc3cf5adfa1c 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/jaxb/JaxbDataFormatTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/jaxb/JaxbDataFormatTest.java -@@ -20,18 +20,14 @@ - import org.apache.camel.component.mock.MockEndpoint; - import org.apache.camel.converter.jaxb.JaxbDataFormat; - import org.apache.camel.itest.osgi.OSGiIntegrationTestSupport; --import org.apache.karaf.testing.Helper; - import org.junit.Test; - import org.junit.runner.RunWith; - import 
org.ops4j.pax.exam.Option; - import org.ops4j.pax.exam.junit.Configuration; - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; - - @RunWith(JUnit4TestRunner.class) - public class JaxbDataFormatTest extends OSGiIntegrationTestSupport { -@@ -58,24 +54,16 @@ public void testSendMessage() throws Exception { - template.sendBodyAndHeader(""direct:start"", ""FOOBAR"", - ""foo"", ""bar""); - assertMockEndpointsSatisfied(); -- } -+ } - - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-jaxb""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-jaxb"")); - - return options; - } -+ - } -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/jaxb/JaxbFallbackConverterSpringTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/jaxb/JaxbFallbackConverterSpringTest.java -index be157a54c4ddf..d5d2f6bed1551 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/jaxb/JaxbFallbackConverterSpringTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/jaxb/JaxbFallbackConverterSpringTest.java -@@ -18,7 +18,6 @@ - - import org.apache.camel.component.mock.MockEndpoint; - import org.apache.camel.itest.osgi.OSGiIntegrationSpringTestSupport; --import org.apache.karaf.testing.Helper; - import org.junit.Test; - import org.junit.runner.RunWith; - import org.ops4j.pax.exam.Option; -@@ -26,11 +25,8 @@ - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - import org.springframework.osgi.context.support.OsgiBundleXmlApplicationContext; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; - - @RunWith(JUnit4TestRunner.class) - public class JaxbFallbackConverterSpringTest extends OSGiIntegrationSpringTestSupport { -@@ -56,19 +52,11 @@ public void testSendMessage() throws Exception { - } - - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-jaxb""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), 
equinox()); -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-jaxb"")); - - return options; - } -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/jaxb/JaxbFallbackConverterTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/jaxb/JaxbFallbackConverterTest.java -index e6157f1946603..60886b780cc7e 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/jaxb/JaxbFallbackConverterTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/jaxb/JaxbFallbackConverterTest.java -@@ -19,18 +19,14 @@ - import org.apache.camel.builder.RouteBuilder; - import org.apache.camel.component.mock.MockEndpoint; - import org.apache.camel.itest.osgi.OSGiIntegrationTestSupport; --import org.apache.karaf.testing.Helper; - import org.junit.Test; - import org.junit.runner.RunWith; - import org.ops4j.pax.exam.Option; - import org.ops4j.pax.exam.junit.Configuration; - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; - - @RunWith(JUnit4TestRunner.class) - public class JaxbFallbackConverterTest extends OSGiIntegrationTestSupport { -@@ -59,19 +55,11 @@ public void testSendMessage() throws Exception { - } - - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-jaxb""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-jaxb"")); - - return options; - } -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/jetty/OSGiMulitJettyCamelContextsTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/jetty/OSGiMulitJettyCamelContextsTest.java -index 5bfca0fb5b968..9a34c5aae2197 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/jetty/OSGiMulitJettyCamelContextsTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/jetty/OSGiMulitJettyCamelContextsTest.java -@@ -27,17 +27,13 @@ - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - import org.ops4j.pax.swissbox.tinybundles.dp.Constants; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; -- - import static org.ops4j.pax.exam.CoreOptions.provision; - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; - import static org.ops4j.pax.swissbox.tinybundles.core.TinyBundles.newBundle; - - @RunWith(JUnit4TestRunner.class) --@Ignore(""TODO: fix me"") 
-+//@Ignore(""TODO: fix me"") - public class OSGiMulitJettyCamelContextsTest extends OSGiIntegrationTestSupport { - - @Test -@@ -66,16 +62,13 @@ public void testStoppingJettyContext() throws Exception { - response = template.requestBody(endpointURI, ""Hello World"", String.class); - assertEquals(""response is "" , ""camelContext2"", response); - } -+ - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-jetty""), -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-jetty""), - //set up the camel context bundle1 - provision(newBundle().add(""META-INF/spring/CamelContext1.xml"", OSGiMulitJettyCamelContextsTest.class.getResource(""CamelContext1.xml"")) - .add(JettyProcessor.class) -@@ -89,15 +82,11 @@ public static Option[] configure() throws Exception { - .add(JettyProcessor.class) - .set(Constants.BUNDLE_SYMBOLICNAME, ""org.apache.camel.itest.osgi.CamelContextBundle2"") - .set(Constants.DYNAMICIMPORT_PACKAGE, ""*"") -- .set(Constants.BUNDLE_NAME, ""CamelContext2"").build()), -- -- -- workingDirectory(""target/paxrunner/""), -- -- equinox(), -- felix()); -+ .set(Constants.BUNDLE_NAME, ""CamelContext2"").build()) -+ ); - - return options; - } -+ - - } -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/jms/JmsTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/jms/JmsTest.java -index c36dc687a6401..b3070effcfdc9 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/jms/JmsTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/jms/JmsTest.java -@@ -17,7 +17,6 @@ - package org.apache.camel.itest.osgi.jms; - - import org.apache.camel.itest.osgi.OSGiIntegrationSpringTestSupport; --import org.apache.karaf.testing.Helper; - import org.junit.Test; - import org.junit.runner.RunWith; - import org.ops4j.pax.exam.Option; -@@ -25,11 +24,8 @@ - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - import org.springframework.osgi.context.support.OsgiBundleXmlApplicationContext; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; - - /** - * @version -@@ -54,21 +50,13 @@ public void testJms() throws Exception { - @Configuration - public static Option[] configure() throws Exception { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -+ getDefaultCamelKarafOptions(), - - // using the features to install AMQ - scanFeatures(""mvn:org.apache.activemq/activemq-karaf/5.4.0/xml/features"", ""activemq""), - - // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", 
""camel-jms""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-jms"")); - - return options; - } -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/jpa/JpaRouteTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/jpa/JpaRouteTest.java -index 52ba5be5d7497..a5028c5f24538 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/jpa/JpaRouteTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/jpa/JpaRouteTest.java -@@ -132,7 +132,10 @@ public static Option[] configure() throws Exception { - // Default karaf environment - Helper.getDefaultOptions( - // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -+ Helper.setLogLevel(""WARN"")), -+ -+ // install the spring, http features first -+ scanFeatures(getKarafFeatureUrl(), ""spring"", ""spring-dm"", ""jetty""), - // using the features to install the camel components - scanFeatures(getCamelKarafFeatureUrl(), - ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-jpa""), -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/mail/MailRouteTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/mail/MailRouteTest.java -index 5827694cc4d3d..e8d8e68cd7e9a 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/mail/MailRouteTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/mail/MailRouteTest.java -@@ -125,6 +125,9 @@ public static Option[] configure() throws Exception { - Helper.getDefaultOptions( - // this is how you set the default log level when using pax logging (logProfile) - Helper.setLogLevel(""WARN"")), -+ // install the spring, http features first -+ scanFeatures(getKarafFeatureUrl(), ""spring"", ""spring-dm"", ""jetty""), -+ - // using the features to install the camel components - scanFeatures(getCamelKarafFeatureUrl(), - ""camel-core"", ""camel-spring"", ""camel-test""), -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/mina/MinaTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/mina/MinaTest.java -index f6cb5d14ddd2d..4d1464b1db44d 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/mina/MinaTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/mina/MinaTest.java -@@ -18,18 +18,14 @@ - - import org.apache.camel.builder.RouteBuilder; - import org.apache.camel.itest.osgi.OSGiIntegrationTestSupport; --import org.apache.karaf.testing.Helper; - import org.junit.Test; - import org.junit.runner.RunWith; - import org.ops4j.pax.exam.Option; - import org.ops4j.pax.exam.junit.Configuration; - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; - - @RunWith(JUnit4TestRunner.class) - public class MinaTest extends OSGiIntegrationTestSupport { -@@ -55,20 +51,13 @@ public void configure() { - } - - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- 
Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-test"", ""camel-mina""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-mina"")); - - return options; - } -+ - } -\ No newline at end of file -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/mybatis/MyBatisTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/mybatis/MyBatisTest.java -index 12f93145e98fc..25c3a510f2426 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/mybatis/MyBatisTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/mybatis/MyBatisTest.java -@@ -112,6 +112,9 @@ public static Option[] configure() throws Exception { - // this is how you set the default log level when using pax logging (logProfile) - Helper.setLogLevel(""WARN"")), - -+ // install the spring, http features first -+ scanFeatures(getKarafFeatureUrl(), ""spring"", ""spring-dm"", ""jetty""), -+ - mavenBundle().groupId(""org.apache.derby"").artifactId(""derby"").version(""10.4.2.0""), - - // using the features to install the camel components -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/netty/NettyTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/netty/NettyTest.java -index 4eb4133756299..c4ef7e8d7216f 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/netty/NettyTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/netty/NettyTest.java -@@ -18,18 +18,14 @@ - - import org.apache.camel.builder.RouteBuilder; - import org.apache.camel.itest.osgi.OSGiIntegrationTestSupport; --import org.apache.karaf.testing.Helper; - import org.junit.Test; - import org.junit.runner.RunWith; - import org.ops4j.pax.exam.Option; - import org.ops4j.pax.exam.junit.Configuration; - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; - - @RunWith(JUnit4TestRunner.class) - public class NettyTest extends OSGiIntegrationTestSupport { -@@ -55,20 +51,13 @@ public void configure() { - } - - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-test"", ""camel-netty""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-netty"")); - - return options; - } -+ - } -\ No newline at end of file -diff --git 
a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/protobuf/ProtobufRouteTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/protobuf/ProtobufRouteTest.java -index 865657568af06..fb4ca25bd4fd6 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/protobuf/ProtobufRouteTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/protobuf/ProtobufRouteTest.java -@@ -23,18 +23,14 @@ - import org.apache.camel.dataformat.protobuf.ProtobufDataFormat; - import org.apache.camel.dataformat.protobuf.generated.AddressBookProtos; - import org.apache.camel.itest.osgi.OSGiIntegrationTestSupport; --import org.apache.karaf.testing.Helper; - import org.junit.Test; - import org.junit.runner.RunWith; - import org.ops4j.pax.exam.Option; - import org.ops4j.pax.exam.junit.Configuration; - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; - - @RunWith(JUnit4TestRunner.class) - public class ProtobufRouteTest extends OSGiIntegrationTestSupport { -@@ -109,21 +105,13 @@ public void configure() throws Exception { - } - - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-protobuf""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-protobuf"")); - - return options; - } -- -+ - } -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/quartz/QuartzCronRouteTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/quartz/QuartzCronRouteTest.java -index 365f09f602046..bb6ac7568670b 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/quartz/QuartzCronRouteTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/quartz/QuartzCronRouteTest.java -@@ -60,6 +60,9 @@ public static Option[] configure() throws Exception { - Helper.getDefaultOptions( - // this is how you set the default log level when using pax logging (logProfile) - Helper.setLogLevel(""WARN"")), -+ // install the spring, http features first -+ scanFeatures(getKarafFeatureUrl(), ""spring"", ""spring-dm"", ""jetty""), -+ - // using the features to install the camel components - scanFeatures(getCamelKarafFeatureUrl(), - ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-quartz""), -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/restlet/RestletTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/restlet/RestletTest.java -index 463deb5d5e4ce..1988b7174b60c 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/restlet/RestletTest.java -+++ 
b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/restlet/RestletTest.java -@@ -23,18 +23,14 @@ - import org.apache.camel.Processor; - import org.apache.camel.builder.RouteBuilder; - import org.apache.camel.itest.osgi.OSGiIntegrationTestSupport; --import org.apache.karaf.testing.Helper; - import org.junit.Test; - import org.junit.runner.RunWith; - import org.ops4j.pax.exam.Option; - import org.ops4j.pax.exam.junit.Configuration; - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; - - @RunWith(JUnit4TestRunner.class) - public class RestletTest extends OSGiIntegrationTestSupport { -@@ -63,22 +59,15 @@ public void process(Exchange exchange) throws Exception { - } - }; - } -- -+ - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-restlet""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -- -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-restlet"")); -+ - return options; - } -+ - } -\ No newline at end of file -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/restlet/example/RestletDomainServiceTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/restlet/example/RestletDomainServiceTest.java -index ebb5f917f0211..0812124f2576c 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/restlet/example/RestletDomainServiceTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/restlet/example/RestletDomainServiceTest.java -@@ -18,7 +18,6 @@ - - import org.apache.camel.Exchange; - import org.apache.camel.itest.osgi.OSGiIntegrationSpringTestSupport; --import org.apache.karaf.testing.Helper; - import org.junit.Ignore; - import org.junit.Test; - import org.junit.runner.RunWith; -@@ -27,11 +26,8 @@ - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - import org.springframework.osgi.context.support.OsgiBundleXmlApplicationContext; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; - - /** - * @version -@@ -65,23 +61,16 @@ public void testGetDomain() throws Exception { - - assertEquals(""{www.google.com}"", response); - } -- -+ - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- 
Helper.setLogLevel(""WARN"")), -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-cxf"", ""camel-restlet""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -- -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-cxf"", ""camel-restlet"")); -+ - return options; - } -+ - - } -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/rss/RssPollingConsumerTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/rss/RssPollingConsumerTest.java -index 264c3fcb86604..2dec9d10e880e 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/rss/RssPollingConsumerTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/rss/RssPollingConsumerTest.java -@@ -26,18 +26,14 @@ - import org.apache.camel.component.mock.MockEndpoint; - import org.apache.camel.component.rss.RssConstants; - import org.apache.camel.itest.osgi.OSGiIntegrationTestSupport; --import org.apache.karaf.testing.Helper; - import org.junit.Test; - import org.junit.runner.RunWith; - import org.ops4j.pax.exam.Option; - import org.ops4j.pax.exam.junit.Configuration; - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; - - @RunWith(JUnit4TestRunner.class) - public class RssPollingConsumerTest extends OSGiIntegrationTestSupport { -@@ -76,21 +72,13 @@ public void configure() throws Exception { - } - - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-rss""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-rss"")); - - return options; - } -- -+ - } -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/script/GroovyScriptOsgiTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/script/GroovyScriptOsgiTest.java -index 4349a469e4053..1a32b6c6b38e1 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/script/GroovyScriptOsgiTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/script/GroovyScriptOsgiTest.java -@@ -19,19 +19,16 @@ - import org.apache.camel.builder.RouteBuilder; - import org.apache.camel.component.mock.MockEndpoint; - import org.apache.camel.itest.osgi.OSGiIntegrationTestSupport; --import org.apache.karaf.testing.Helper; -+ - import org.junit.Ignore; - import org.junit.Test; - import org.junit.runner.RunWith; - import org.ops4j.pax.exam.Option; - import org.ops4j.pax.exam.junit.Configuration; - import 
org.ops4j.pax.exam.junit.JUnit4TestRunner; --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; --import static org.ops4j.pax.exam.CoreOptions.mavenBundle; -+ - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; - - /** - * Test camel-script for groovy expressions in OSGi -@@ -54,25 +51,14 @@ public void testLanguage() throws Exception { - - assertMockEndpointsSatisfied(); - } -- -+ - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-script""), -- -- mavenBundle().groupId(""org.apache.servicemix.bundles"").artifactId(""org.apache.servicemix.bundles.ant"").version(""1.7.0_3""), -- mavenBundle().groupId(""org.codehaus.groovy"").artifactId(""groovy-all"").version(""1.7.9""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -- -+ getDefaultCamelKarafOptions(), -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-script"", ""camel-groovy"")); - return options; - } -+ -+ - } -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/script/RubyOsgiTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/script/RubyOsgiTest.java -index 052c8164f91cc..0e856e31b84d3 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/script/RubyOsgiTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/script/RubyOsgiTest.java -@@ -55,22 +55,15 @@ public void testSendMessage() throws Exception { - template.sendBody(""direct:start"", ""Hello""); - assertMockEndpointsSatisfied(); - } -- -+ - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-script"", ""camel-ruby""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -- -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-script"", ""camel-ruby"")); -+ - return options; - } -+ - } -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/servlet/ServletComponentTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/servlet/ServletComponentTest.java -index 0c0a49ecb7b63..73767cb58d478 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/servlet/ServletComponentTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/servlet/ServletComponentTest.java -@@ -50,7 +50,7 @@ public static Option[] configure() throws Exception { - Helper.getDefaultOptions( - // this is how you set the 
default log level when using pax logging (logProfile) - Helper.setLogLevel(""WARN"")), -- Helper.loadKarafStandardFeatures(""http"", ""war""), -+ Helper.loadKarafStandardFeatures(""spring"", ""jetty"", ""http"", ""war""), - // set the system property for pax web - org.ops4j.pax.exam.CoreOptions.systemProperty(""org.osgi.service.http.port"").value(""9080""), - -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/servlet/ServletServicesTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/servlet/ServletServicesTest.java -index 715323449bb3e..2013ffb86662b 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/servlet/ServletServicesTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/servlet/ServletServicesTest.java -@@ -49,7 +49,7 @@ public static Option[] configure() throws Exception { - Helper.getDefaultOptions( - // this is how you set the default log level when using pax logging (logProfile) - Helper.setLogLevel(""WARN"")), -- Helper.loadKarafStandardFeatures(""http"", ""war""), -+ Helper.loadKarafStandardFeatures(""spring"", ""http"", ""war""), - // set the system property for pax web - org.ops4j.pax.exam.CoreOptions.systemProperty(""org.osgi.service.http.port"").value(""9080""), - -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/shiro/ShiroAuthenticationTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/shiro/ShiroAuthenticationTest.java -index 020c74a637375..e4ecf73b5e514 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/shiro/ShiroAuthenticationTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/shiro/ShiroAuthenticationTest.java -@@ -26,7 +26,6 @@ - import org.apache.camel.component.shiro.security.ShiroSecurityToken; - import org.apache.camel.component.shiro.security.ShiroSecurityTokenInjector; - import org.apache.camel.itest.osgi.OSGiIntegrationTestSupport; --import org.apache.karaf.testing.Helper; - import org.apache.shiro.authc.IncorrectCredentialsException; - import org.apache.shiro.authc.LockedAccountException; - import org.apache.shiro.authc.UnknownAccountException; -@@ -36,11 +35,8 @@ - import org.ops4j.pax.exam.junit.Configuration; - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; - - @RunWith(JUnit4TestRunner.class) - public class ShiroAuthenticationTest extends OSGiIntegrationTestSupport { -@@ -88,24 +84,15 @@ public void testSuccessfulShiroAuthenticationWithNoAuthorization() throws Except - } - - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-shiro""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other 
camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-shiro"")); - - return options; - } -- -- -+ - protected RouteBuilder createRouteBuilder() throws Exception { - return new RouteBuilder() { - public void configure() { -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/stream/StreamTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/stream/StreamTest.java -index 526a968165717..d3e312eea7295 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/stream/StreamTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/stream/StreamTest.java -@@ -52,22 +52,14 @@ public void configure() { - } - }; - } -- - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-test"", ""camel-stream""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-stream"")); - - return options; - } -+ - } -\ No newline at end of file -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/syslog/SyslogTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/syslog/SyslogTest.java -index 03e12ab9f1998..e1f3cf4f5cfd4 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/syslog/SyslogTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/syslog/SyslogTest.java -@@ -30,7 +30,6 @@ - import org.apache.camel.component.syslog.SyslogMessage; - import org.apache.camel.itest.osgi.OSGiIntegrationTestSupport; - import org.apache.camel.spi.DataFormat; --import org.apache.karaf.testing.Helper; - import org.junit.Test; - import org.junit.runner.RunWith; - import org.ops4j.pax.exam.Option; -@@ -38,11 +37,9 @@ - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; -+ - - @RunWith(JUnit4TestRunner.class) - public class SyslogTest extends OSGiIntegrationTestSupport { -@@ -100,19 +97,13 @@ public void process(Exchange ex) { - } - - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), ""camel-core"", ""camel-test"", ""camel-mina"", ""camel-syslog""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -- -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-mina"", 
""camel-syslog"")); -+ - return options; - } -+ - } -diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/velocity/VelocityTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/velocity/VelocityTest.java -index 2300a3d7c3fb2..7ba634af94c9b 100644 ---- a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/velocity/VelocityTest.java -+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/velocity/VelocityTest.java -@@ -22,18 +22,14 @@ - import org.apache.camel.Processor; - import org.apache.camel.builder.RouteBuilder; - import org.apache.camel.itest.osgi.OSGiIntegrationTestSupport; --import org.apache.karaf.testing.Helper; - import org.junit.Test; - import org.junit.runner.RunWith; - import org.ops4j.pax.exam.Option; - import org.ops4j.pax.exam.junit.Configuration; - import org.ops4j.pax.exam.junit.JUnit4TestRunner; - --import static org.ops4j.pax.exam.CoreOptions.equinox; --import static org.ops4j.pax.exam.CoreOptions.felix; - import static org.ops4j.pax.exam.OptionUtils.combine; - import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures; --import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.workingDirectory; - - @RunWith(JUnit4TestRunner.class) - public class VelocityTest extends OSGiIntegrationTestSupport { -@@ -65,20 +61,14 @@ public void configure() { - } - - @Configuration -- public static Option[] configure() throws Exception { -+ public static Option[] configure() { - Option[] options = combine( -- // Default karaf environment -- Helper.getDefaultOptions( -- // this is how you set the default log level when using pax logging (logProfile) -- Helper.setLogLevel(""WARN"")), -- // using the features to install the camel components -- scanFeatures(getCamelKarafFeatureUrl(), -- ""camel-core"", ""camel-spring"", ""camel-test"", ""camel-velocity""), -- -- workingDirectory(""target/paxrunner/""), -- -- felix(), equinox()); -+ getDefaultCamelKarafOptions(), -+ // using the features to install the other camel components -+ scanFeatures(getCamelKarafFeatureUrl(), ""camel-velocity"")); - - return options; - } -+ -+ - } -\ No newline at end of file -diff --git a/tests/camel-itest-osgi/src/test/resources/org/apache/camel/itest/osgi/jetty/CamelContext1.xml b/tests/camel-itest-osgi/src/test/resources/org/apache/camel/itest/osgi/jetty/CamelContext1.xml -index f47d175222b5e..49e4097ef3525 100644 ---- a/tests/camel-itest-osgi/src/test/resources/org/apache/camel/itest/osgi/jetty/CamelContext1.xml -+++ b/tests/camel-itest-osgi/src/test/resources/org/apache/camel/itest/osgi/jetty/CamelContext1.xml -@@ -29,7 +29,7 @@ - - - -- -+ - - - -diff --git a/tests/camel-itest-osgi/src/test/resources/org/apache/camel/itest/osgi/jetty/CamelContext2.xml b/tests/camel-itest-osgi/src/test/resources/org/apache/camel/itest/osgi/jetty/CamelContext2.xml -index de47fc04df34c..f8c0387eb636f 100644 ---- a/tests/camel-itest-osgi/src/test/resources/org/apache/camel/itest/osgi/jetty/CamelContext2.xml -+++ b/tests/camel-itest-osgi/src/test/resources/org/apache/camel/itest/osgi/jetty/CamelContext2.xml -@@ -29,7 +29,7 @@ - - - -- -+ - - - " -ccd24b5c3ce471428531e12737591b94c91db8bf,ReactiveX-RxJava,Add doOnSubscribe for Single--,a,https://github.com/ReactiveX/RxJava,"diff --git a/src/main/java/rx/Single.java b/src/main/java/rx/Single.java -index 20b983c063..a8a10bafb9 100644 ---- a/src/main/java/rx/Single.java -+++ b/src/main/java/rx/Single.java -@@ -2250,6 +2250,28 @@ public void onNext(T t) { - return lift(new 
OperatorDoOnEach(observer)); - } - -+ /** -+ * Modifies the source {@code Single} so that it invokes the given action when it is subscribed from -+ * its subscribers. Each subscription will result in an invocation of the given action except when the -+ * source {@code Single} is reference counted, in which case the source {@code Single} will invoke -+ * the given action for the first subscription. -+ *

-+ * <dl> -+ *  <dt><b>Scheduler:</b></dt> -+ *  <dd>{@code doOnSubscribe} does not operate by default on a particular {@link Scheduler}.</dd> -+ * </dl>
-+ * -+ * @param subscribe -+ * the action that gets called when an observer subscribes to this {@code Single} -+ * @return the source {@code Single} modified so as to call this Action when appropriate -+ * @see ReactiveX operators documentation: Do -+ */ -+ @Experimental -+ public final Single doOnSubscribe(final Action0 subscribe) { -+ return lift(new OperatorDoOnSubscribe(subscribe)); -+ } -+ - /** - * Returns an Single that emits the items emitted by the source Single shifted forward in time by a - * specified delay. Error notifications from the source Single are not delayed. -diff --git a/src/test/java/rx/SingleTest.java b/src/test/java/rx/SingleTest.java -index 3ce86e9772..17794e4dbb 100644 ---- a/src/test/java/rx/SingleTest.java -+++ b/src/test/java/rx/SingleTest.java -@@ -878,6 +878,43 @@ public void doOnSuccessShouldNotSwallowExceptionThrownByAction() { - verify(action).call(eq(""value"")); - } - -+ @Test -+ public void doOnSubscribeShouldInvokeAction() { -+ Action0 action = mock(Action0.class); -+ Single single = Single.just(1).doOnSubscribe(action); -+ -+ verifyZeroInteractions(action); -+ -+ single.subscribe(); -+ single.subscribe(); -+ -+ verify(action, times(2)).call(); -+ } -+ -+ @Test -+ public void doOnSubscribeShouldInvokeActionBeforeSubscriberSubscribes() { -+ final List callSequence = new ArrayList(2); -+ -+ Single single = Single.create(new OnSubscribe() { -+ @Override -+ public void call(SingleSubscriber singleSubscriber) { -+ callSequence.add(""onSubscribe""); -+ singleSubscriber.onSuccess(1); -+ } -+ }).doOnSubscribe(new Action0() { -+ @Override -+ public void call() { -+ callSequence.add(""doOnSubscribe""); -+ } -+ }); -+ -+ single.subscribe(); -+ -+ assertEquals(2, callSequence.size()); -+ assertEquals(""doOnSubscribe"", callSequence.get(0)); -+ assertEquals(""onSubscribe"", callSequence.get(1)); -+ } -+ - @Test - public void delayWithSchedulerShouldDelayCompletion() { - TestScheduler scheduler = new TestScheduler();" -e6a03e2fc037b48f3989ef899310e007bb3d16a9,hadoop,Merge r1601491 from trunk. YARN-2030. Augmented- RMStateStore with state machine. Contributed by Binglin Chang--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1601492 13f79535-47bb-0310-9956-ffa450edef68-,p,https://github.com/apache/hadoop,"diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt -index 553f378ceee6e..29add5e139352 100644 ---- a/hadoop-yarn-project/CHANGES.txt -+++ b/hadoop-yarn-project/CHANGES.txt -@@ -134,6 +134,8 @@ Release 2.5.0 - UNRELEASED - YARN-2132. ZKRMStateStore.ZKAction#runWithRetries doesn't log the exception - it encounters. (Vamsee Yarlagadda via kasha) - -+ YARN-2030. Augmented RMStateStore with state machine. 
(Binglin Chang via jianhe) -+ - OPTIMIZATIONS - - BUG FIXES -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/FileSystemRMStateStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/FileSystemRMStateStore.java -index 1f6e175ced108..7f4dad83fe92a 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/FileSystemRMStateStore.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/FileSystemRMStateStore.java -@@ -47,6 +47,8 @@ - import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ApplicationStateDataProto; - import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RMStateVersionProto; - import org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier; -+import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.ApplicationAttemptStateData; -+import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.ApplicationStateData; - import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.RMStateVersion; - import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.impl.pb.ApplicationAttemptStateDataPBImpl; - import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.impl.pb.ApplicationStateDataPBImpl; -@@ -314,7 +316,7 @@ private void loadRMDTSecretManagerState(RMState rmState) throws Exception { - - @Override - public synchronized void storeApplicationStateInternal(ApplicationId appId, -- ApplicationStateDataPBImpl appStateDataPB) throws Exception { -+ ApplicationStateData appStateDataPB) throws Exception { - String appIdStr = appId.toString(); - Path appDirPath = getAppDir(rmAppRoot, appIdStr); - fs.mkdirs(appDirPath); -@@ -334,7 +336,7 @@ public synchronized void storeApplicationStateInternal(ApplicationId appId, - - @Override - public synchronized void updateApplicationStateInternal(ApplicationId appId, -- ApplicationStateDataPBImpl appStateDataPB) throws Exception { -+ ApplicationStateData appStateDataPB) throws Exception { - String appIdStr = appId.toString(); - Path appDirPath = getAppDir(rmAppRoot, appIdStr); - Path nodeCreatePath = getNodePath(appDirPath, appIdStr); -@@ -354,7 +356,7 @@ public synchronized void updateApplicationStateInternal(ApplicationId appId, - @Override - public synchronized void storeApplicationAttemptStateInternal( - ApplicationAttemptId appAttemptId, -- ApplicationAttemptStateDataPBImpl attemptStateDataPB) -+ ApplicationAttemptStateData attemptStateDataPB) - throws Exception { - Path appDirPath = - getAppDir(rmAppRoot, appAttemptId.getApplicationId().toString()); -@@ -375,7 +377,7 @@ public synchronized void storeApplicationAttemptStateInternal( - @Override - public synchronized void updateApplicationAttemptStateInternal( - ApplicationAttemptId appAttemptId, -- ApplicationAttemptStateDataPBImpl attemptStateDataPB) -+ ApplicationAttemptStateData attemptStateDataPB) - throws Exception { - Path appDirPath = - getAppDir(rmAppRoot, appAttemptId.getApplicationId().toString()); -diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/MemoryRMStateStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/MemoryRMStateStore.java -index c9f3541f53542..a43b20da39256 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/MemoryRMStateStore.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/MemoryRMStateStore.java -@@ -32,9 +32,9 @@ - import org.apache.hadoop.yarn.api.records.ApplicationId; - import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; - import org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier; -+import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.ApplicationAttemptStateData; -+import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.ApplicationStateData; - import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.RMStateVersion; --import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.impl.pb.ApplicationAttemptStateDataPBImpl; --import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.impl.pb.ApplicationStateDataPBImpl; - - import com.google.common.annotations.VisibleForTesting; - -@@ -80,7 +80,7 @@ protected synchronized void closeInternal() throws Exception { - - @Override - public void storeApplicationStateInternal(ApplicationId appId, -- ApplicationStateDataPBImpl appStateData) -+ ApplicationStateData appStateData) - throws Exception { - ApplicationState appState = - new ApplicationState(appStateData.getSubmitTime(), -@@ -92,7 +92,7 @@ public void storeApplicationStateInternal(ApplicationId appId, - - @Override - public void updateApplicationStateInternal(ApplicationId appId, -- ApplicationStateDataPBImpl appStateData) throws Exception { -+ ApplicationStateData appStateData) throws Exception { - ApplicationState updatedAppState = - new ApplicationState(appStateData.getSubmitTime(), - appStateData.getStartTime(), -@@ -112,7 +112,7 @@ public void updateApplicationStateInternal(ApplicationId appId, - @Override - public synchronized void storeApplicationAttemptStateInternal( - ApplicationAttemptId appAttemptId, -- ApplicationAttemptStateDataPBImpl attemptStateData) -+ ApplicationAttemptStateData attemptStateData) - throws Exception { - Credentials credentials = null; - if(attemptStateData.getAppAttemptTokens() != null){ -@@ -137,7 +137,7 @@ public synchronized void storeApplicationAttemptStateInternal( - @Override - public synchronized void updateApplicationAttemptStateInternal( - ApplicationAttemptId appAttemptId, -- ApplicationAttemptStateDataPBImpl attemptStateData) -+ ApplicationAttemptStateData attemptStateData) - throws Exception { - Credentials credentials = null; - if (attemptStateData.getAppAttemptTokens() != null) { -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/NullRMStateStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/NullRMStateStore.java -index a12099f46f3e2..6a0426c0e8ca2 100644 
---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/NullRMStateStore.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/NullRMStateStore.java -@@ -25,9 +25,9 @@ - import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; - import org.apache.hadoop.yarn.api.records.ApplicationId; - import org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier; -+import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.ApplicationAttemptStateData; -+import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.ApplicationStateData; - import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.RMStateVersion; --import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.impl.pb.ApplicationAttemptStateDataPBImpl; --import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.impl.pb.ApplicationStateDataPBImpl; - - @Unstable - public class NullRMStateStore extends RMStateStore { -@@ -54,13 +54,13 @@ public RMState loadState() throws Exception { - - @Override - protected void storeApplicationStateInternal(ApplicationId appId, -- ApplicationStateDataPBImpl appStateData) throws Exception { -+ ApplicationStateData appStateData) throws Exception { - // Do nothing - } - - @Override - protected void storeApplicationAttemptStateInternal(ApplicationAttemptId attemptId, -- ApplicationAttemptStateDataPBImpl attemptStateData) throws Exception { -+ ApplicationAttemptStateData attemptStateData) throws Exception { - // Do nothing - } - -@@ -102,13 +102,13 @@ public void removeRMDTMasterKeyState(DelegationKey delegationKey) throws Excepti - - @Override - protected void updateApplicationStateInternal(ApplicationId appId, -- ApplicationStateDataPBImpl appStateData) throws Exception { -+ ApplicationStateData appStateData) throws Exception { - // Do nothing - } - - @Override - protected void updateApplicationAttemptStateInternal(ApplicationAttemptId attemptId, -- ApplicationAttemptStateDataPBImpl attemptStateData) throws Exception { -+ ApplicationAttemptStateData attemptStateData) throws Exception { - } - - @Override -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/RMStateStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/RMStateStore.java -index fc4537c793f71..affc6f9d86567 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/RMStateStore.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/RMStateStore.java -@@ -18,7 +18,6 @@ - - package org.apache.hadoop.yarn.server.resourcemanager.recovery; - --import java.nio.ByteBuffer; - import java.util.HashMap; - import java.util.HashSet; - import java.util.Map; -@@ -31,7 +30,6 @@ - import org.apache.hadoop.classification.InterfaceAudience.Private; - import org.apache.hadoop.classification.InterfaceStability.Unstable; - import org.apache.hadoop.conf.Configuration; --import org.apache.hadoop.io.DataOutputBuffer; - import 
org.apache.hadoop.io.Text; - import org.apache.hadoop.security.Credentials; - import org.apache.hadoop.security.token.Token; -@@ -50,6 +48,8 @@ - import org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier; - import org.apache.hadoop.yarn.server.resourcemanager.RMFatalEvent; - import org.apache.hadoop.yarn.server.resourcemanager.RMFatalEventType; -+import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.ApplicationAttemptStateData; -+import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.ApplicationStateData; - import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.RMStateVersion; - import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.impl.pb.ApplicationAttemptStateDataPBImpl; - import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.impl.pb.ApplicationStateDataPBImpl; -@@ -61,6 +61,10 @@ - import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState; - import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptNewSavedEvent; - import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptUpdateSavedEvent; -+import org.apache.hadoop.yarn.state.InvalidStateTransitonException; -+import org.apache.hadoop.yarn.state.SingleArcTransition; -+import org.apache.hadoop.yarn.state.StateMachine; -+import org.apache.hadoop.yarn.state.StateMachineFactory; - - @Private - @Unstable -@@ -83,8 +87,163 @@ public abstract class RMStateStore extends AbstractService { - - public static final Log LOG = LogFactory.getLog(RMStateStore.class); - -+ private enum RMStateStoreState { -+ DEFAULT -+ }; -+ -+ private static final StateMachineFactory -+ stateMachineFactory = new StateMachineFactory( -+ RMStateStoreState.DEFAULT) -+ .addTransition(RMStateStoreState.DEFAULT, RMStateStoreState.DEFAULT, -+ RMStateStoreEventType.STORE_APP, new StoreAppTransition()) -+ .addTransition(RMStateStoreState.DEFAULT, RMStateStoreState.DEFAULT, -+ RMStateStoreEventType.UPDATE_APP, new UpdateAppTransition()) -+ .addTransition(RMStateStoreState.DEFAULT, RMStateStoreState.DEFAULT, -+ RMStateStoreEventType.REMOVE_APP, new RemoveAppTransition()) -+ .addTransition(RMStateStoreState.DEFAULT, RMStateStoreState.DEFAULT, -+ RMStateStoreEventType.STORE_APP_ATTEMPT, new StoreAppAttemptTransition()) -+ .addTransition(RMStateStoreState.DEFAULT, RMStateStoreState.DEFAULT, -+ RMStateStoreEventType.UPDATE_APP_ATTEMPT, new UpdateAppAttemptTransition()); -+ -+ private final StateMachine stateMachine; -+ -+ private static class StoreAppTransition -+ implements SingleArcTransition { -+ @Override -+ public void transition(RMStateStore store, RMStateStoreEvent event) { -+ if (!(event instanceof RMStateStoreAppEvent)) { -+ // should never happen -+ LOG.error(""Illegal event type: "" + event.getClass()); -+ return; -+ } -+ ApplicationState appState = ((RMStateStoreAppEvent) event).getAppState(); -+ ApplicationId appId = appState.getAppId(); -+ ApplicationStateData appStateData = ApplicationStateData -+ .newInstance(appState); -+ LOG.info(""Storing info for app: "" + appId); -+ try { -+ store.storeApplicationStateInternal(appId, appStateData); -+ store.notifyDoneStoringApplication(appId, null); -+ } catch (Exception e) { -+ LOG.error(""Error storing app: "" + appId, e); -+ store.notifyStoreOperationFailed(e); -+ } -+ }; -+ } -+ -+ private static class UpdateAppTransition implements -+ SingleArcTransition { -+ @Override -+ public void transition(RMStateStore store, RMStateStoreEvent event) { -+ if 
(!(event instanceof RMStateUpdateAppEvent)) { -+ // should never happen -+ LOG.error(""Illegal event type: "" + event.getClass()); -+ return; -+ } -+ ApplicationState appState = ((RMStateUpdateAppEvent) event).getAppState(); -+ ApplicationId appId = appState.getAppId(); -+ ApplicationStateData appStateData = ApplicationStateData -+ .newInstance(appState); -+ LOG.info(""Updating info for app: "" + appId); -+ try { -+ store.updateApplicationStateInternal(appId, appStateData); -+ store.notifyDoneUpdatingApplication(appId, null); -+ } catch (Exception e) { -+ LOG.error(""Error updating app: "" + appId, e); -+ store.notifyStoreOperationFailed(e); -+ } -+ }; -+ } -+ -+ private static class RemoveAppTransition implements -+ SingleArcTransition { -+ @Override -+ public void transition(RMStateStore store, RMStateStoreEvent event) { -+ if (!(event instanceof RMStateStoreRemoveAppEvent)) { -+ // should never happen -+ LOG.error(""Illegal event type: "" + event.getClass()); -+ return; -+ } -+ ApplicationState appState = ((RMStateStoreRemoveAppEvent) event) -+ .getAppState(); -+ ApplicationId appId = appState.getAppId(); -+ LOG.info(""Removing info for app: "" + appId); -+ try { -+ store.removeApplicationStateInternal(appState); -+ } catch (Exception e) { -+ LOG.error(""Error removing app: "" + appId, e); -+ store.notifyStoreOperationFailed(e); -+ } -+ }; -+ } -+ -+ private static class StoreAppAttemptTransition implements -+ SingleArcTransition { -+ @Override -+ public void transition(RMStateStore store, RMStateStoreEvent event) { -+ if (!(event instanceof RMStateStoreAppAttemptEvent)) { -+ // should never happen -+ LOG.error(""Illegal event type: "" + event.getClass()); -+ return; -+ } -+ ApplicationAttemptState attemptState = -+ ((RMStateStoreAppAttemptEvent) event).getAppAttemptState(); -+ try { -+ ApplicationAttemptStateData attemptStateData = -+ ApplicationAttemptStateData.newInstance(attemptState); -+ if (LOG.isDebugEnabled()) { -+ LOG.debug(""Storing info for attempt: "" + attemptState.getAttemptId()); -+ } -+ store.storeApplicationAttemptStateInternal(attemptState.getAttemptId(), -+ attemptStateData); -+ store.notifyDoneStoringApplicationAttempt(attemptState.getAttemptId(), -+ null); -+ } catch (Exception e) { -+ LOG.error(""Error storing appAttempt: "" + attemptState.getAttemptId(), e); -+ store.notifyStoreOperationFailed(e); -+ } -+ }; -+ } -+ -+ private static class UpdateAppAttemptTransition implements -+ SingleArcTransition { -+ @Override -+ public void transition(RMStateStore store, RMStateStoreEvent event) { -+ if (!(event instanceof RMStateUpdateAppAttemptEvent)) { -+ // should never happen -+ LOG.error(""Illegal event type: "" + event.getClass()); -+ return; -+ } -+ ApplicationAttemptState attemptState = -+ ((RMStateUpdateAppAttemptEvent) event).getAppAttemptState(); -+ try { -+ ApplicationAttemptStateData attemptStateData = ApplicationAttemptStateData -+ .newInstance(attemptState); -+ if (LOG.isDebugEnabled()) { -+ LOG.debug(""Updating info for attempt: "" + attemptState.getAttemptId()); -+ } -+ store.updateApplicationAttemptStateInternal(attemptState.getAttemptId(), -+ attemptStateData); -+ store.notifyDoneUpdatingApplicationAttempt(attemptState.getAttemptId(), -+ null); -+ } catch (Exception e) { -+ LOG.error(""Error updating appAttempt: "" + attemptState.getAttemptId(), e); -+ store.notifyStoreOperationFailed(e); -+ } -+ }; -+ } -+ - public RMStateStore() { - super(RMStateStore.class.getName()); -+ stateMachine = stateMachineFactory.make(this); - } - - /** -@@ -390,10 +549,10 @@ 
public synchronized void updateApplicationState(ApplicationState appState) { - * application. - */ - protected abstract void storeApplicationStateInternal(ApplicationId appId, -- ApplicationStateDataPBImpl appStateData) throws Exception; -+ ApplicationStateData appStateData) throws Exception; - - protected abstract void updateApplicationStateInternal(ApplicationId appId, -- ApplicationStateDataPBImpl appStateData) throws Exception; -+ ApplicationStateData appStateData) throws Exception; - - @SuppressWarnings(""unchecked"") - /** -@@ -428,11 +587,11 @@ public synchronized void updateApplicationAttemptState( - */ - protected abstract void storeApplicationAttemptStateInternal( - ApplicationAttemptId attemptId, -- ApplicationAttemptStateDataPBImpl attemptStateData) throws Exception; -+ ApplicationAttemptStateData attemptStateData) throws Exception; - - protected abstract void updateApplicationAttemptStateInternal( - ApplicationAttemptId attemptId, -- ApplicationAttemptStateDataPBImpl attemptStateData) throws Exception; -+ ApplicationAttemptStateData attemptStateData) throws Exception; - - /** - * RMDTSecretManager call this to store the state of a delegation token -@@ -596,105 +755,10 @@ public Credentials getCredentialsFromAppAttempt(RMAppAttempt appAttempt) { - - // Dispatcher related code - protected void handleStoreEvent(RMStateStoreEvent event) { -- if (event.getType().equals(RMStateStoreEventType.STORE_APP) -- || event.getType().equals(RMStateStoreEventType.UPDATE_APP)) { -- ApplicationState appState = null; -- if (event.getType().equals(RMStateStoreEventType.STORE_APP)) { -- appState = ((RMStateStoreAppEvent) event).getAppState(); -- } else { -- assert event.getType().equals(RMStateStoreEventType.UPDATE_APP); -- appState = ((RMStateUpdateAppEvent) event).getAppState(); -- } -- -- Exception storedException = null; -- ApplicationStateDataPBImpl appStateData = -- (ApplicationStateDataPBImpl) ApplicationStateDataPBImpl -- .newApplicationStateData(appState.getSubmitTime(), -- appState.getStartTime(), appState.getUser(), -- appState.getApplicationSubmissionContext(), appState.getState(), -- appState.getDiagnostics(), appState.getFinishTime()); -- -- ApplicationId appId = -- appState.getApplicationSubmissionContext().getApplicationId(); -- -- LOG.info(""Storing info for app: "" + appId); -- try { -- if (event.getType().equals(RMStateStoreEventType.STORE_APP)) { -- storeApplicationStateInternal(appId, appStateData); -- notifyDoneStoringApplication(appId, storedException); -- } else { -- assert event.getType().equals(RMStateStoreEventType.UPDATE_APP); -- updateApplicationStateInternal(appId, appStateData); -- notifyDoneUpdatingApplication(appId, storedException); -- } -- } catch (Exception e) { -- LOG.error(""Error storing/updating app: "" + appId, e); -- notifyStoreOperationFailed(e); -- } -- } else if (event.getType().equals(RMStateStoreEventType.STORE_APP_ATTEMPT) -- || event.getType().equals(RMStateStoreEventType.UPDATE_APP_ATTEMPT)) { -- -- ApplicationAttemptState attemptState = null; -- if (event.getType().equals(RMStateStoreEventType.STORE_APP_ATTEMPT)) { -- attemptState = -- ((RMStateStoreAppAttemptEvent) event).getAppAttemptState(); -- } else { -- assert event.getType().equals(RMStateStoreEventType.UPDATE_APP_ATTEMPT); -- attemptState = -- ((RMStateUpdateAppAttemptEvent) event).getAppAttemptState(); -- } -- -- Exception storedException = null; -- Credentials credentials = attemptState.getAppAttemptCredentials(); -- ByteBuffer appAttemptTokens = null; -- try { -- if (credentials != null) 
{ -- DataOutputBuffer dob = new DataOutputBuffer(); -- credentials.writeTokenStorageToStream(dob); -- appAttemptTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength()); -- } -- ApplicationAttemptStateDataPBImpl attemptStateData = -- (ApplicationAttemptStateDataPBImpl) ApplicationAttemptStateDataPBImpl -- .newApplicationAttemptStateData(attemptState.getAttemptId(), -- attemptState.getMasterContainer(), appAttemptTokens, -- attemptState.getStartTime(), attemptState.getState(), -- attemptState.getFinalTrackingUrl(), -- attemptState.getDiagnostics(), -- attemptState.getFinalApplicationStatus()); -- if (LOG.isDebugEnabled()) { -- LOG.debug(""Storing info for attempt: "" + attemptState.getAttemptId()); -- } -- if (event.getType().equals(RMStateStoreEventType.STORE_APP_ATTEMPT)) { -- storeApplicationAttemptStateInternal(attemptState.getAttemptId(), -- attemptStateData); -- notifyDoneStoringApplicationAttempt(attemptState.getAttemptId(), -- storedException); -- } else { -- assert event.getType().equals( -- RMStateStoreEventType.UPDATE_APP_ATTEMPT); -- updateApplicationAttemptStateInternal(attemptState.getAttemptId(), -- attemptStateData); -- notifyDoneUpdatingApplicationAttempt(attemptState.getAttemptId(), -- storedException); -- } -- } catch (Exception e) { -- LOG.error( -- ""Error storing/updating appAttempt: "" + attemptState.getAttemptId(), e); -- notifyStoreOperationFailed(e); -- } -- } else if (event.getType().equals(RMStateStoreEventType.REMOVE_APP)) { -- ApplicationState appState = -- ((RMStateStoreRemoveAppEvent) event).getAppState(); -- ApplicationId appId = appState.getAppId(); -- LOG.info(""Removing info for app: "" + appId); -- try { -- removeApplicationStateInternal(appState); -- } catch (Exception e) { -- LOG.error(""Error removing app: "" + appId, e); -- notifyStoreOperationFailed(e); -- } -- } else { -- LOG.error(""Unknown RMStateStoreEvent type: "" + event.getType()); -+ try { -+ this.stateMachine.doTransition(event.getType(), event); -+ } catch (InvalidStateTransitonException e) { -+ LOG.error(""Can't handle this event at current state"", e); - } - } - -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/ZKRMStateStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/ZKRMStateStore.java -index 31c8885d4f2c9..63ae990732c24 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/ZKRMStateStore.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/ZKRMStateStore.java -@@ -49,6 +49,8 @@ - import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RMStateVersionProto; - import org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier; - import org.apache.hadoop.yarn.server.resourcemanager.RMZKUtils; -+import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.ApplicationAttemptStateData; -+import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.ApplicationStateData; - import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.RMStateVersion; - import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.impl.pb.ApplicationAttemptStateDataPBImpl; - import 
org.apache.hadoop.yarn.server.resourcemanager.recovery.records.impl.pb.ApplicationStateDataPBImpl; -@@ -551,7 +553,7 @@ private void loadApplicationAttemptState(ApplicationState appState, - - @Override - public synchronized void storeApplicationStateInternal(ApplicationId appId, -- ApplicationStateDataPBImpl appStateDataPB) throws Exception { -+ ApplicationStateData appStateDataPB) throws Exception { - String nodeCreatePath = getNodePath(rmAppRoot, appId.toString()); - - if (LOG.isDebugEnabled()) { -@@ -565,7 +567,7 @@ public synchronized void storeApplicationStateInternal(ApplicationId appId, - - @Override - public synchronized void updateApplicationStateInternal(ApplicationId appId, -- ApplicationStateDataPBImpl appStateDataPB) throws Exception { -+ ApplicationStateData appStateDataPB) throws Exception { - String nodeUpdatePath = getNodePath(rmAppRoot, appId.toString()); - - if (LOG.isDebugEnabled()) { -@@ -587,7 +589,7 @@ public synchronized void updateApplicationStateInternal(ApplicationId appId, - @Override - public synchronized void storeApplicationAttemptStateInternal( - ApplicationAttemptId appAttemptId, -- ApplicationAttemptStateDataPBImpl attemptStateDataPB) -+ ApplicationAttemptStateData attemptStateDataPB) - throws Exception { - String appDirPath = getNodePath(rmAppRoot, - appAttemptId.getApplicationId().toString()); -@@ -605,7 +607,7 @@ public synchronized void storeApplicationAttemptStateInternal( - @Override - public synchronized void updateApplicationAttemptStateInternal( - ApplicationAttemptId appAttemptId, -- ApplicationAttemptStateDataPBImpl attemptStateDataPB) -+ ApplicationAttemptStateData attemptStateDataPB) - throws Exception { - String appIdStr = appAttemptId.getApplicationId().toString(); - String appAttemptIdStr = appAttemptId.toString(); -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/records/ApplicationAttemptStateData.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/records/ApplicationAttemptStateData.java -index 255800e86b2d9..6af048b2e3d2a 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/records/ApplicationAttemptStateData.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/records/ApplicationAttemptStateData.java -@@ -18,31 +18,73 @@ - - package org.apache.hadoop.yarn.server.resourcemanager.recovery.records; - -+import java.io.IOException; - import java.nio.ByteBuffer; - - import org.apache.hadoop.classification.InterfaceAudience.Public; - import org.apache.hadoop.classification.InterfaceStability.Unstable; -+import org.apache.hadoop.io.DataOutputBuffer; -+import org.apache.hadoop.security.Credentials; - import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; - import org.apache.hadoop.yarn.api.records.Container; - import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; -+import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ApplicationAttemptStateDataProto; -+import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore.ApplicationAttemptState; - import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState; -+import 
org.apache.hadoop.yarn.util.Records; - - /* - * Contains the state data that needs to be persisted for an ApplicationAttempt - */ - @Public - @Unstable --public interface ApplicationAttemptStateData { -- -+public abstract class ApplicationAttemptStateData { -+ public static ApplicationAttemptStateData newInstance( -+ ApplicationAttemptId attemptId, Container container, -+ ByteBuffer attemptTokens, long startTime, RMAppAttemptState finalState, -+ String finalTrackingUrl, String diagnostics, -+ FinalApplicationStatus amUnregisteredFinalStatus) { -+ ApplicationAttemptStateData attemptStateData = -+ Records.newRecord(ApplicationAttemptStateData.class); -+ attemptStateData.setAttemptId(attemptId); -+ attemptStateData.setMasterContainer(container); -+ attemptStateData.setAppAttemptTokens(attemptTokens); -+ attemptStateData.setState(finalState); -+ attemptStateData.setFinalTrackingUrl(finalTrackingUrl); -+ attemptStateData.setDiagnostics(diagnostics); -+ attemptStateData.setStartTime(startTime); -+ attemptStateData.setFinalApplicationStatus(amUnregisteredFinalStatus); -+ return attemptStateData; -+ } -+ -+ public static ApplicationAttemptStateData newInstance( -+ ApplicationAttemptState attemptState) throws IOException { -+ Credentials credentials = attemptState.getAppAttemptCredentials(); -+ ByteBuffer appAttemptTokens = null; -+ if (credentials != null) { -+ DataOutputBuffer dob = new DataOutputBuffer(); -+ credentials.writeTokenStorageToStream(dob); -+ appAttemptTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength()); -+ } -+ return newInstance(attemptState.getAttemptId(), -+ attemptState.getMasterContainer(), appAttemptTokens, -+ attemptState.getStartTime(), attemptState.getState(), -+ attemptState.getFinalTrackingUrl(), -+ attemptState.getDiagnostics(), -+ attemptState.getFinalApplicationStatus()); -+ } -+ -+ public abstract ApplicationAttemptStateDataProto getProto(); -+ - /** - * The ApplicationAttemptId for the application attempt - * @return ApplicationAttemptId for the application attempt - */ - @Public - @Unstable -- public ApplicationAttemptId getAttemptId(); -+ public abstract ApplicationAttemptId getAttemptId(); - -- public void setAttemptId(ApplicationAttemptId attemptId); -+ public abstract void setAttemptId(ApplicationAttemptId attemptId); - - /* - * The master container running the application attempt -@@ -50,9 +92,9 @@ public interface ApplicationAttemptStateData { - */ - @Public - @Unstable -- public Container getMasterContainer(); -+ public abstract Container getMasterContainer(); - -- public void setMasterContainer(Container container); -+ public abstract void setMasterContainer(Container container); - - /** - * The application attempt tokens that belong to this attempt -@@ -60,17 +102,17 @@ public interface ApplicationAttemptStateData { - */ - @Public - @Unstable -- public ByteBuffer getAppAttemptTokens(); -+ public abstract ByteBuffer getAppAttemptTokens(); - -- public void setAppAttemptTokens(ByteBuffer attemptTokens); -+ public abstract void setAppAttemptTokens(ByteBuffer attemptTokens); - - /** - * Get the final state of the application attempt. - * @return the final state of the application attempt. 
- */ -- public RMAppAttemptState getState(); -+ public abstract RMAppAttemptState getState(); - -- public void setState(RMAppAttemptState state); -+ public abstract void setState(RMAppAttemptState state); - - /** - * Get the original not-proxied final tracking url for the -@@ -79,34 +121,34 @@ public interface ApplicationAttemptStateData { - * @return the original not-proxied final tracking url for the - * application - */ -- public String getFinalTrackingUrl(); -+ public abstract String getFinalTrackingUrl(); - - /** - * Set the final tracking Url of the AM. - * @param url - */ -- public void setFinalTrackingUrl(String url); -+ public abstract void setFinalTrackingUrl(String url); - /** - * Get the diagnositic information of the attempt - * @return diagnositic information of the attempt - */ -- public String getDiagnostics(); -+ public abstract String getDiagnostics(); - -- public void setDiagnostics(String diagnostics); -+ public abstract void setDiagnostics(String diagnostics); - - /** - * Get the start time of the application. - * @return start time of the application - */ -- public long getStartTime(); -+ public abstract long getStartTime(); - -- public void setStartTime(long startTime); -+ public abstract void setStartTime(long startTime); - - /** - * Get the final finish status of the application. - * @return final finish status of the application - */ -- public FinalApplicationStatus getFinalApplicationStatus(); -+ public abstract FinalApplicationStatus getFinalApplicationStatus(); - -- public void setFinalApplicationStatus(FinalApplicationStatus finishState); -+ public abstract void setFinalApplicationStatus(FinalApplicationStatus finishState); - } -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/records/ApplicationStateData.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/records/ApplicationStateData.java -index 9fce6cf12d068..55b726ffd0da8 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/records/ApplicationStateData.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/records/ApplicationStateData.java -@@ -24,7 +24,10 @@ - import org.apache.hadoop.classification.InterfaceStability.Unstable; - import org.apache.hadoop.yarn.api.records.ApplicationId; - import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext; -+import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ApplicationStateDataProto; -+import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore.ApplicationState; - import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState; -+import org.apache.hadoop.yarn.util.Records; - - /** - * Contains all the state data that needs to be stored persistently -@@ -32,19 +35,43 @@ - */ - @Public - @Unstable --public interface ApplicationStateData { -- -+public abstract class ApplicationStateData { -+ public static ApplicationStateData newInstance(long submitTime, -+ long startTime, String user, -+ ApplicationSubmissionContext submissionContext, -+ RMAppState state, String diagnostics, long finishTime) { -+ ApplicationStateData appState = 
Records.newRecord(ApplicationStateData.class); -+ appState.setSubmitTime(submitTime); -+ appState.setStartTime(startTime); -+ appState.setUser(user); -+ appState.setApplicationSubmissionContext(submissionContext); -+ appState.setState(state); -+ appState.setDiagnostics(diagnostics); -+ appState.setFinishTime(finishTime); -+ return appState; -+ } -+ -+ public static ApplicationStateData newInstance( -+ ApplicationState appState) { -+ return newInstance(appState.getSubmitTime(), appState.getStartTime(), -+ appState.getUser(), appState.getApplicationSubmissionContext(), -+ appState.getState(), appState.getDiagnostics(), -+ appState.getFinishTime()); -+ } -+ -+ public abstract ApplicationStateDataProto getProto(); -+ - /** - * The time at which the application was received by the Resource Manager - * @return submitTime - */ - @Public - @Unstable -- public long getSubmitTime(); -+ public abstract long getSubmitTime(); - - @Public - @Unstable -- public void setSubmitTime(long submitTime); -+ public abstract void setSubmitTime(long submitTime); - - /** - * Get the start time of the application. -@@ -63,11 +90,11 @@ public interface ApplicationStateData { - */ - @Public - @Unstable -- public void setUser(String user); -+ public abstract void setUser(String user); - - @Public - @Unstable -- public String getUser(); -+ public abstract String getUser(); - - /** - * The {@link ApplicationSubmissionContext} for the application -@@ -76,34 +103,34 @@ public interface ApplicationStateData { - */ - @Public - @Unstable -- public ApplicationSubmissionContext getApplicationSubmissionContext(); -+ public abstract ApplicationSubmissionContext getApplicationSubmissionContext(); - - @Public - @Unstable -- public void setApplicationSubmissionContext( -+ public abstract void setApplicationSubmissionContext( - ApplicationSubmissionContext context); - - /** - * Get the final state of the application. - * @return the final state of the application. - */ -- public RMAppState getState(); -+ public abstract RMAppState getState(); - -- public void setState(RMAppState state); -+ public abstract void setState(RMAppState state); - - /** - * Get the diagnostics information for the application master. - * @return the diagnostics information for the application master. - */ -- public String getDiagnostics(); -+ public abstract String getDiagnostics(); - -- public void setDiagnostics(String diagnostics); -+ public abstract void setDiagnostics(String diagnostics); - - /** - * The finish time of the application. 
- * @return the finish time of the application., - */ -- public long getFinishTime(); -+ public abstract long getFinishTime(); - -- public void setFinishTime(long finishTime); -+ public abstract void setFinishTime(long finishTime); - } -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/records/impl/pb/ApplicationAttemptStateDataPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/records/impl/pb/ApplicationAttemptStateDataPBImpl.java -index 75ac2eef9a737..e3ebe5e08936c 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/records/impl/pb/ApplicationAttemptStateDataPBImpl.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/records/impl/pb/ApplicationAttemptStateDataPBImpl.java -@@ -25,10 +25,7 @@ - import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; - import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptIdPBImpl; - import org.apache.hadoop.yarn.api.records.impl.pb.ContainerPBImpl; --import org.apache.hadoop.yarn.api.records.impl.pb.ProtoBase; - import org.apache.hadoop.yarn.api.records.impl.pb.ProtoUtils; --import org.apache.hadoop.yarn.factories.RecordFactory; --import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; - import org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto; - import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ApplicationAttemptStateDataProto; - import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ApplicationAttemptStateDataProtoOrBuilder; -@@ -36,12 +33,10 @@ - import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.ApplicationAttemptStateData; - import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState; - --public class ApplicationAttemptStateDataPBImpl --extends ProtoBase --implements ApplicationAttemptStateData { -- private static final RecordFactory recordFactory = RecordFactoryProvider -- .getRecordFactory(null); -+import com.google.protobuf.TextFormat; - -+public class ApplicationAttemptStateDataPBImpl extends -+ ApplicationAttemptStateData { - ApplicationAttemptStateDataProto proto = - ApplicationAttemptStateDataProto.getDefaultInstance(); - ApplicationAttemptStateDataProto.Builder builder = null; -@@ -60,7 +55,8 @@ public ApplicationAttemptStateDataPBImpl( - this.proto = proto; - viaProto = true; - } -- -+ -+ @Override - public ApplicationAttemptStateDataProto getProto() { - mergeLocalToProto(); - proto = viaProto ? 
proto : builder.build(); -@@ -76,7 +72,8 @@ private void mergeLocalToBuilder() { - builder.setMasterContainer(((ContainerPBImpl)masterContainer).getProto()); - } - if(this.appAttemptTokens != null) { -- builder.setAppAttemptTokens(convertToProtoFormat(this.appAttemptTokens)); -+ builder.setAppAttemptTokens(ProtoUtils.convertToProtoFormat( -+ this.appAttemptTokens)); - } - } - -@@ -148,7 +145,8 @@ public ByteBuffer getAppAttemptTokens() { - if(!p.hasAppAttemptTokens()) { - return null; - } -- this.appAttemptTokens = convertFromProtoFormat(p.getAppAttemptTokens()); -+ this.appAttemptTokens = ProtoUtils.convertFromProtoFormat( -+ p.getAppAttemptTokens()); - return appAttemptTokens; - } - -@@ -249,24 +247,26 @@ public void setFinalApplicationStatus(FinalApplicationStatus finishState) { - builder.setFinalApplicationStatus(convertToProtoFormat(finishState)); - } - -- public static ApplicationAttemptStateData newApplicationAttemptStateData( -- ApplicationAttemptId attemptId, Container container, -- ByteBuffer attemptTokens, long startTime, RMAppAttemptState finalState, -- String finalTrackingUrl, String diagnostics, -- FinalApplicationStatus amUnregisteredFinalStatus) { -- ApplicationAttemptStateData attemptStateData = -- recordFactory.newRecordInstance(ApplicationAttemptStateData.class); -- attemptStateData.setAttemptId(attemptId); -- attemptStateData.setMasterContainer(container); -- attemptStateData.setAppAttemptTokens(attemptTokens); -- attemptStateData.setState(finalState); -- attemptStateData.setFinalTrackingUrl(finalTrackingUrl); -- attemptStateData.setDiagnostics(diagnostics); -- attemptStateData.setStartTime(startTime); -- attemptStateData.setFinalApplicationStatus(amUnregisteredFinalStatus); -- return attemptStateData; -+ @Override -+ public int hashCode() { -+ return getProto().hashCode(); - } - -+ @Override -+ public boolean equals(Object other) { -+ if (other == null) -+ return false; -+ if (other.getClass().isAssignableFrom(this.getClass())) { -+ return this.getProto().equals(this.getClass().cast(other).getProto()); -+ } -+ return false; -+ } -+ -+ @Override -+ public String toString() { -+ return TextFormat.shortDebugString(getProto()); -+ } -+ - private static String RM_APP_ATTEMPT_PREFIX = ""RMATTEMPT_""; - public static RMAppAttemptStateProto convertToProtoFormat(RMAppAttemptState e) { - return RMAppAttemptStateProto.valueOf(RM_APP_ATTEMPT_PREFIX + e.name()); -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/records/impl/pb/ApplicationStateDataPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/records/impl/pb/ApplicationStateDataPBImpl.java -index ede8ca7c46155..8aaf1a4a7caf6 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/records/impl/pb/ApplicationStateDataPBImpl.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/records/impl/pb/ApplicationStateDataPBImpl.java -@@ -20,21 +20,15 @@ - - import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext; - import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationSubmissionContextPBImpl; --import org.apache.hadoop.yarn.api.records.impl.pb.ProtoBase; --import 
org.apache.hadoop.yarn.factories.RecordFactory; --import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; - import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ApplicationStateDataProto; - import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ApplicationStateDataProtoOrBuilder; - import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RMAppStateProto; - import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.ApplicationStateData; - import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState; - --public class ApplicationStateDataPBImpl --extends ProtoBase --implements ApplicationStateData { -- private static final RecordFactory recordFactory = RecordFactoryProvider -- .getRecordFactory(null); -+import com.google.protobuf.TextFormat; - -+public class ApplicationStateDataPBImpl extends ApplicationStateData { - ApplicationStateDataProto proto = - ApplicationStateDataProto.getDefaultInstance(); - ApplicationStateDataProto.Builder builder = null; -@@ -51,7 +45,8 @@ public ApplicationStateDataPBImpl( - this.proto = proto; - viaProto = true; - } -- -+ -+ @Override - public ApplicationStateDataProto getProto() { - mergeLocalToProto(); - proto = viaProto ? proto : builder.build(); -@@ -136,7 +131,7 @@ public ApplicationSubmissionContext getApplicationSubmissionContext() { - } - applicationSubmissionContext = - new ApplicationSubmissionContextPBImpl( -- p.getApplicationSubmissionContext()); -+ p.getApplicationSubmissionContext()); - return applicationSubmissionContext; - } - -@@ -200,21 +195,24 @@ public void setFinishTime(long finishTime) { - builder.setFinishTime(finishTime); - } - -- public static ApplicationStateData newApplicationStateData(long submitTime, -- long startTime, String user, -- ApplicationSubmissionContext submissionContext, RMAppState state, -- String diagnostics, long finishTime) { -- -- ApplicationStateData appState = -- recordFactory.newRecordInstance(ApplicationStateData.class); -- appState.setSubmitTime(submitTime); -- appState.setStartTime(startTime); -- appState.setUser(user); -- appState.setApplicationSubmissionContext(submissionContext); -- appState.setState(state); -- appState.setDiagnostics(diagnostics); -- appState.setFinishTime(finishTime); -- return appState; -+ @Override -+ public int hashCode() { -+ return getProto().hashCode(); -+ } -+ -+ @Override -+ public boolean equals(Object other) { -+ if (other == null) -+ return false; -+ if (other.getClass().isAssignableFrom(this.getClass())) { -+ return this.getProto().equals(this.getClass().cast(other).getProto()); -+ } -+ return false; -+ } -+ -+ @Override -+ public String toString() { -+ return TextFormat.shortDebugString(getProto()); - } - - private static String RM_APP_PREFIX = ""RMAPP_""; -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMRestart.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMRestart.java -index 3bdb66c4b407c..9c2d87e444250 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMRestart.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMRestart.java -@@ 
-84,8 +84,8 @@ - import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore.ApplicationState; - import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore.RMState; - import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStoreEvent; --import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.impl.pb.ApplicationAttemptStateDataPBImpl; --import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.impl.pb.ApplicationStateDataPBImpl; -+import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.ApplicationAttemptStateData; -+import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.ApplicationStateData; - import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; - import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState; - import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt; -@@ -612,7 +612,7 @@ public void testRMRestartWaitForPreviousSucceededAttempt() throws Exception { - - @Override - public void updateApplicationStateInternal(ApplicationId appId, -- ApplicationStateDataPBImpl appStateData) throws Exception { -+ ApplicationStateData appStateData) throws Exception { - if (count == 0) { - // do nothing; simulate app final state is not saved. - LOG.info(appId + "" final state is not saved.""); -@@ -760,14 +760,14 @@ public void testRMRestartKilledAppWithNoAttempts() throws Exception { - @Override - public synchronized void storeApplicationAttemptStateInternal( - ApplicationAttemptId attemptId, -- ApplicationAttemptStateDataPBImpl attemptStateData) throws Exception { -+ ApplicationAttemptStateData attemptStateData) throws Exception { - // ignore attempt saving request. - } - - @Override - public synchronized void updateApplicationAttemptStateInternal( - ApplicationAttemptId attemptId, -- ApplicationAttemptStateDataPBImpl attemptStateData) throws Exception { -+ ApplicationAttemptStateData attemptStateData) throws Exception { - // ignore attempt saving request. 
- } - }; -@@ -1862,7 +1862,7 @@ public class TestMemoryRMStateStore extends MemoryRMStateStore { - - @Override - public void updateApplicationStateInternal(ApplicationId appId, -- ApplicationStateDataPBImpl appStateData) throws Exception { -+ ApplicationStateData appStateData) throws Exception { - updateApp = ++count; - super.updateApplicationStateInternal(appId, appStateData); - } -@@ -1871,7 +1871,7 @@ public void updateApplicationStateInternal(ApplicationId appId, - public synchronized void - updateApplicationAttemptStateInternal( - ApplicationAttemptId attemptId, -- ApplicationAttemptStateDataPBImpl attemptStateData) -+ ApplicationAttemptStateData attemptStateData) - throws Exception { - updateAttempt = ++count; - super.updateApplicationAttemptStateInternal(attemptId, -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestFSRMStateStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestFSRMStateStore.java -index 792b73e5a6648..da25c5beda6ad 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestFSRMStateStore.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestFSRMStateStore.java -@@ -24,7 +24,6 @@ - import java.util.concurrent.atomic.AtomicBoolean; - - import org.junit.Assert; -- - import org.apache.commons.logging.Log; - import org.apache.commons.logging.LogFactory; - import org.apache.hadoop.conf.Configuration; -@@ -37,6 +36,7 @@ - import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; - import org.apache.hadoop.yarn.api.records.ApplicationId; - import org.apache.hadoop.yarn.conf.YarnConfiguration; -+import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.ApplicationStateData; - import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.RMStateVersion; - import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.impl.pb.ApplicationStateDataPBImpl; - import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.impl.pb.RMStateVersionPBImpl; -@@ -213,9 +213,8 @@ public void run() { - try { - store.storeApplicationStateInternal( - ApplicationId.newInstance(100L, 1), -- (ApplicationStateDataPBImpl) ApplicationStateDataPBImpl -- .newApplicationStateData(111, 111, ""user"", null, -- RMAppState.ACCEPTED, ""diagnostics"", 333)); -+ ApplicationStateData.newInstance(111, 111, ""user"", null, -+ RMAppState.ACCEPTED, ""diagnostics"", 333)); - } catch (Exception e) { - // TODO 0 datanode exception will not be retried by dfs client, fix - // that separately." -5cea1f83748ed49c4cf9d2612c55ab7101599186,fozziethebeat$s-space,"Changes based on Keith's review. 
A few tweaks to EdgeSet to help tracking edge -removal -modified: src/edu/ucla/sspace/common/Similarity.java -- Updated to use VectorMath.dotProduct for the Tanimoto coefficient -deleted: src/edu/ucla/sspace/common/WordComparator.java -- Moved to SimpleNearestNeighborFinder -modified: src/edu/ucla/sspace/dependency/SimpleDependencyPath.java -- Removed println -modified: src/edu/ucla/sspace/graph/AbstractGraph.java -- Added missing implementation to Subgraph class so now all the unit tests pass -modified: src/edu/ucla/sspace/graph/DirectedMultigraph.java -- Added missing implementation to Subgraph class so now all the unit tests pass -- Fixed bug for reporting the correct edge types after removal -- Removed dead code -modified: src/edu/ucla/sspace/graph/EdgeSet.java -- Updated so that disconnect() now returns the number of edges that were removed -modified: src/edu/ucla/sspace/graph/GenericEdgeSet.java -modified: src/edu/ucla/sspace/graph/SparseDirectedEdgeSet.java -modified: src/edu/ucla/sspace/graph/SparseDirectedTypedEdgeSet.java -modified: src/edu/ucla/sspace/graph/SparseTypedEdgeSet.java -modified: src/edu/ucla/sspace/graph/SparseUndirectedEdgeSet.java -modified: src/edu/ucla/sspace/graph/SparseWeightedEdgeSet.java -- Updated to support EdgeSet interface change -deleted: src/edu/ucla/sspace/graph/GraphRandomizer.java -- Removed dead class (functionality is in Graphs.java) -modified: src/edu/ucla/sspace/graph/SimpleWeightedEdge.java -- Fixed hashCode() -deleted: src/edu/ucla/sspace/graph/SparseSymmetricEdgeSet.java -- Removed dead class -modified: src/edu/ucla/sspace/graph/UndirectedMultigraph.java -- Added missing implementation to Subgraph class so now all the unit tests pass -- Fixed bug for reporting the correct edge types after removal -- Removed dead code -modified: src/edu/ucla/sspace/mains/FixedDurationTemporalRandomIndexingMain.java -- Updated to replace WordComparator with SimpleNearestNeighborFinder -modified: src/edu/ucla/sspace/mains/LexSubWordsiMain.java -- Updated to replace WordComparator with SimpleNearestNeighborFinder -modified: src/edu/ucla/sspace/text/LabeledParsedStringDocument.java -- Updated for new ParsedDocument interface -modified: src/edu/ucla/sspace/text/ParsedDocument.java -- Updated to specify the format of text() as the tokens with white space delimiters. -- Added a new prettyPrintText() which is the attempt to nicely format the tokens -as they would have been originally. 
-modified: src/edu/ucla/sspace/text/PukWaCDocumentIterator.java -- Fixed javadoc -modified: src/edu/ucla/sspace/text/UkWaCDocumentIterator.java -- Added more class javadoc -modified: src/edu/ucla/sspace/tools/NearestNeighborFinderTool.java -- Updated to use the class instances instead of the interface -modified: src/edu/ucla/sspace/tools/SemanticSpaceExplorer.java -- Updated to replace WordComparator with PartitioningNearestNeighborFinder -modified: src/edu/ucla/sspace/tools/SimilarityListGenerator.java -- Updated to replace WordComparator with PartitioningNearestNeighborFinder -modified: src/edu/ucla/sspace/util/HashIndexer.java -- Fixed javadoc -modified: src/edu/ucla/sspace/util/PairCounter.java -- Fixed javadoc -renamed: src/edu/ucla/sspace/util/NearestNeighborFinder.java -> src/edu/ucla/sspace/util/PartitioningNearestNeighborFinder.java -- Moved so that NearestNeighborFinder can be an interface -modified: src/edu/ucla/sspace/util/ReflectionUtil.java -- Removed dead code -modified: src/edu/ucla/sspace/util/primitive/IntIntHashMultiMap.java -- Added javadoc -modified: src/edu/ucla/sspace/util/primitive/IntIntMultiMap.java -- Added javadoc -modified: test/edu/ucla/sspace/graph/DirectedMultigraphTests.java -- Uncommented out unit tests -modified: test/edu/ucla/sspace/dependency/BreadthFirstPathIteratorTest.java -modified: test/edu/ucla/sspace/dependency/CoNLLDependencyExtractorTest.java -modified: test/edu/ucla/sspace/dependency/WaCKyDependencyExtractorTest.java -modified: test/edu/ucla/sspace/text/corpora/PukWacDependencyCorpusReaderTest.java -modified: test/edu/ucla/sspace/wordsi/DependencyContextExtractorTest.java -modified: test/edu/ucla/sspace/wordsi/OccurrenceDependencyContextGeneratorTest.java -modified: test/edu/ucla/sspace/wordsi/OrderingDependencyContextGeneratorTest.java -modified: test/edu/ucla/sspace/wordsi/PartOfSpeechDependencyContextGeneratorTest.java -modified: test/edu/ucla/sspace/wordsi/psd/PseudoWordDependencyContextExtractorTest.java -modified: test/edu/ucla/sspace/wordsi/semeval/SemEvalDependencyContextExtractorTest.java -- Fixed unit tests to support proper tab-delimiting of the CoNLL format -",p,https://github.com/fozziethebeat/s-space,"diff --git a/src/edu/ucla/sspace/common/Similarity.java b/src/edu/ucla/sspace/common/Similarity.java -index d56934dd..8a46db3b 100644 ---- a/src/edu/ucla/sspace/common/Similarity.java -+++ b/src/edu/ucla/sspace/common/Similarity.java -@@ -41,6 +41,7 @@ - import edu.ucla.sspace.vector.IntegerVector; - import edu.ucla.sspace.vector.SparseVector; - import edu.ucla.sspace.vector.Vector; -+import edu.ucla.sspace.vector.VectorMath; - import edu.ucla.sspace.vector.Vectors; - import edu.ucla.sspace.vector.SparseIntegerVector; - -@@ -2187,7 +2188,7 @@ public static double tanimotoCoefficient(Vector a, Vector b) { - public static double tanimotoCoefficient(DoubleVector a, DoubleVector b) { - check(a,b); - -- // IMPLEMENTATION NOTE: The Tanimoto coefficient uses the squart of the -+ // IMPLEMENTATION NOTE: The Tanimoto coefficient uses the square of the - // vector magnitudes, which we could compute by just summing the square - // of the vector values. This would save a .sqrt() call from the - // .magnitude() call. However, we expect that this method might be -@@ -2195,83 +2196,13 @@ public static double tanimotoCoefficient(DoubleVector a, DoubleVector b) { - // should only be two multiplications instaned of |nz| multiplications - // on the second call (assuming the vector instances cache their - // magnitude, which almost all do). 
-- double dotProduct = 0.0; - double aMagnitude = a.magnitude(); - double bMagnitude = b.magnitude(); -- -- // Check whether both vectors support fast iteration over their non-zero -- // values. If so, use only the non-zero indices to speed up the -- // computation by avoiding zero multiplications -- if (a instanceof Iterable && b instanceof Iterable) { -- // Check whether we can easily determine how many non-zero values -- // are in each vector. This value is used to select the iteration -- // order, which affects the number of get(value) calls. -- boolean useA = -- (a instanceof SparseVector && b instanceof SparseVector) -- && ((SparseVector)a).getNonZeroIndices().length < -- ((SparseVector)b).getNonZeroIndices().length; -- -- // Choose the smaller of the two to use in computing the dot -- // product. Because it would be more expensive to compute the -- // intersection of the two sets, we assume that any potential -- // misses would be less of a performance hit. -- if (useA) { -- for (DoubleEntry e : ((Iterable)a)) { -- int index = e.index(); -- double aValue = e.value(); -- double bValue = b.get(index); -- dotProduct += aValue * bValue; -- } -- } -- else { -- for (DoubleEntry e : ((Iterable)b)) { -- int index = e.index(); -- double aValue = a.get(index); -- double bValue = e.value(); -- dotProduct += aValue * bValue; -- } -- } -- } -- -- // Check whether both vectors are sparse. If so, use only the non-zero -- // indices to speed up the computation by avoiding zero multiplications -- else if (a instanceof SparseVector && b instanceof SparseVector) { -- SparseVector svA = (SparseVector)a; -- SparseVector svB = (SparseVector)b; -- int[] nzA = svA.getNonZeroIndices(); -- int[] nzB = svB.getNonZeroIndices(); -- // Choose the smaller of the two to use in computing the dot -- // product. Because it would be more expensive to compute the -- // intersection of the two sets, we assume that any potential -- // misses would be less of a performance hit. -- if (nzA.length < nzB.length) { -- for (int nz : nzA) { -- double aValue = a.get(nz); -- double bValue = b.get(nz); -- dotProduct += aValue * bValue; -- } -- } -- else { -- for (int nz : nzB) { -- double aValue = a.get(nz); -- double bValue = b.get(nz); -- dotProduct += aValue * bValue; -- } -- } -- } -- -- // Otherwise, just assume both are dense and compute the full amount -- else { -- for (int i = 0; i < b.length(); i++) { -- double aValue = a.get(i); -- double bValue = b.get(i); -- dotProduct += aValue * bValue; -- } -- } -- -+ - if (aMagnitude == 0 || bMagnitude == 0) - return 0; -- -+ -+ double dotProduct = VectorMath.dotProduct(a, b); - double aMagSq = aMagnitude * aMagnitude; - double bMagSq = bMagnitude * bMagnitude; - -@@ -2297,82 +2228,13 @@ public static double tanimotoCoefficient(IntegerVector a, IntegerVector b) { - // should only be two multiplications instaned of |nz| multiplications - // on the second call (assuming the vector instances cache their - // magnitude, which almost all do). -- int dotProduct = 0; - double aMagnitude = a.magnitude(); - double bMagnitude = b.magnitude(); - -- // Check whether both vectors support fast iteration over their non-zero -- // values. If so, use only the non-zero indices to speed up the -- // computation by avoiding zero multiplications -- if (a instanceof Iterable && b instanceof Iterable) { -- // Check whether we can easily determine how many non-zero values -- // are in each vector. This value is used to select the iteration -- // order, which affects the number of get(value) calls. 
-- boolean useA = -- (a instanceof SparseVector && b instanceof SparseVector) -- && ((SparseVector)a).getNonZeroIndices().length < -- ((SparseVector)b).getNonZeroIndices().length; -- // Choose the smaller of the two to use in computing the dot -- // product. Because it would be more expensive to compute the -- // intersection of the two sets, we assume that any potential -- // misses would be less of a performance hit. -- if (useA) { -- for (IntegerEntry e : ((Iterable)a)) { -- int index = e.index(); -- int aValue = e.value(); -- int bValue = b.get(index); -- dotProduct += aValue * bValue; -- } -- } -- else { -- for (IntegerEntry e : ((Iterable)b)) { -- int index = e.index(); -- int aValue = a.get(index); -- int bValue = e.value(); -- dotProduct += aValue * bValue; -- } -- } -- } -- -- // Check whether both vectors are sparse. If so, use only the non-zero -- // indices to speed up the computation by avoiding zero multiplications -- else if (a instanceof SparseVector && b instanceof SparseVector) { -- SparseVector svA = (SparseVector)a; -- SparseVector svB = (SparseVector)b; -- int[] nzA = svA.getNonZeroIndices(); -- int[] nzB = svB.getNonZeroIndices(); -- // Choose the smaller of the two to use in computing the dot -- // product. Because it would be more expensive to compute the -- // intersection of the two sets, we assume that any potential -- // misses would be less of a performance hit. -- if (nzA.length < nzB.length) { -- for (int nz : nzA) { -- int aValue = a.get(nz); -- int bValue = b.get(nz); -- dotProduct += aValue * bValue; -- } -- } -- else { -- for (int nz : nzB) { -- int aValue = a.get(nz); -- int bValue = b.get(nz); -- dotProduct += aValue * bValue; -- } -- } -- } -- -- // Otherwise, just assume both are dense and compute the full amount -- else { -- for (int i = 0; i < b.length(); i++) { -- int aValue = a.get(i); -- int bValue = b.get(i); -- dotProduct += aValue * bValue; -- } -- } -- - if (aMagnitude == 0 || bMagnitude == 0) - return 0; -- -+ -+ int dotProduct = VectorMath.dotProduct(a, b); - double aMagSq = aMagnitude * aMagnitude; - double bMagSq = bMagnitude * bMagnitude; - -diff --git a/src/edu/ucla/sspace/common/WordComparator.java b/src/edu/ucla/sspace/common/WordComparator.java -deleted file mode 100644 -index f6b76303..00000000 ---- a/src/edu/ucla/sspace/common/WordComparator.java -+++ /dev/null -@@ -1,126 +0,0 @@ --/* -- * Copyright 2009 David Jurgens -- * -- * This file is part of the S-Space package and is covered under the terms and -- * conditions therein. -- * -- * The S-Space package is free software: you can redistribute it and/or modify -- * it under the terms of the GNU General Public License version 2 as published -- * by the Free Software Foundation and distributed hereunder to you. -- * -- * THIS SOFTWARE IS PROVIDED ""AS IS"" AND NO REPRESENTATIONS OR WARRANTIES, -- * EXPRESS OR IMPLIED ARE MADE. BY WAY OF EXAMPLE, BUT NOT LIMITATION, WE MAKE -- * NO REPRESENTATIONS OR WARRANTIES OF MERCHANT- ABILITY OR FITNESS FOR ANY -- * PARTICULAR PURPOSE OR THAT THE USE OF THE LICENSED SOFTWARE OR DOCUMENTATION -- * WILL NOT INFRINGE ANY THIRD PARTY PATENTS, COPYRIGHTS, TRADEMARKS OR OTHER -- * RIGHTS. -- * -- * You should have received a copy of the GNU General Public License -- * along with this program. If not, see . 
-- */ -- --package edu.ucla.sspace.common; -- --import edu.ucla.sspace.util.BoundedSortedMultiMap; --import edu.ucla.sspace.util.MultiMap; --import edu.ucla.sspace.util.SortedMultiMap; --import edu.ucla.sspace.util.WorkQueue; -- --import edu.ucla.sspace.vector.Vector; -- --import java.util.Map; --import java.util.Set; --import java.util.SortedMap; -- -- --/** -- * A utility class for finding the {@code k} most-similar words to a provided -- * word in a {@link SemanticSpace}. The comparisons required for generating the -- * list maybe be run in parallel by configuring an instance of this class to use -- * multiple threads.

-- * -- * All instances of this class are thread-safe. -- * -- * @author David Jurgens -- */ --public class WordComparator { -- -- /** -- * The {@link WorkQueue} from which worker threads run word-word comparisons -- */ -- private final WorkQueue workQueue; -- -- /** -- * Creates this {@code WordComparator} with as many threads as processors. -- */ -- public WordComparator() { -- this(Runtime.getRuntime().availableProcessors()); -- } -- -- /** -- * Creates this {@code WordComparator} with the specified number of threads. -- */ -- public WordComparator(int numThreads) { -- workQueue = WorkQueue.getWorkQueue(numThreads); -- } -- -- /** -- * Compares the provided word to all other words in the provided {@link -- * SemanticSpace} and return the specified number of words that were most -- * similar according to the specified similarity measure. -- * -- * @return the most similar words, or {@code null} if the provided word was -- * not in the semantic space. -- */ -- public SortedMultiMap getMostSimilar( -- final String word, final SemanticSpace sspace, -- int numberOfSimilarWords, final Similarity.SimType similarityType) { -- -- Vector v = sspace.getVector(word); -- -- // if the semantic space did not have the word, then return null -- if (v == null) { -- return null; -- } -- -- final Vector vector = v; -- return getMostSimilar(v, sspace, numberOfSimilarWords, similarityType); -- } -- -- public SortedMultiMap getMostSimilar( -- final Vector vector, final SemanticSpace sspace, -- int numberOfSimilarWords, final Similarity.SimType similarityType) { -- Set words = sspace.getWords(); -- -- // the most-similar set will automatically retain only a fixed number -- // of elements -- final SortedMultiMap mostSimilar = -- new BoundedSortedMultiMap(numberOfSimilarWords, -- false); -- -- Object key = workQueue.registerTaskGroup(words.size()); -- -- // loop through all the other words computing their similarity -- for (final String other : words) { -- workQueue.add(key, new Runnable() { -- public void run() { -- Vector otherV = sspace.getVector(other); -- // Skip the comparison if the vectors are actually the same. -- if (otherV == vector) -- return; -- -- Double similarity = Similarity.getSimilarity( -- similarityType, vector, otherV); -- -- // lock on the Map, as it is not thread-safe -- synchronized(mostSimilar) { -- mostSimilar.put(similarity, other); -- } -- } -- }); -- } -- -- workQueue.await(key); -- return mostSimilar; -- } --} -diff --git a/src/edu/ucla/sspace/dependency/SimpleDependencyPath.java b/src/edu/ucla/sspace/dependency/SimpleDependencyPath.java -index 576caa6f..51343ad8 100644 ---- a/src/edu/ucla/sspace/dependency/SimpleDependencyPath.java -+++ b/src/edu/ucla/sspace/dependency/SimpleDependencyPath.java -@@ -73,7 +73,6 @@ public SimpleDependencyPath(List path, - nodes.add(next); - cur = next; - } -- System.out.printf(""path: %s,%nnodes: %s%n"", path, nodes); - } - - /** -diff --git a/src/edu/ucla/sspace/graph/AbstractGraph.java b/src/edu/ucla/sspace/graph/AbstractGraph.java -index 2ffa38fe..d9189a42 100755 ---- a/src/edu/ucla/sspace/graph/AbstractGraph.java -+++ b/src/edu/ucla/sspace/graph/AbstractGraph.java -@@ -967,8 +967,10 @@ public Set getAdjacencyList(int vertex) { - * {@inheritDoc} - */ - public IntSet getNeighbors(int vertex) { -- if (!vertexSubset.contains(vertex)) -- return PrimitiveCollections.emptyIntSet(); -+ return (!vertexSubset.contains(vertex)) -+ ? 
PrimitiveCollections.emptyIntSet() -+ : new SubgraphNeighborsView(vertex); -+ /* - // REMINDER: make this a view, rather than a created set - IntSet neighbors = new TroveIntSet(); - IntIterator it = -@@ -979,6 +981,7 @@ public IntSet getNeighbors(int vertex) { - neighbors.add(v); - } - return neighbors; -+ */ - } - - /** -@@ -1084,7 +1087,7 @@ public IntSet vertices() { - * subgraph. This class monitors for changes to edge set to update the - * state of this graph - */ -- private class SubgraphAdjacencyListView extends AbstractSet { -+ private class SubgraphAdjacencyListView extends AbstractSet { - - private final int root; - -@@ -1275,7 +1278,7 @@ public void remove() { - * subview. This view monitors for additions and removals to the set in - * order to update the state of this {@code Subgraph}. - */ -- private class SubgraphNeighborsView extends AbstractSet { -+ private class SubgraphNeighborsView extends AbstractIntSet { - - private int root; - -@@ -1285,15 +1288,20 @@ private class SubgraphNeighborsView extends AbstractSet { - public SubgraphNeighborsView(int root) { - this.root = root; - } -+ -+ public boolean add(int vertex) { -+ throw new UnsupportedOperationException( -+ ""Cannot add vertices to subgraph""); -+ } - -- /** -- * Adds an edge to this vertex and adds the vertex to the graph if it -- * was not present before. -- */ - public boolean add(Integer vertex) { - throw new UnsupportedOperationException( - ""Cannot add vertices to subgraph""); - } -+ -+ public boolean contains(int vertex) { -+ return vertexSubset.contains(vertex) && checkVertex(vertex); -+ } - - public boolean contains(Object o) { - if (!(o instanceof Integer)) -@@ -1309,12 +1317,18 @@ private boolean checkVertex(int i) { - return AbstractGraph.this.contains(i, root); - } - -- public Iterator iterator() { -+ public IntIterator iterator() { - return new SubgraphNeighborsIterator(); - } - -- public boolean remove(Object o) { -- throw new UnsupportedOperationException(); -+ public boolean remove(int vertex) { -+ throw new UnsupportedOperationException( -+ ""Cannot remove vertices from subgraph""); -+ } -+ -+ public boolean remove(Object vertex) { -+ throw new UnsupportedOperationException( -+ ""Cannot remove vertices from subgraph""); - } - - public int size() { -@@ -1333,7 +1347,7 @@ public int size() { - * vertices set, which keeps track of which neighboring vertices are - * actually in this subview. - */ -- private class SubgraphNeighborsIterator implements Iterator { -+ private class SubgraphNeighborsIterator implements IntIterator { - - private final IntIterator iter; - -@@ -1368,6 +1382,10 @@ public Integer next() { - return cur; - } - -+ public int nextInt() { -+ return next(); -+ } -+ - /** - * Throws an {@link UnsupportedOperationException} if called. 
- */ -diff --git a/src/edu/ucla/sspace/graph/DirectedMultigraph.java b/src/edu/ucla/sspace/graph/DirectedMultigraph.java -index 9a2b60c5..0792e9d1 100644 ---- a/src/edu/ucla/sspace/graph/DirectedMultigraph.java -+++ b/src/edu/ucla/sspace/graph/DirectedMultigraph.java -@@ -43,6 +43,8 @@ - import edu.ucla.sspace.util.DisjointSets; - import edu.ucla.sspace.util.SetDecorator; - -+import edu.ucla.sspace.util.primitive.AbstractIntSet; -+import edu.ucla.sspace.util.primitive.IntIterator; - import edu.ucla.sspace.util.primitive.IntSet; - import edu.ucla.sspace.util.primitive.PrimitiveCollections; - import edu.ucla.sspace.util.primitive.TroveIntSet; -@@ -51,8 +53,9 @@ - import gnu.trove.iterator.TIntIterator; - import gnu.trove.iterator.TIntObjectIterator; - import gnu.trove.map.TIntObjectMap; -+import gnu.trove.map.TObjectIntMap; - import gnu.trove.map.hash.TIntObjectHashMap; --import gnu.trove.procedure.TObjectProcedure; -+import gnu.trove.map.hash.TObjectIntHashMap; - import gnu.trove.set.TIntSet; - import gnu.trove.set.hash.TIntHashSet; - -@@ -72,9 +75,9 @@ public class DirectedMultigraph - private static final long serialVersionUID = 1L; - - /** -- * The set of types contained in this graph. -+ * The count of the type distribution for all edges in the graph. - */ -- private final Set types; -+ private final TObjectIntMap typeCounts; - - /** - * The set of vertices in this mutligraph. This set is maintained -@@ -104,7 +107,7 @@ public class DirectedMultigraph - * Creates an empty graph with node edges - */ - public DirectedMultigraph() { -- types = new HashSet(); -+ typeCounts = new TObjectIntHashMap(); - vertexToEdges = new TIntObjectHashMap>(); - subgraphs = new ArrayList>(); - size = 0; -@@ -143,7 +146,7 @@ public boolean add(DirectedTypedEdge e) { - vertexToEdges.put(e.from(), from); - } - if (from.add(e)) { -- types.add(e.edgeType()); -+ updateTypeCounts(e.edgeType(), 1); - SparseDirectedTypedEdgeSet to = vertexToEdges.get(e.to()); - if (to == null) { - to = new SparseDirectedTypedEdgeSet(e.to()); -@@ -161,7 +164,7 @@ public boolean add(DirectedTypedEdge e) { - */ - public void clear() { - vertexToEdges.clear(); -- types.clear(); -+ typeCounts.clear(); - size = 0; - } - -@@ -225,14 +228,10 @@ public DirectedMultigraph copy(Set toCopy) { - return new DirectedMultigraph(this); - - DirectedMultigraph g = new DirectedMultigraph(); -- //long s = System.currentTimeMillis(); - for (int v : toCopy) { - if (!vertexToEdges.containsKey(v)) - throw new IllegalArgumentException( - ""Request copy of non-present vertex: "" + v); --// SparseDirectedTypedEdgeSet edges = vertexToEdges.get(v); --// g.vertexToEdges.put(v, edges.copy(toCopy)); -- - g.add(v); - SparseDirectedTypedEdgeSet edges = vertexToEdges.get(v); - if (edges == null) -@@ -245,8 +244,6 @@ public DirectedMultigraph copy(Set toCopy) { - g.add(e); - } - } --// if (toCopy.size() > 0) --// System.out.printf(""Copy %d vertices (%d), %d edges%n"", g.order(), toCopy.size(), g.size()); - return g; - } - -@@ -284,7 +281,7 @@ public Set> edges(T t) { - * Returns the set of edge types currently present in this graph. 
- */ - public Set edgeTypes() { -- return Collections.unmodifiableSet(types); -+ return Collections.unmodifiableSet(typeCounts.keySet()); - } - - /** -@@ -293,7 +290,7 @@ public Set edgeTypes() { - @Override public boolean equals(Object o) { - if (o instanceof DirectedMultigraph) { - DirectedMultigraph dm = (DirectedMultigraph)(o); -- if (dm.types.equals(types)) { -+ if (dm.typeCounts.equals(typeCounts)) { - return vertexToEdges.equals(dm.vertexToEdges); - } - return false; -@@ -301,7 +298,7 @@ public Set edgeTypes() { - else if (o instanceof Multigraph) { - @SuppressWarnings(""unchecked"") - Multigraph> m = (Multigraph>)o; -- if (m.edgeTypes().equals(types)) { -+ if (m.edgeTypes().equals(typeCounts.keySet())) { - return m.order() == order() - && m.size() == size() - && m.vertices().equals(vertices()) -@@ -372,7 +369,7 @@ public boolean hasCycles() { - * {@inheritDoc} - */ - public int hashCode() { -- return vertexToEdges.keySet().hashCode() ^ (types.hashCode() * size); -+ return vertexToEdges.keySet().hashCode() ^ (typeCounts.hashCode() * size); - } - - /** -@@ -448,6 +445,8 @@ public boolean remove(int vertex) { - // Check whether removing this vertex has caused us to remove - // the last edge for this type in the graph. If so, the graph - // no longer has this type and we need to update the state. -+ for (DirectedTypedEdge e : edges) -+ updateTypeCounts(e.edgeType(), -1); - - // Update any of the subgraphs that had this vertex to notify them - // that it was removed -@@ -467,7 +466,7 @@ public boolean remove(int vertex) { - if (s.vertexSubset.remove(vertex)) { - Iterator subgraphTypesIter = s.validTypes.iterator(); - while (subgraphTypesIter.hasNext()) { -- if (!types.contains(subgraphTypesIter.next())) -+ if (!typeCounts.containsKey(subgraphTypesIter.next())) - subgraphTypesIter.remove(); - } - } -@@ -488,25 +487,24 @@ public boolean remove(DirectedTypedEdge edge) { - // Check whether we've just removed the last edge for this type - // in the graph. If so, the graph no longer has this type and - // we need to update the state. -- -- // TODO !! -- -- -- // Remove this edge type from all the subgraphs as well -- Iterator> sIt = subgraphs.iterator(); -- while (sIt.hasNext()) { -- WeakReference ref = sIt.next(); -- Subgraph s = ref.get(); -- // Check whether this subgraph was already gc'd (the -- // subgraph was no longer in use) and if so, remove the -- // ref from the list to avoid iterating over it again -- if (s == null) { -- sIt.remove(); -- continue; -+ updateTypeCounts(edge.edgeType(), -1); -+ -+ if (!typeCounts.containsKey(edge.edgeType())) { -+ // Remove this edge type from all the subgraphs as well -+ Iterator> sIt = subgraphs.iterator(); -+ while (sIt.hasNext()) { -+ WeakReference ref = sIt.next(); -+ Subgraph s = ref.get(); -+ // Check whether this subgraph was already gc'd (the -+ // subgraph was no longer in use) and if so, remove the -+ // ref from the list to avoid iterating over it again -+ if (s == null) { -+ sIt.remove(); -+ continue; -+ } -+ s.validTypes.remove(edge.edgeType()); - } -- // FILL IN... 
- } -- - return true; - } - return false; -@@ -517,19 +515,6 @@ public boolean remove(DirectedTypedEdge edge) { - */ - public int size() { - return size; --// CountingProcedure count = new CountingProcedure(); --// vertexToEdges.forEachValue(count); --// return count.count / 2; -- } -- -- private class CountingProcedure -- implements TObjectProcedure> { -- -- int count = 0; -- public boolean execute(SparseDirectedTypedEdgeSet edges) { -- count += edges.size(); -- return true; -- } - } - - /** -@@ -546,7 +531,7 @@ public IntSet successors(int vertex) { - * {@inheritDoc} - */ - public DirectedMultigraph subgraph(Set subset) { -- Subgraph sub = new Subgraph(types, subset); -+ Subgraph sub = new Subgraph(typeCounts.keySet(), subset); - subgraphs.add(new WeakReference(sub)); - return sub; - } -@@ -557,7 +542,7 @@ public DirectedMultigraph subgraph(Set subset) { - public DirectedMultigraph subgraph(Set subset, Set edgeTypes) { - if (edgeTypes.isEmpty()) - throw new IllegalArgumentException(""Must specify at least one type""); -- if (!types.containsAll(edgeTypes)) { -+ if (!typeCounts.keySet().containsAll(edgeTypes)) { - throw new IllegalArgumentException( - ""Cannot create subgraph with more types than exist""); - } -@@ -574,12 +559,33 @@ public String toString() { - return ""{ vertices: "" + vertices() + "", edges: "" + edges() + ""}""; - } - -+ /** -+ * Updates how many edges have this type in the graph -+ */ -+ private void updateTypeCounts(T type, int delta) { -+ if (!typeCounts.containsKey(type)) { -+ assert delta > 0 -+ : ""removing edge type that was not originally present""; -+ typeCounts.put(type, delta); -+ } -+ else { -+ int curCount = typeCounts.get(type); -+ int newCount = curCount + delta; -+ assert newCount >= 0 -+ : ""removing edge type that was not originally present""; -+ if (newCount == 0) -+ typeCounts.remove(type); -+ else -+ typeCounts.put(type, newCount); -+ } -+ } -+ - /** - * {@inheritDoc} - */ - public IntSet vertices() { -- // TODO: make this unmodifiable -- return TroveIntSet.wrap(vertexToEdges.keySet()); -+ return PrimitiveCollections.unmodifiableSet( -+ TroveIntSet.wrap(vertexToEdges.keySet())); - } - - /** -@@ -611,8 +617,8 @@ public Iterator> iterator() { - public boolean remove(Object o) { - if (o instanceof DirectedTypedEdge) { - DirectedTypedEdge e = (DirectedTypedEdge)o; -- return DirectedMultigraph.this.types. -- contains(e.edgeType()) -+ return DirectedMultigraph.this.typeCounts. -+ containsKey(e.edgeType()) - && DirectedMultigraph.this.remove((DirectedTypedEdge)o); - } - return false; -@@ -933,7 +939,7 @@ public IntSet getNeighbors(int vertex) { - SparseDirectedTypedEdgeSet edges = vertexToEdges.get(vertex); - return (edges == null) - ? PrimitiveCollections.emptyIntSet() -- : PrimitiveCollections.unmodifiableSet(edges.connected()); -+ : new SubgraphNeighborsView(vertex); - } - - /** -@@ -947,7 +953,7 @@ public boolean hasCycles() { - * {@inheritDoc} - */ - public int hashCode() { -- return vertices().hashCode() ^ (types.hashCode() * size()); -+ return vertices().hashCode() ^ (validTypes.hashCode() * size()); - } - - /** -@@ -1118,7 +1124,6 @@ public DirectedMultigraph subgraph(Set verts, Set edgeTypes) { - * {@inheritDoc} - */ - public IntSet vertices() { -- // Check that the vertices are up to date with the backing graph - return PrimitiveCollections.unmodifiableSet(vertexSubset); - } - -@@ -1323,7 +1328,7 @@ public void remove() { - * subview. 
This view monitors for additions and removals to the set in - * order to update the state of this {@code Subgraph}. - */ -- private class SubgraphNeighborsView extends AbstractSet { -+ private class SubgraphNeighborsView extends AbstractIntSet { - - private int root; - -@@ -1333,15 +1338,21 @@ private class SubgraphNeighborsView extends AbstractSet { - public SubgraphNeighborsView(int root) { - this.root = root; - } -+ -+ public boolean add(int vertex) { -+ throw new UnsupportedOperationException( -+ ""Cannot add vertices to subgraph""); -+ } - -- /** -- * Adds an edge to this vertex and adds the vertex to the graph if it -- * was not present before. -- */ - public boolean add(Integer vertex) { - throw new UnsupportedOperationException( - ""Cannot add vertices to subgraph""); - } -+ -+ public boolean contains(int vertex) { -+ return vertexSubset.contains(vertex) -+ && isNeighboringVertex(vertex); -+ } - - public boolean contains(Object o) { - if (!(o instanceof Integer)) -@@ -1354,12 +1365,18 @@ private boolean isNeighboringVertex(Integer i) { - return Subgraph.this.contains(root, i); - } - -- public Iterator iterator() { -+ public IntIterator iterator() { - return new SubgraphNeighborsIterator(); - } - -- public boolean remove(Object o) { -- throw new UnsupportedOperationException(); -+ public boolean remove(int vertex) { -+ throw new UnsupportedOperationException( -+ ""Cannot remove vertices from subgraph""); -+ } -+ -+ public boolean remove(Object vertex) { -+ throw new UnsupportedOperationException( -+ ""Cannot remove vertices from subgraph""); - } - - public int size() { -@@ -1376,7 +1393,7 @@ public int size() { - * vertices set, which keeps track of which neighboring vertices are - * actually in this subview. - */ -- private class SubgraphNeighborsIterator implements Iterator { -+ private class SubgraphNeighborsIterator implements IntIterator { - - private final Iterator iter; - -@@ -1411,6 +1428,10 @@ public Integer next() { - return cur; - } - -+ public int nextInt() { -+ return next(); -+ } -+ - /** - * Throws an {@link UnsupportedOperationException} if called. - */ -diff --git a/src/edu/ucla/sspace/graph/EdgeSet.java b/src/edu/ucla/sspace/graph/EdgeSet.java -index 49facf17..eec197e7 100755 ---- a/src/edu/ucla/sspace/graph/EdgeSet.java -+++ b/src/edu/ucla/sspace/graph/EdgeSet.java -@@ -64,9 +64,10 @@ public interface EdgeSet extends Set { - EdgeSet copy(IntSet vertices); - - /** -- * Removes all edges instances that connect to the specified vertex. -+ * Removes all edges instances that connect to the specified vertex, -+ * returning the number of edges that were removed, if any. - */ -- boolean disconnect(int vertex); -+ int disconnect(int vertex); - - /** - * Returns the set of {@link Edge} instances that connect the root vertex -diff --git a/src/edu/ucla/sspace/graph/GenericEdgeSet.java b/src/edu/ucla/sspace/graph/GenericEdgeSet.java -index 97ca6fe0..1674aee2 100644 ---- a/src/edu/ucla/sspace/graph/GenericEdgeSet.java -+++ b/src/edu/ucla/sspace/graph/GenericEdgeSet.java -@@ -40,46 +40,34 @@ - * edges that may be contained within. This class keeps track of which vertices - * are in the set as well, allowing for efficient vertex-based operations. - * -- *

Due not knowing the {@link Edge} type, this class prevents modification via -- * adding or removing vertices. -- * - * @author David Jurgens - * - * @param T the type of edge to be stored in the set - */ - public class GenericEdgeSet extends AbstractSet -- implements EdgeSet { -+ implements EdgeSet, java.io.Serializable { - -+ private static final long serialVersionUID = 1L; -+ -+ /** -+ * The vertex to which all edges in the set are connected -+ */ - private final int rootVertex; - --// private final BitSet vertices; -- --// private final Set edges; -- -+ /** -+ * A mapping from connected vertices to the edges that connect to them -+ */ - private final MultiMap vertexToEdges ; - - public GenericEdgeSet(int rootVertex) { - this.rootVertex = rootVertex; --// edges = new HashSet(); --// vertices = new BitSet(); - vertexToEdges = new HashMultiMap(); - } - - /** -- * Adds the edge to this set if one of the vertices is the root vertex and -- * if the non-root vertex has a greater index that this vertex. -+ * {@inheritDoc} - */ - public boolean add(T e) { --// if (e.from() == rootVertex && edges.add(e)) { --// connected.set(e.to()); --// return true; --// } --// else if (e.to() == rootVertex && edges.add(e)) { --// connected.add(e.from()); --// return true; --// } --// return false; -- - return (e.from() == rootVertex && vertexToEdges.put(e.to(), e)) - || (e.to() == rootVertex && vertexToEdges.put(e.from(), e)); - } -@@ -96,15 +84,16 @@ public IntSet connected() { - * {@inheritDoc} - */ - public boolean connects(int vertex) { -- return vertexToEdges.containsKey(vertex); //vertices.get(vertex); -+ return vertexToEdges.containsKey(vertex); - } - - - /** - * {@inheritDoc} - */ -- public boolean disconnect(int vertex) { -- return vertexToEdges.remove(vertex) != null; -+ public int disconnect(int vertex) { -+ Set edges = vertexToEdges.remove(vertex); -+ return (edges == null) ? 0 : edges.size(); - } - - /** -diff --git a/src/edu/ucla/sspace/graph/GraphRandomizer.java b/src/edu/ucla/sspace/graph/GraphRandomizer.java -deleted file mode 100644 -index cb1f2f4a..00000000 ---- a/src/edu/ucla/sspace/graph/GraphRandomizer.java -+++ /dev/null -@@ -1,126 +0,0 @@ --/* -- * Copyright 2011 David Jurgens -- * -- * This file is part of the S-Space package and is covered under the terms and -- * conditions therein. -- * -- * The S-Space package is free software: you can redistribute it and/or modify -- * it under the terms of the GNU General Public License version 2 as published -- * by the Free Software Foundation and distributed hereunder to you. -- * -- * THIS SOFTWARE IS PROVIDED ""AS IS"" AND NO REPRESENTATIONS OR WARRANTIES, -- * EXPRESS OR IMPLIED ARE MADE. BY WAY OF EXAMPLE, BUT NOT LIMITATION, WE MAKE -- * NO REPRESENTATIONS OR WARRANTIES OF MERCHANT- ABILITY OR FITNESS FOR ANY -- * PARTICULAR PURPOSE OR THAT THE USE OF THE LICENSED SOFTWARE OR DOCUMENTATION -- * WILL NOT INFRINGE ANY THIRD PARTY PATENTS, COPYRIGHTS, TRADEMARKS OR OTHER -- * RIGHTS. -- * -- * You should have received a copy of the GNU General Public License -- * along with this program. If not, see . 
-- */ -- --package edu.ucla.sspace.graph; -- --import java.util.ArrayList; --import java.util.Iterator; --import java.util.List; --import java.util.Set; -- -- --public class GraphRandomizer { -- -- public static void shufflePreserveDegreeInMemory(Graph g) { -- -- List edges = new ArrayList(g.edges()); -- -- // Decide on number of iterations -- int swapIterations = 3 * g.size(); -- for (int s = 0; s < swapIterations; ++s) { -- -- // Pick two vertices from which the edges will be selected -- int i = (int)(Math.random() * edges.size()); -- int j = i; -- // Pick another vertex that is not v1 -- while (i == j) -- j = (int)(Math.random() * edges.size()); -- -- T e1 = edges.get(i); -- T e2 = edges.get(j); -- -- // Swap their end points -- T swapped1 = e1.clone(e1.from(), e2.to()); -- T swapped2 = e2.clone(e2.from(), e1.to()); -- -- // Check that the new edges do not already exist in the graph -- if (g.contains(swapped1) -- || g.contains(swapped2)) -- continue; -- -- // Remove the old edges -- g.remove(e1); -- g.remove(e2); -- -- // Put in the swapped-end-point edges -- g.add(swapped1); -- g.add(swapped2); -- -- // Update the in-memory edges set so that if these edges are drawn -- // again, they don't point to old edges -- edges.set(i, swapped1); -- edges.set(j, swapped2); -- } -- } -- -- public static void shufflePreserveDegree(Graph g) { -- -- // Copy the vertices to a list to support random access -- List vertices = new ArrayList(g.vertices()); -- -- // Decide on number of iterations -- int swapIterations = 3 * g.size(); -- for (int i = 0; i < swapIterations; ++i) { -- -- // Pick two vertices from which the edges will be selected -- int v1 = vertices.get((int)(Math.random() * vertices.size())); -- int v2 = v1; -- // Pick another vertex that is not v1 -- while (v1 == v2) -- v2 = vertices.get((int)(Math.random() * vertices.size())); -- -- // From the two vertices, select an edge from each of their adjacency -- // lists. 
-- T e1 = getRandomEdge(g.getAdjacencyList(v1)); -- T e2 = getRandomEdge(g.getAdjacencyList(v2)); -- -- // Swap their end points -- T swapped1 = e1.clone(e1.from(), e2.to()); -- T swapped2 = e2.clone(e2.from(), e1.to()); -- -- // Check that the new edges do not already exist in the graph -- if (g.contains(swapped1) -- || g.contains(swapped2)) -- continue; -- -- // Remove the old edges -- g.remove(e1); -- g.remove(e2); -- -- // Put in the swapped-end-point edges -- g.add(swapped1); -- g.add(swapped2); -- } -- } -- -- private static T getRandomEdge(Set edges) { -- int edgeToSelect = (int)(edges.size() * Math.random()); -- Iterator it = edges.iterator(); -- int i = 0; -- while (it.hasNext()) { -- T edge = it.next(); -- if (i == edgeToSelect) -- return edge; -- i++; -- } -- throw new AssertionError(""Random edge selection logic is incorrect""); -- } --} -\ No newline at end of file -diff --git a/src/edu/ucla/sspace/graph/SimpleWeightedEdge.java b/src/edu/ucla/sspace/graph/SimpleWeightedEdge.java -index eae597ea..c45988e2 100644 ---- a/src/edu/ucla/sspace/graph/SimpleWeightedEdge.java -+++ b/src/edu/ucla/sspace/graph/SimpleWeightedEdge.java -@@ -70,8 +70,7 @@ public boolean equals(Object o) { - * {@inheritDoc} - */ - public int hashCode() { -- long bits = Double.doubleToLongBits(weight); -- return from ^ to ^ (int)(bits ^ (bits >>> 32)); -+ return from ^ to; - } - - /** -diff --git a/src/edu/ucla/sspace/graph/SparseDirectedEdgeSet.java b/src/edu/ucla/sspace/graph/SparseDirectedEdgeSet.java -index a0dd1768..5b3ba736 100644 ---- a/src/edu/ucla/sspace/graph/SparseDirectedEdgeSet.java -+++ b/src/edu/ucla/sspace/graph/SparseDirectedEdgeSet.java -@@ -43,7 +43,9 @@ - * @author David Jurgens - */ - public class SparseDirectedEdgeSet extends AbstractSet -- implements EdgeSet { -+ implements EdgeSet, java.io.Serializable { -+ -+ private static final long serialVersionUID = 1L; - - /** - * The vertex to which all edges in the set are connected -@@ -149,8 +151,13 @@ public SparseDirectedEdgeSet copy(IntSet vertices) { - /** - * {@inheritDoc} - */ -- public boolean disconnect(int vertex) { -- return inEdges.remove(vertex) | outEdges.remove(vertex); -+ public int disconnect(int vertex) { -+ int removed = 0; -+ if (inEdges.remove(vertex)) -+ removed++; -+ if (outEdges.remove(vertex)) -+ removed++; -+ return removed; - } - - /** -diff --git a/src/edu/ucla/sspace/graph/SparseDirectedTypedEdgeSet.java b/src/edu/ucla/sspace/graph/SparseDirectedTypedEdgeSet.java -index 2c6828a1..e267bddb 100644 ---- a/src/edu/ucla/sspace/graph/SparseDirectedTypedEdgeSet.java -+++ b/src/edu/ucla/sspace/graph/SparseDirectedTypedEdgeSet.java -@@ -59,9 +59,10 @@ - - - /** -- * An {@link EdgeSet} implementation that stores {@link TypedEdge} instances for -- * a vertex. This class provides additional methods beyond the {@code EdgeSet} -- * interface for interacting with edges on the basis of their type. -+ * An {@link EdgeSet} implementation that stores {@link DirectedTypedEdge} -+ * instances for a vertex. This class provides additional methods beyond the -+ * {@code EdgeSet} interface for interacting with edges on the basis of their -+ * type and their orientation. - */ - public class SparseDirectedTypedEdgeSet - extends AbstractSet> -@@ -305,6 +306,8 @@ public SparseDirectedTypedEdgeSet copy(IntSet vertices) { - * reasons. I was hoping the DirectedMultigraph.copy() could be sped up by - * copying the raw data faster than the Edge-based data, but this - * implementation actually slows down DirectedMultigraph.copy() by 100X. 
-+ * It's being left in as a future study on how to fix it to speed up the -+ * copy operation. - */ - - // public SparseDirectedTypedEdgeSet copy(Set vertices) { -@@ -340,19 +343,28 @@ public SparseDirectedTypedEdgeSet copy(IntSet vertices) { - // } - - /** -- * Removes all edges to {@code v}. -+ * {@inheritDoc} - */ -- public boolean disconnect(int v) { -+ public int disconnect(int v) { - if (connected.remove(v)) { -+ int removed = 0; - BitSet b = inEdges.remove(v); -- if (b != null) -- size -= b.cardinality(); -+ if (b != null) { -+ int edges = b.cardinality(); -+ size -= edges; -+ removed += edges; -+ } - b = outEdges.remove(v); -- if (b != null) -- size -= b.cardinality(); -- return true; -+ if (b != null) { -+ int edges = b.cardinality(); -+ size -= edges; -+ removed += edges; -+ } -+ assert removed > 0 : -+ ""connected removed an edge that wasn't listed elsewhere""; -+ return removed; - } -- return false; -+ return 0; - } - - /** -diff --git a/src/edu/ucla/sspace/graph/SparseSymmetricEdgeSet.java b/src/edu/ucla/sspace/graph/SparseSymmetricEdgeSet.java -deleted file mode 100755 -index 82a5e461..00000000 ---- a/src/edu/ucla/sspace/graph/SparseSymmetricEdgeSet.java -+++ /dev/null -@@ -1,175 +0,0 @@ --/* -- * Copyright 2011 David Jurgens -- * -- * This file is part of the S-Space package and is covered under the terms and -- * conditions therein. -- * -- * The S-Space package is free software: you can redistribute it and/or modify -- * it under the terms of the GNU General Public License version 2 as published -- * by the Free Software Foundation and distributed hereunder to you. -- * -- * THIS SOFTWARE IS PROVIDED ""AS IS"" AND NO REPRESENTATIONS OR WARRANTIES, -- * EXPRESS OR IMPLIED ARE MADE. BY WAY OF EXAMPLE, BUT NOT LIMITATION, WE MAKE -- * NO REPRESENTATIONS OR WARRANTIES OF MERCHANT- ABILITY OR FITNESS FOR ANY -- * PARTICULAR PURPOSE OR THAT THE USE OF THE LICENSED SOFTWARE OR DOCUMENTATION -- * WILL NOT INFRINGE ANY THIRD PARTY PATENTS, COPYRIGHTS, TRADEMARKS OR OTHER -- * RIGHTS. -- * -- * You should have received a copy of the GNU General Public License -- * along with this program. If not, see . -- */ -- --package edu.ucla.sspace.graph; -- --import java.util.AbstractSet; --import java.util.Collections; --import java.util.Iterator; --import java.util.Set; -- --import edu.ucla.sspace.util.OpenIntSet; -- -- --/** -- * -- */ --public class SparseSymmetricEdgeSet { -- -- private final int rootVertex; -- -- private final OpenIntSet edges; -- -- public SparseSymmetricEdgeSet(int rootVertex) { -- this.rootVertex = rootVertex; -- edges = new OpenIntSet(); -- } -- -- /** -- * Adds the edge to this set if one of the vertices is the root vertex and -- * if the non-root vertex has a greater index that this vertex. -- */ -- public boolean add(Edge e) { -- int toAdd = -1; -- if (e.from() == rootVertex) { -- toAdd = e.to(); -- } -- else { -- if (e.to() != rootVertex) -- return false; -- // else e.to() == rootVertex -- toAdd = e.from(); -- } -- -- // Only add the vertex if the index for it is greated than this vertex. -- // In a set of EdgeSets, this ensure that for two indices i,j only one -- // edge is ever present -- if (rootVertex < toAdd) -- return edges.add(toAdd); -- return false; -- } -- -- /** -- * {@inheritDoc} -- */ -- public Set connected() { -- // REMINDER: wrap to prevent adding self edges? 
-- return edges; -- } -- -- /** -- * {@inheritDoc} -- */ -- public boolean connects(int vertex) { -- return edges.contains(vertex); -- } -- -- /** -- * {@inheritDoc} -- */ -- public boolean contains(Object o) { -- if (o instanceof Edge) { -- Edge e = (Edge)o; -- if (e.to() == rootVertex) -- return e.from() > rootVertex && edges.contains(e.from()); -- else -- return e.from() == rootVertex && edges.contains(e.to()); -- } -- return false; -- } -- -- /** -- * {@inheritDoc} -- */ -- public boolean disconnect(int vertex) { -- return edges.remove(vertex); -- } -- -- /** -- * {@inheritDoc} -- */ -- public Set getEdges(int vertex) { -- return (edges.contains(vertex)) -- ? Collections.singleton(new SimpleEdge(rootVertex, vertex)) -- : null; -- } -- -- /** -- * {@inheritDoc} -- */ -- public int getRoot() { -- return rootVertex; -- } -- -- /** -- * {@inheritDoc} -- */ -- public Iterator iterator() { -- return new EdgeIterator(); -- } -- -- /** -- * {@inheritDoc} -- */ -- public boolean remove(Object o) { -- if (o instanceof Edge) { -- Edge e = (Edge)o; -- if (e.to() == rootVertex) -- return e.from() > rootVertex && edges.remove(e.from()); -- else -- return e.from() == rootVertex && edges.remove(e.to()); -- } -- return false; -- } -- -- /** -- * {@inheritDoc} -- */ -- public int size() { -- return edges.size(); -- } -- -- /** -- * An iterator over the edges in this set that constructs {@link Edge} -- * instances as it traverses through the set of connected vertices. -- */ -- private class EdgeIterator implements Iterator { -- -- private Iterator otherVertices; -- -- public EdgeIterator() { -- otherVertices = edges.iterator(); -- } -- -- public boolean hasNext() { -- return otherVertices.hasNext(); -- } -- -- public Edge next() { -- Integer i = otherVertices.next(); -- return new SimpleEdge(rootVertex, i); -- } -- -- public void remove() { -- otherVertices.remove(); -- } -- } --} -\ No newline at end of file -diff --git a/src/edu/ucla/sspace/graph/SparseTypedEdgeSet.java b/src/edu/ucla/sspace/graph/SparseTypedEdgeSet.java -index 56fc0271..1dffc854 100644 ---- a/src/edu/ucla/sspace/graph/SparseTypedEdgeSet.java -+++ b/src/edu/ucla/sspace/graph/SparseTypedEdgeSet.java -@@ -278,15 +278,16 @@ public SparseTypedEdgeSet copy(IntSet vertices) { - } - - /** -- * Removes all edges to {@code v}. -+ * {@inheritDoc} - */ -- public boolean disconnect(int v) { -+ public int disconnect(int v) { - BitSet b = edges.remove(v); - if (b != null) { -- size -= b.cardinality(); -- return true; -+ int edges = b.cardinality(); -+ size -= edges; -+ return edges; - } -- return false; -+ return 0; - } - - /** -@@ -518,7 +519,10 @@ else if (curIndex != oldIndex) { - } - } - -- -+ /** -+ * A utility class for exposing the objects for types of the edges in this -+ * set, which are otherwise represented as bits. -+ */ - private class Types extends AbstractSet { - - public boolean contains(Object o) { -diff --git a/src/edu/ucla/sspace/graph/SparseUndirectedEdgeSet.java b/src/edu/ucla/sspace/graph/SparseUndirectedEdgeSet.java -index 67c991c6..705fc71f 100644 ---- a/src/edu/ucla/sspace/graph/SparseUndirectedEdgeSet.java -+++ b/src/edu/ucla/sspace/graph/SparseUndirectedEdgeSet.java -@@ -41,7 +41,9 @@ - * A {@link EdgeSet} implementation for holding {@link Edge} instances. 
- */ - public class SparseUndirectedEdgeSet extends AbstractSet -- implements EdgeSet { -+ implements EdgeSet, java.io.Serializable { -+ -+ private static final long serialVersionUID = 1L; - - /** - * The vertex that is connected to all the edges in this set -@@ -135,8 +137,8 @@ public SparseUndirectedEdgeSet copy(IntSet vertices) { - /** - * {@inheritDoc} - */ -- public boolean disconnect(int vertex) { -- return edges.remove(vertex); -+ public int disconnect(int vertex) { -+ return (edges.remove(vertex)) ? 1 : 0; - } - - /** -diff --git a/src/edu/ucla/sspace/graph/SparseWeightedEdgeSet.java b/src/edu/ucla/sspace/graph/SparseWeightedEdgeSet.java -index 448a0cec..bb022d0c 100644 ---- a/src/edu/ucla/sspace/graph/SparseWeightedEdgeSet.java -+++ b/src/edu/ucla/sspace/graph/SparseWeightedEdgeSet.java -@@ -49,7 +49,7 @@ - * two vertices will only have at most one edge between them. If an edge exists - * for vertices {@code i} and {@code j} with weight {@code w}1, then - * adding a new edge to the same vertices with weight {@code w}2 will -- * not add a parallel edge and increase the set of this set, even though the -+ * not add a parallel edge and increase the size of this set, even though the - * edges are not equal. Rather, the weight on the edge between the two vertices - * is changed to {@code w}2. Similarly, any contains or removal - * operation will return its value based on the {@code WeightedEdge}'s vertices -@@ -176,12 +176,12 @@ public SparseWeightedEdgeSet copy(IntSet vertices) { - /** - * {@inheritDoc} - */ -- public boolean disconnect(int vertex) { -+ public int disconnect(int vertex) { - if (edges.containsKey(vertex)) { - edges.remove(vertex); -- return true; -+ return 1; - } -- return false; -+ return 0; - } - - /** -diff --git a/src/edu/ucla/sspace/graph/UndirectedMultigraph.java b/src/edu/ucla/sspace/graph/UndirectedMultigraph.java -index 90983a6c..f74e9b63 100644 ---- a/src/edu/ucla/sspace/graph/UndirectedMultigraph.java -+++ b/src/edu/ucla/sspace/graph/UndirectedMultigraph.java -@@ -53,8 +53,9 @@ - import gnu.trove.iterator.TIntIterator; - import gnu.trove.iterator.TIntObjectIterator; - import gnu.trove.map.TIntObjectMap; -+import gnu.trove.map.TObjectIntMap; - import gnu.trove.map.hash.TIntObjectHashMap; --import gnu.trove.procedure.TObjectProcedure; -+import gnu.trove.map.hash.TObjectIntHashMap; - import gnu.trove.set.TIntSet; - import gnu.trove.set.hash.TIntHashSet; - -@@ -73,9 +74,9 @@ public class UndirectedMultigraph - private static final long serialVersionUID = 1L; - - /** -- * The set of types contained in this graph. -+ * The count of the type distribution for all edges in the graph. - */ -- private final Set types; -+ private final TObjectIntMap typeCounts; - - /** - * The set of vertices in this mutligraph. 
This set is maintained -@@ -105,7 +106,7 @@ public class UndirectedMultigraph - * Creates an empty graph with node edges - */ - public UndirectedMultigraph() { -- types = new HashSet(); -+ typeCounts = new TObjectIntHashMap(); - vertexToEdges = new TIntObjectHashMap>(); - subgraphs = new ArrayList>(); - size = 0; -@@ -144,7 +145,7 @@ public boolean add(TypedEdge e) { - vertexToEdges.put(e.from(), from); - } - if (from.add(e)) { -- types.add(e.edgeType()); -+ updateTypeCounts(e.edgeType(), 1); - SparseTypedEdgeSet to = vertexToEdges.get(e.to()); - if (to == null) { - to = new SparseTypedEdgeSet(e.to()); -@@ -162,7 +163,7 @@ public boolean add(TypedEdge e) { - */ - public void clear() { - vertexToEdges.clear(); -- types.clear(); -+ typeCounts.clear(); - size = 0; - } - -@@ -231,8 +232,6 @@ public UndirectedMultigraph copy(Set toCopy) { - if (!vertexToEdges.containsKey(v)) - throw new IllegalArgumentException( - ""Request copy of non-present vertex: "" + v); --// SparseTypedEdgeSet edges = vertexToEdges.get(v); --// g.vertexToEdges.put(v, edges.copy(toCopy)); - - g.add(v); - SparseTypedEdgeSet edges = vertexToEdges.get(v); -@@ -246,8 +245,6 @@ public UndirectedMultigraph copy(Set toCopy) { - g.add(e); - } - } --// if (toCopy.size() > 0) --// System.out.printf(""Copy %d vertices (%d), %d edges%n"", g.order(), toCopy.size(), g.size()); - return g; - } - -@@ -283,7 +280,7 @@ public Set> edges(T t) { - * Returns the set of edge types currently present in this graph. - */ - public Set edgeTypes() { -- return Collections.unmodifiableSet(types); -+ return Collections.unmodifiableSet(typeCounts.keySet()); - } - - /** -@@ -292,7 +289,7 @@ public Set edgeTypes() { - @Override public boolean equals(Object o) { - if (o instanceof UndirectedMultigraph) { - UndirectedMultigraph dm = (UndirectedMultigraph)(o); -- if (dm.types.equals(types)) { -+ if (dm.typeCounts.equals(typeCounts)) { - return vertexToEdges.equals(dm.vertexToEdges); - } - return false; -@@ -300,7 +297,7 @@ public Set edgeTypes() { - else if (o instanceof Multigraph) { - @SuppressWarnings(""unchecked"") - Multigraph> m = (Multigraph>)o; -- if (m.edgeTypes().equals(types)) { -+ if (m.edgeTypes().equals(typeCounts.keySet())) { - return m.order() == order() - && m.size() == size() - && m.vertices().equals(vertices()) -@@ -371,7 +368,8 @@ public boolean hasCycles() { - * {@inheritDoc} - */ - public int hashCode() { -- return vertexToEdges.keySet().hashCode() ^ (types.hashCode() * size); -+ return vertexToEdges.keySet().hashCode() ^ -+ (typeCounts.keySet().hashCode() * size); - } - - /** -@@ -397,6 +395,8 @@ public boolean remove(int vertex) { - // Check whether removing this vertex has caused us to remove - // the last edge for this type in the graph. If so, the graph - // no longer has this type and we need to update the state. 
-+ for (TypedEdge e : edges) -+ updateTypeCounts(e.edgeType(), -1); - - // Update any of the subgraphs that had this vertex to notify them - // that it was removed -@@ -416,7 +416,7 @@ public boolean remove(int vertex) { - if (s.vertexSubset.remove(vertex)) { - Iterator subgraphTypesIter = s.validTypes.iterator(); - while (subgraphTypesIter.hasNext()) { -- if (!types.contains(subgraphTypesIter.next())) -+ if (!typeCounts.containsKey(subgraphTypesIter.next())) - subgraphTypesIter.remove(); - } - } -@@ -434,26 +434,27 @@ public boolean remove(TypedEdge edge) { - if (edges != null && edges.remove(edge)) { - vertexToEdges.get(edge.from()).remove(edge); - size--; -+ - // Check whether we've just removed the last edge for this type - // in the graph. If so, the graph no longer has this type and - // we need to update the state. -- -- // TODO !! -- -- -- // Remove this edge type from all the subgraphs as well -- Iterator> sIt = subgraphs.iterator(); -- while (sIt.hasNext()) { -- WeakReference ref = sIt.next(); -- Subgraph s = ref.get(); -- // Check whether this subgraph was already gc'd (the -- // subgraph was no longer in use) and if so, remove the -- // ref from the list to avoid iterating over it again -- if (s == null) { -- sIt.remove(); -- continue; -+ updateTypeCounts(edge.edgeType(), -1); -+ -+ if (!typeCounts.containsKey(edge.edgeType())) { -+ // Remove this edge type from all the subgraphs as well -+ Iterator> sIt = subgraphs.iterator(); -+ while (sIt.hasNext()) { -+ WeakReference ref = sIt.next(); -+ Subgraph s = ref.get(); -+ // Check whether this subgraph was already gc'd (the -+ // subgraph was no longer in use) and if so, remove the -+ // ref from the list to avoid iterating over it again -+ if (s == null) { -+ sIt.remove(); -+ continue; -+ } -+ s.validTypes.remove(edge.edgeType()); - } -- // FILL IN... 
- } - - return true; -@@ -466,26 +467,13 @@ public boolean remove(TypedEdge edge) { - */ - public int size() { - return size; --// CountingProcedure count = new CountingProcedure(); --// vertexToEdges.forEachValue(count); --// return count.count / 2; -- } -- -- private class CountingProcedure -- implements TObjectProcedure> { -- -- int count = 0; -- public boolean execute(SparseTypedEdgeSet edges) { -- count += edges.size(); -- return true; -- } - } - - /** - * {@inheritDoc} - */ - public UndirectedMultigraph subgraph(Set subset) { -- Subgraph sub = new Subgraph(types, subset); -+ Subgraph sub = new Subgraph(typeCounts.keySet(), subset); - subgraphs.add(new WeakReference(sub)); - return sub; - } -@@ -496,7 +484,7 @@ public UndirectedMultigraph subgraph(Set subset) { - public UndirectedMultigraph subgraph(Set subset, Set edgeTypes) { - if (edgeTypes.isEmpty()) - throw new IllegalArgumentException(""Must specify at least one type""); -- if (!types.containsAll(edgeTypes)) { -+ if (!typeCounts.keySet().containsAll(edgeTypes)) { - throw new IllegalArgumentException( - ""Cannot create subgraph with more types than exist""); - } -@@ -513,11 +501,31 @@ public String toString() { - return ""{ vertices: "" + vertices() + "", edges: "" + edges() + ""}""; - } - -+ /** -+ * Updates how many edges have this type in the graph -+ */ -+ private void updateTypeCounts(T type, int delta) { -+ if (!typeCounts.containsKey(type)) { -+ assert delta > 0 -+ : ""removing edge type that was not originally present""; -+ typeCounts.put(type, delta); -+ } -+ else { -+ int curCount = typeCounts.get(type); -+ int newCount = curCount + delta; -+ assert newCount >= 0 -+ : ""removing edge type that was not originally present""; -+ if (newCount == 0) -+ typeCounts.remove(type); -+ else -+ typeCounts.put(type, newCount); -+ } -+ } -+ - /** - * {@inheritDoc} - */ - public IntSet vertices() { -- // TODO: make this unmodifiable - return PrimitiveCollections.unmodifiableSet( - TroveIntSet.wrap(vertexToEdges.keySet())); - } -@@ -551,8 +559,8 @@ public Iterator> iterator() { - public boolean remove(Object o) { - if (o instanceof TypedEdge) { - TypedEdge e = (TypedEdge)o; -- return UndirectedMultigraph.this.types. -- contains(e.edgeType()) -+ return UndirectedMultigraph.this.typeCounts. 
-+ containsKey(e.edgeType()) - && UndirectedMultigraph.this.remove((TypedEdge)o); - } - return false; -@@ -886,7 +894,8 @@ public boolean hasCycles() { - * {@inheritDoc} - */ - public int hashCode() { -- return vertices().hashCode() ^ (types.hashCode() * size()); -+ return vertices().hashCode() ^ -+ (validTypes.hashCode() * size()); - } - - /** -diff --git a/src/edu/ucla/sspace/mains/FixedDurationTemporalRandomIndexingMain.java b/src/edu/ucla/sspace/mains/FixedDurationTemporalRandomIndexingMain.java -index 53a3bf3e..8466e9a5 100644 ---- a/src/edu/ucla/sspace/mains/FixedDurationTemporalRandomIndexingMain.java -+++ b/src/edu/ucla/sspace/mains/FixedDurationTemporalRandomIndexingMain.java -@@ -27,7 +27,6 @@ - import edu.ucla.sspace.common.SemanticSpaceIO.SSpaceFormat; - import edu.ucla.sspace.common.Similarity; - import edu.ucla.sspace.common.Similarity.SimType; --import edu.ucla.sspace.common.WordComparator; - - import edu.ucla.sspace.ri.IndexVectorUtil; - -@@ -43,6 +42,8 @@ - - import edu.ucla.sspace.util.CombinedIterator; - import edu.ucla.sspace.util.MultiMap; -+import edu.ucla.sspace.util.NearestNeighborFinder; -+import edu.ucla.sspace.util.SimpleNearestNeighborFinder; - import edu.ucla.sspace.util.SortedMultiMap; - import edu.ucla.sspace.util.TimeSpan; - import edu.ucla.sspace.util.TreeMultiMap; -@@ -172,12 +173,6 @@ public class FixedDurationTemporalRandomIndexingMain { - */ - private boolean printShiftRankings; - -- /** -- * The word comparator used for computing similarity scores when calculating -- * the semantic shift. -- */ -- private WordComparator wordComparator; -- - /** - * A mapping from each word to the vectors that account for its temporal - * semantics according to the specified time span -@@ -368,9 +363,6 @@ public void run(String[] args) throws Exception { - numThreads = argOptions.getIntOption(""threads""); - } - -- // initialize the word comparator based on the number of threads -- wordComparator = new WordComparator(numThreads); -- - overwrite = true; - if (argOptions.hasOption(""overwrite"")) { - overwrite = argOptions.getBooleanOption(""overwrite""); -@@ -680,12 +672,13 @@ private void printWordNeighbors(String dateString, - LOGGER.info(""printing the most similar words for the semantic partition"" + - "" starting at: "" + dateString); - -+ NearestNeighborFinder nnf = -+ new SimpleNearestNeighborFinder(semanticPartition); -+ - // generate the similarity lists - for (String toExamine : interestingWords) { - SortedMultiMap mostSimilar = -- wordComparator.getMostSimilar(toExamine, semanticPartition, -- interestingWordNeighbors, -- Similarity.SimType.COSINE); -+ nnf.getMostSimilar(toExamine, interestingWordNeighbors); - - if (mostSimilar != null) { - File neighborFile = -diff --git a/src/edu/ucla/sspace/mains/LexSubWordsiMain.java b/src/edu/ucla/sspace/mains/LexSubWordsiMain.java -index 262ac826..62d0a3a0 100644 ---- a/src/edu/ucla/sspace/mains/LexSubWordsiMain.java -+++ b/src/edu/ucla/sspace/mains/LexSubWordsiMain.java -@@ -6,7 +6,6 @@ - import edu.ucla.sspace.common.Similarity; - import edu.ucla.sspace.common.Similarity.SimType; - import edu.ucla.sspace.common.StaticSemanticSpace; --import edu.ucla.sspace.common.WordComparator; - - import edu.ucla.sspace.hal.LinearWeighting; - -@@ -15,7 +14,9 @@ - import edu.ucla.sspace.text.corpora.SemEvalLexSubReader; - - import edu.ucla.sspace.util.MultiMap; -+import edu.ucla.sspace.util.NearestNeighborFinder; - import edu.ucla.sspace.util.SerializableUtil; -+import edu.ucla.sspace.util.SimpleNearestNeighborFinder; - - import 
edu.ucla.sspace.vector.SparseDoubleVector; - import edu.ucla.sspace.vector.Vector; -@@ -59,7 +60,7 @@ public static void main(String[] args) { - } - - public static class LexSubWordsi implements Wordsi { -- private final WordComparator comparator; -+ private final NearestNeighborFinder comparator; - - private final PrintWriter output; - -@@ -67,9 +68,9 @@ public static class LexSubWordsi implements Wordsi { - - public LexSubWordsi(String outFile, String sspaceFile) { - try { -- comparator = new WordComparator(); - output = new PrintWriter(outFile); - wordsiSpace = new StaticSemanticSpace(sspaceFile); -+ comparator = new SimpleNearestNeighborFinder(wordsiSpace); - } catch (IOException ioe) { - throw new IOError(ioe); - } -@@ -86,10 +87,9 @@ public void handleContextVector(String focus, - System.err.printf(""Processing %s\n"", secondary); - String bestSense = getBaseSense(focus, vector); - if (bestSense == null) -- return; -- -+ return; - MultiMap topWords = comparator.getMostSimilar( -- bestSense, wordsiSpace, 10, SimType.COSINE); -+ bestSense, 10); - output.printf(""%s ::"", secondary); - for (String term : topWords.values()) - output.printf("" %s"", term); -diff --git a/src/edu/ucla/sspace/text/LabeledParsedStringDocument.java b/src/edu/ucla/sspace/text/LabeledParsedStringDocument.java -index 2fe2ac90..acd25992 100644 ---- a/src/edu/ucla/sspace/text/LabeledParsedStringDocument.java -+++ b/src/edu/ucla/sspace/text/LabeledParsedStringDocument.java -@@ -64,6 +64,21 @@ public DependencyTreeNode[] parsedDocument() { - * {@inheritDoc} - */ - public String text() { -+ DependencyTreeNode[] nodes = parsedDocument(); -+ StringBuilder sb = new StringBuilder(nodes.length * 8); -+ for (int i = 0; i < nodes.length; ++i) { -+ String token = nodes[i].word(); -+ sb.append(token); -+ if (i+1 < nodes.length) -+ sb.append(' '); -+ } -+ return sb.toString(); -+ } -+ -+ /** -+ * {@inheritDoc} -+ */ -+ public String prettyPrintText() { - Pattern punctuation = Pattern.compile(""[!,-.:;?`]""); - DependencyTreeNode[] nodes = parsedDocument(); - StringBuilder sb = new StringBuilder(nodes.length * 8); -diff --git a/src/edu/ucla/sspace/text/ParsedDocument.java b/src/edu/ucla/sspace/text/ParsedDocument.java -index e5406df0..fcc9b566 100644 ---- a/src/edu/ucla/sspace/text/ParsedDocument.java -+++ b/src/edu/ucla/sspace/text/ParsedDocument.java -@@ -1,5 +1,5 @@ - /* -- * Copyright 2011 David Jurgens -+ * Copyright 2012 David Jurgens - * - * This file is part of the S-Space package and is covered under the terms and - * conditions therein. -@@ -40,7 +40,18 @@ public interface ParsedDocument extends Document { - - /** - * Returns the text of the parsed document without any of the -- * parsing-related annotation. -+ * parsing-related annotation, with each parsed token separated by -+ * whitespace. - */ - String text(); -+ -+ /** -+ * Returns a pretty-printed version of the document's text without any of -+ * the parsing-related annotation and using heuristics to appropriately -+ * space punctuation, quotes, and contractions. This methods is intended as -+ * only a useful way to displaying the document's text in a more readable -+ * format than {@link #text()}, but makes no claims as to reproducing the -+ * original surface form of the document prior to parsing. 
-+ */ -+ String prettyPrintText(); - } -diff --git a/src/edu/ucla/sspace/text/PukWaCDocumentIterator.java b/src/edu/ucla/sspace/text/PukWaCDocumentIterator.java -index 34e37664..7037e315 100644 ---- a/src/edu/ucla/sspace/text/PukWaCDocumentIterator.java -+++ b/src/edu/ucla/sspace/text/PukWaCDocumentIterator.java -@@ -1,5 +1,5 @@ - /* -- * Copyright 2011 David Jurgens -+ * Copyright 2012 David Jurgens - * - * This file is part of the S-Space package and is covered under the terms and - * conditions therein. -@@ -36,12 +36,14 @@ - * An iterator implementation that returns {@link Document} containg a single - * dependency parsed sentence given a file in the CoNLL Format which -- * is contained in the XML format provided in the WaCkypedia corpus. -+ * is contained in the XML format provided in the WaCkypedia corpus. See the WaCky group's -+ * website for more information on the PukWaC. - */ - public class PukWaCDocumentIterator implements Iterator { - - /** -- * The extractor used to build trees from the pukWaC documents -+ * The extractor used to build trees from the PukWaC documents - */ - private static final DependencyExtractor extractor = - new WaCKyDependencyExtractor(); -@@ -65,7 +67,7 @@ public class PukWaCDocumentIterator implements Iterator { - * Creates an {@code Iterator} over the file where each document returned - * contains the sequence of dependency parsed words composing a sentence.. - * -- * @param documentsFile the name of the pukWaC file containing dependency -+ * @param documentsFile the name of the PukWaC file containing dependency - * parsed sentences in the CoNLL - * Format separated by XML tags for the sentences and articles -@@ -100,7 +102,7 @@ else if (line.equals("""")) { - while ((line = documentsReader.readLine()) != null - && !line.equals("""")) { - -- // Unfortunately, the XML of the pukWaC is broken and some -+ // Unfortunately, the XML of the PukWaC is broken and some - // elements are inside the elements, so this code - // is needed to avoid putting those inside the CONLL data - if (line.contains(""CoNLL Format -- * -- *

-- * -- * This class is thread-safe. -+ * An iterator implementation that returns {@link Document} instances labled -+ * with the source URL from which its text was obtained, as specified in the -+ * ukWaC. See the WaCky group's -+ * website for more information on the ukWaC. - */ - public class UkWaCDocumentIterator implements Iterator { - -diff --git a/src/edu/ucla/sspace/tools/NearestNeighborFinderTool.java b/src/edu/ucla/sspace/tools/NearestNeighborFinderTool.java -index 756b24d2..cb48efa0 100644 ---- a/src/edu/ucla/sspace/tools/NearestNeighborFinderTool.java -+++ b/src/edu/ucla/sspace/tools/NearestNeighborFinderTool.java -@@ -28,6 +28,7 @@ - import edu.ucla.sspace.util.LoggerUtil; - import edu.ucla.sspace.util.MultiMap; - import edu.ucla.sspace.util.NearestNeighborFinder; -+import edu.ucla.sspace.util.PartitioningNearestNeighborFinder; - import edu.ucla.sspace.util.SerializableUtil; - - import java.io.File; -@@ -93,27 +94,29 @@ public static void main(String[] args) { - SemanticSpaceIO.load(options.getStringOption('C')); - int numWords = sspace.getWords().size(); - // See how many principle vectors to create -- int principleVectors = -1; -+ int numPrincipleVectors = -1; - if (options.hasOption('p')) { -- principleVectors = options.getIntOption('p'); -- if (principleVectors > numWords) { -+ numPrincipleVectors = options.getIntOption('p'); -+ if (numPrincipleVectors > numWords) { - throw new IllegalArgumentException( - ""Cannot have more principle vectors than "" + -- ""word vectors: "" + principleVectors); -+ ""word vectors: "" + numPrincipleVectors); - } -- else if (principleVectors < 1) { -+ else if (numPrincipleVectors < 1) { - throw new IllegalArgumentException( - ""Must have at least one principle vector""); - } - - } - else { -- principleVectors = -+ numPrincipleVectors = - Math.min((int)(Math.ceil(Math.log(numWords))), 1000); - System.err.printf(""Choosing a heuristically selected %d "" + -- ""principle vectors%n"", principleVectors); -+ ""principle vectors%n"", -+ numPrincipleVectors); - } -- nnf = new NearestNeighborFinder(sspace, principleVectors); -+ nnf = new PartitioningNearestNeighborFinder( -+ sspace, numPrincipleVectors); - } catch (IOException ioe) { - throw new IOError(ioe); - } -diff --git a/src/edu/ucla/sspace/tools/SemanticSpaceExplorer.java b/src/edu/ucla/sspace/tools/SemanticSpaceExplorer.java -index b22050f1..c7f697f1 100644 ---- a/src/edu/ucla/sspace/tools/SemanticSpaceExplorer.java -+++ b/src/edu/ucla/sspace/tools/SemanticSpaceExplorer.java -@@ -26,10 +26,12 @@ - import edu.ucla.sspace.common.SemanticSpace; - import edu.ucla.sspace.common.SemanticSpaceIO; - import edu.ucla.sspace.common.Similarity; --import edu.ucla.sspace.common.WordComparator; - - import edu.ucla.sspace.text.WordIterator; - -+import edu.ucla.sspace.util.NearestNeighborFinder; -+import edu.ucla.sspace.util.PartitioningNearestNeighborFinder; -+ - import edu.ucla.sspace.vector.SparseVector; - import edu.ucla.sspace.vector.Vector; - import edu.ucla.sspace.vector.VectorIO; -@@ -102,12 +104,6 @@ private enum Command { - } - } - -- /** -- * The comparator to be used when identifying the nearest neighbors to words -- * in a semantic space. -- */ -- private final WordComparator wordComparator; +- } - - /** - * The mapping from file name to the {@code SemanticSpace} that was loaded - * from that file. 
-@@ -125,11 +121,16 @@ private enum Command { - */ - private SemanticSpace current; - -+ /** -+ * The {@code NearestNeighborFinder} for the current {@code SemanticSpace} -+ * or {@code null} if the nearest terms have yet to be searched for. -+ */ -+ private NearestNeighborFinder currentNnf; -+ - /** - * Constructs an instance of {@code SemanticSpaceExplorer}. - */ -- private SemanticSpaceExplorer() { -- this.wordComparator = new WordComparator(); -+ private SemanticSpaceExplorer() { - fileNameToSSpace = new LinkedHashMap(); - aliasToFileName = new HashMap(); - current = null; -@@ -240,6 +241,7 @@ private boolean execute(Iterator commandTokens, PrintStream out) { - } - fileNameToSSpace.put(sspaceFileName, sspace); - current = sspace; -+ currentNnf = null; - break; - } - -@@ -319,31 +321,16 @@ private boolean execute(Iterator commandTokens, PrintStream out) { - return false; - } - } -- Similarity.SimType simType = Similarity.SimType.COSINE; -- if (commandTokens.hasNext()) { -- // Upper case since it's an enum -- String simTypeStr = commandTokens.next().toUpperCase(); -- try { -- simType = Similarity.SimType.valueOf(simTypeStr); -- } catch (IllegalArgumentException iae) { -- // See if the user provided a prefix of the similarity -- // measure's name -- for (Similarity.SimType t : Similarity.SimType.values()) -- if (t.name().startsWith(simTypeStr)) -- simType = t; -- // If no prefix was found, report an error -- if (simType == null) { -- out.println(""invalid similarity measure: "" +simTypeStr); -- return false; -- } -- } -- } - -+ // If this is the first time the nearest neighbors have been -+ // searched for, construct a new NNF -+ if (currentNnf == null) -+ currentNnf = new PartitioningNearestNeighborFinder(current); -+ - // Using the provided or default arguments find the closest - // neighbors to the target word in the current semantic space - SortedMultiMap mostSimilar = -- wordComparator.getMostSimilar(focusWord, current, neighbors, -- simType); -+ currentNnf.getMostSimilar(focusWord, neighbors); - - if (mostSimilar == null) { - out.println(focusWord + -@@ -375,7 +362,7 @@ private boolean execute(Iterator commandTokens, PrintStream out) { - out.println(""missing word argument""); - return false; - } -- String word2 = commandTokens.next(); -+ String word2 = commandTokens.next(); - - Similarity.SimType simType = Similarity.SimType.COSINE; - if (commandTokens.hasNext()) { -diff --git a/src/edu/ucla/sspace/tools/SimilarityListGenerator.java b/src/edu/ucla/sspace/tools/SimilarityListGenerator.java -index a0dbb5fd..9703a36b 100644 ---- a/src/edu/ucla/sspace/tools/SimilarityListGenerator.java -+++ b/src/edu/ucla/sspace/tools/SimilarityListGenerator.java -@@ -26,9 +26,10 @@ - import edu.ucla.sspace.common.Similarity.SimType; - import edu.ucla.sspace.common.SemanticSpace; - import edu.ucla.sspace.common.SemanticSpaceIO; --import edu.ucla.sspace.common.WordComparator; - - import edu.ucla.sspace.util.BoundedSortedMap; -+import edu.ucla.sspace.util.NearestNeighborFinder; -+import edu.ucla.sspace.util.PartitioningNearestNeighborFinder; - import edu.ucla.sspace.util.Pair; - import edu.ucla.sspace.util.SortedMultiMap; - -@@ -77,9 +78,10 @@ private void addOptions() { - ""whether to print the similarity score "" + - ""(default: false)"", - false, null, ""Program Options""); -- argOptions.addOption('s', ""similarityFunction"", -- ""name of a similarity function (default: cosine)"", -- true, ""String"", ""Program Options""); -+ // dj: current unsupported; will be next release -+ // 
argOptions.addOption('s', ""similarityFunction"", -+ // ""name of a similarity function (default: cosine)"", -+ // true, ""String"", ""Program Options""); - argOptions.addOption('n', ""numSimilar"", ""the number of similar words "" + - ""to print (default: 10)"", true, ""String"", - ""Program Options""); -@@ -153,12 +155,13 @@ public void run(String[] args) throws Exception { - // load the behavior options - final boolean printSimilarity = argOptions.hasOption('p'); - -- String similarityTypeName = (argOptions.hasOption('s')) -- ? argOptions.getStringOption('s').toUpperCase() : ""COSINE""; -+ // dj: setting the similarity type is currently unsupported but will be -+ // in the next release - -- SimType similarityType = SimType.valueOf(similarityTypeName); -- -- LOGGER.fine(""using similarity measure: "" + similarityType); -+ // String similarityTypeName = (argOptions.hasOption('s')) -+ // ? argOptions.getStringOption('s').toUpperCase() : ""COSINE""; -+ // SimType similarityType = SimType.valueOf(similarityTypeName); -+ // LOGGER.fine(""using similarity measure: "" + similarityType); - - final int numSimilar = (argOptions.hasOption('n')) - ? argOptions.getIntOption('n') : 10; -@@ -175,14 +178,14 @@ public void run(String[] args) throws Exception { - final PrintWriter outputWriter = new PrintWriter(output); - - final Set words = sspace.getWords(); -- WordComparator comparator = new WordComparator(numThreads); -+ NearestNeighborFinder nnf = -+ new PartitioningNearestNeighborFinder(sspace); - - - for (String word : words) { - // compute the k most-similar words to this word - SortedMultiMap mostSimilar = -- comparator.getMostSimilar(word, sspace, numSimilar, -- similarityType); -+ nnf.getMostSimilar(word, numSimilar); - - // once processing has finished write the k most-similar words to - // the output file. -diff --git a/src/edu/ucla/sspace/util/HashIndexer.java b/src/edu/ucla/sspace/util/HashIndexer.java -index 28a90a79..97807ca6 100644 ---- a/src/edu/ucla/sspace/util/HashIndexer.java -+++ b/src/edu/ucla/sspace/util/HashIndexer.java -@@ -37,7 +37,7 @@ - - /** - * A utility class for mapping a set of objects to unique indices based on -- * object equality. The indices returned by this class will always being at -+ * object equality. The indices returned by this class will always begin at - * {@code 0}. - * - *

This implementation provides faster {@link #index(Object)} performance -diff --git a/src/edu/ucla/sspace/util/PairCounter.java b/src/edu/ucla/sspace/util/PairCounter.java -index 89b9f2d1..21591721 100644 ---- a/src/edu/ucla/sspace/util/PairCounter.java -+++ b/src/edu/ucla/sspace/util/PairCounter.java -@@ -253,7 +253,7 @@ public Set> items() { - } - - /** -- * Returns an interator over the pairs that have been counted thusfar and -+ * Returns an iterator over the pairs that have been counted thusfar and - * their respective counts. - */ - public Iterator,Integer>> iterator() { -diff --git a/src/edu/ucla/sspace/util/NearestNeighborFinder.java b/src/edu/ucla/sspace/util/PartitioningNearestNeighborFinder.java -similarity index 90% -rename from src/edu/ucla/sspace/util/NearestNeighborFinder.java -rename to src/edu/ucla/sspace/util/PartitioningNearestNeighborFinder.java -index c92c8bc4..be4db381 100644 ---- a/src/edu/ucla/sspace/util/NearestNeighborFinder.java -+++ b/src/edu/ucla/sspace/util/PartitioningNearestNeighborFinder.java -@@ -65,10 +65,12 @@ - * A class for finding the k-nearest neighbors of one or more words. The - * {@code NearestNeighborFinder} operates by generating a set of principle - * vectors that reflect average words in a {@link SemanticSpace} and then -- * identifying mapping each principle vector to the set of words to which it is -- * closest. Finding the nearest neighbor then entails finding the -- * k-closest principle vectors and comparing only their words, rather -- * than all the words in the space. This dramatically reduces the search space. -+ * mapping each principle vector to the set of words to which it is closest. -+ * Finding the nearest neighbor then entails finding the k-closest -+ * principle vectors and comparing only their words, rather than all the words -+ * in the space. This dramatically reduces the search space by partitioning the -+ * vectors of the {@code SemanticSpace} into smaller sets, not all of which need -+ * to be searched. - * - *

The number of principle vectors is typically far less than the total - * number of vectors in the {@code SemanticSpace}, but should be more than the -@@ -77,8 +79,14 @@ - * (|Sspace| / p))}, where {@code p} is the number of principle components, - * {@code k} is the number of nearest neighbors to be found, and {@code - * |Sspace|} is the size of the semantic space. -+ * -+ *

Instances of this class are also serializable. If the backing {@code -+ * SemanticSpace} is also serializable, the space will be saved. However, if -+ * the space is not serializable, its contents will be converted to a static -+ * version and saved as a copy. - */ --public class NearestNeighborFinder implements java.io.Serializable { -+public class PartitioningNearestNeighborFinder -+ implements NearestNeighborFinder, java.io.Serializable { - - private static final long serialVersionUID = 1L; - -@@ -86,7 +94,7 @@ public class NearestNeighborFinder implements java.io.Serializable { - * The logger to which clustering status updates will be written. - */ - private static final Logger LOGGER = -- Logger.getLogger(NearestNeighborFinder.class.getName()); -+ Logger.getLogger(PartitioningNearestNeighborFinder.class.getName()); - - /** - * The semantic space from which the principle vectors are derived -@@ -104,29 +112,41 @@ public class NearestNeighborFinder implements java.io.Serializable { - */ - private transient WorkQueue workQueue; - -+ /** -+ * Creates a new {@code NearestNeighborFinder} for the {@link -+ * SemanticSpace}, using log10(|words|) principle vectors to -+ * efficiently search for neighbors. -+ * -+ * @param sspace a semantic space to search -+ */ -+ public PartitioningNearestNeighborFinder(SemanticSpace sspace) { -+ this(sspace, (int)(Math.ceil(Math.log10(sspace.getWords().size())))); -+ } -+ - /** - * Creates a new {@code NearestNeighborFinder} for the {@link - * SemanticSpace}, using the specified number of principle vectors to - * efficiently search for neighbors. - * - * @param sspace a semantic space to search -- * @param principleVectors the number of principle vectors to use in -+ * @param numPrincipleVectors the number of principle vectors to use in - * representing the content of the space. - */ -- public NearestNeighborFinder(SemanticSpace sspace, int principleVectors) { -+ public PartitioningNearestNeighborFinder(SemanticSpace sspace, -+ int numPrincipleVectors) { - if (sspace == null) - throw new NullPointerException(); -- if (principleVectors > sspace.getWords().size()) -+ if (numPrincipleVectors > sspace.getWords().size()) - throw new IllegalArgumentException( - ""Cannot have more principle vectors than "" + -- ""word vectors: "" + principleVectors); -- else if (principleVectors < 1) -+ ""word vectors: "" + numPrincipleVectors); -+ else if (numPrincipleVectors < 1) - throw new IllegalArgumentException( - ""Must have at least one principle vector""); - this.sspace = sspace; - principleVectorToNearestTerms = new HashMultiMap(); - workQueue = new WorkQueue(); -- computePrincipleVectors(principleVectors); -+ computePrincipleVectors(numPrincipleVectors); - } - - /** -@@ -193,7 +213,7 @@ public void run() { - // which it is closest - for (int i = sta; i < end; ++i) { - DoubleVector v = termVectors.get(i); -- double highestSim = Double.NEGATIVE_INFINITY; -+ double highestSim = -Double.MAX_VALUE; - int pVec = -1; - for (int j = 0; j < principles.length; ++j) { - DoubleVector principle = principles[j]; -@@ -246,12 +266,7 @@ public void run() { - } - - /** -- * Finds the k most similar words in the semantic space according to -- * the cosine similarity, returning a mapping from their similarity to the -- * word itself. -- * -- * @return the most similar words, or {@code null} if the provided word was -- * not in the semantic space. 
-+ * {@inheritDoc} - */ - public SortedMultiMap getMostSimilar( - final String word, int numberOfSimilarWords) { -@@ -276,12 +291,7 @@ public SortedMultiMap getMostSimilar( - } - - /** -- * Finds the k most similar words in the semantic space according to -- * the cosine similarity, returning a mapping from their similarity to the -- * word itself. -- * -- * @return the most similar words, or {@code null} if none of the provided -- * word were not in the semantic space. -+ * {@inheritDoc} - */ - public SortedMultiMap getMostSimilar( - Set terms, int numberOfSimilarWords) { -@@ -323,13 +333,8 @@ public SortedMultiMap getMostSimilar( - return mostSim; - } - +- public String getSearchDebug() { +- StringBuilder b = new StringBuilder(); +- b.append(""total_frac="" + fracUpload() + ""\ntransport_frac="" + fracTransportUpload() +- + ""\ntorrent_avg="" + getAverageUploadPerRunningTorrent()); +- b.append(""\ncan forward="" + canForwardSearch()); +- b.append(""\ncan respond="" + canRespondToSearch()); +- +- b.append(""\n\nforwarded searches size="" + forwardedSearches.size() + "" canceled size="" +- + canceledSearches.size() + "" sent size="" + sentSearches.size()); +- b.append(""\nbloom: stored="" + recentSearches.getPrevFilterNumElements() +- + "" est false positives="" +- + (100 * recentSearches.getPrevFilterFalsePositiveEst() + ""%"")); +- b.append(""\nbloom blocked|sent curr="" + bloomSearchesBlockedCurr + ""|"" +- + bloomSearchesSentCurr + "" prev="" + bloomSearchesBlockedPrev + ""|"" +- + bloomSearchesSentPrev); +- b.append(""\n\n"" + debugChannelIdErrorSetupErrorStats.getDebugStats()); +- +- long sum = 0, now = System.currentTimeMillis(), count = 0; +- +- // Include per-friend queue stats +- lock.lock(); +- try { +- Map counts = new HashMap(); +- for (DelayedSearchQueueEntry e : delayedSearchQueue.queuedSearches.values()) { +- +- count++; +- sum += (now - e.insertionTime); +- +- String nick = e.source.getRemoteFriend().getNick(); +- if (counts.containsKey(nick) == false) { +- counts.put(nick, new MutableInteger()); +- } +- counts.get(nick).v++; +- } +- for (String nick : counts.keySet()) { +- b.append(""\n\t"" + nick + "" -> "" + counts.get(nick).v); +- } +- b.append(""\n\nQueue size: "" + delayedSearchQueue.queuedSearches.size()); +- } finally { +- lock.unlock(); +- } +- +- b.append(""\nAverage queued search delay: "" + (double)sum/(double)count); +- +- return b.toString(); +- } +- +- public List getSearchResult(int searchId) { +- return textSearchManager.getResults(searchId); +- } +- +- private boolean handleHashSearch(final FriendConnection source, final OSF2FHashSearch msg) { +- +- // second, we might actually have this data +- final byte[] infohash = filelistManager.getMetainfoHash(msg.getInfohashhash()); +- +- if (infohash != null) { +- DownloadManager dm = AzureusCoreImpl.getSingleton().getGlobalManager().getDownloadManager(new HashWrapper(infohash)); +- +- if (dm != null) { +- logger.fine(""found match: "" + new String(Base64.encode(infohash))); +- +- logger.fine(""found dm match, we have this stuff""); +- +- // check if the torrent allow osf2f search peers +- boolean allowed = OverlayTransport.checkOSF2FAllowed(dm.getDownloadState().getPeerSources(), dm.getDownloadState().getNetworks()); +- if (!allowed) { +- logger.warning(""got search match for torrent "" + ""that does not allow osf2f peers""); +- return true; +- } +- +- boolean completedOrDownloading = FileListManager.completedOrDownloading(dm); +- if (!completedOrDownloading) { +- return true; +- } +- +- // check if we have the 
capacity to respond +- if (canRespondToSearch() == false) { +- return false; +- } +- +- // yeah, we actually have this stuff and we have spare capacity +- // create an overlay transport +- final int newChannelId = random.nextInt(); +- final int transportFakePathId = random.nextInt(); +- // set the path id for the overlay transport for something +- // random (since otherwise all transports for this infohash will +- // get the same pathid, which will limit it to be only one. The +- // path id set in the channel setup message will be +- // deterministic. It is the responsibility of the source to +- // monitor for duplicate paths +- +- // set the path id to something that will persist between +- // searches, for example a deterministic random seeded with +- // the infohashhash +- final int pathID = randomnessManager.getDeterministicRandomInt((int) msg.getInfohashhash()); +- +- // get the delay for this overlaytranport, that is the latency +- // component of the delay +- final int overlayDelay = overlayManager.getLatencyDelayForInfohash(source.getRemoteFriend(), infohash); +- +- TimerTask task = new TimerTask() { +- @Override +- public void run() { +- try { +- /* +- * check if the search got canceled while we were +- * sleeping +- */ +- if (!isSearchCanceled(msg.getSearchID())) { +- final OSF2FHashSearchResp response = new OSF2FHashSearchResp(OSF2FMessage.CURRENT_VERSION, msg.getSearchID(), newChannelId, pathID); +- +- final OverlayTransport transp = new OverlayTransport(source, newChannelId, infohash, transportFakePathId, false, overlayDelay); +- // register it with the friendConnection +- source.registerOverlayTransport(transp); +- // send the channel setup message +- source.sendChannelSetup(response, false); +- } +- } catch (OverlayRegistrationError e) { +- Debug.out(""got an error when registering incoming transport to '"" + source.getRemoteFriend().getNick() + ""': "" + e.message); +- } +- } +- +- }; +- delayedExecutor.queue(overlayDelay, task); +- +- // we are still forwarding if there are files in the torrent +- // that we chose not to download +- DiskManagerFileInfo[] diskManagerFileInfo = dm.getDiskManagerFileInfo(); +- for (DiskManagerFileInfo d : diskManagerFileInfo) { +- if (d.isSkipped()) { +- return true; +- } +- } +- /* +- * ok, we shouldn't forward this, already sent a hash response +- * and we have/are downloading all the files +- */ +- return false; +- } +- } +- +- return true; +- } +- +- private boolean isSearchCanceled(int searchId) { +- boolean canceled = false; +- lock.lock(); +- try { +- if (canceledSearches.containsKey(searchId)) { +- canceled = true; +- } +- } finally { +- lock.unlock(); +- } +- return canceled; +- } +- +- /** +- * Returns the probability of rejecting a search from this friend given the share of the +- * overall queue +- */ +- public double getFriendSearchDropProbability(Friend inFriend) { +- +- lock.lock(); +- try { +- +- // Always accept if we don't have any searches from friend. +- if (searchesPerFriend.get(inFriend) == null) { +- return 0; +- } +- +- // Reject proportionally to recent rate. Do not admit more than X/sec. +- // Also, proportional to processing queue size. 
+- double rateBound = delayedSearchQueue.searchCount / 80.0; +- double queueBound = (double)delayedSearchQueue.queuedSearches.size() / (double)MAX_SEARCH_QUEUE_LENGTH; +- +- return Math.max(rateBound, queueBound); +- +- } finally { +- lock.unlock(); +- } +- } +- +- private void handleIncomingHashSearchResponse(OSF2FHashSearch hashSearch, FriendConnection source, OSF2FHashSearchResp msg) { +- // great, we found someone that has what we searched for! +- // create the overlay transport +- byte[] infoHash = filelistManager.getMetainfoHash(hashSearch.getInfohashhash()); +- if (infoHash == null) { +- logger.warning(""got channel setup request, "" + ""but the infohash we searched for "" + ""is not in filelistmananger""); +- return; +- } +- +- DownloadManager dm = AzureusCoreImpl.getSingleton().getGlobalManager().getDownloadManager(new HashWrapper(infoHash)); +- if (dm == null) { +- logger.warning(""got channel setup request, "" + ""but the downloadmanager is null""); +- return; +- } +- +- if (source.hasRegisteredPath(msg.getPathID())) { +- logger.finer(""got channel setup response, "" + ""but path is already used: sending back a reset""); +- source.sendChannelRst(new OSF2FChannelReset(OSF2FMessage.CURRENT_VERSION, msg.getChannelID())); +- return; +- } +- +- OverlayTransport overlayTransport = new OverlayTransport(source, msg.getChannelID(), infoHash, msg.getPathID(), true, overlayManager.getLatencyDelayForInfohash(source.getRemoteFriend(), infoHash)); +- // register it with the friendConnection +- try { +- source.registerOverlayTransport(overlayTransport); +- // safe to start it since we know that the other party is interested +- overlayTransport.start(); +- } catch (OverlayRegistrationError e) { +- Debug.out(""got an error when registering outgoing transport: "" + e.message); +- return; +- } +- +- } +- +- public void handleIncomingSearch(FriendConnection source, OSF2FSearch msg) { +- lock.lock(); +- try { +- logger.finest(""got search: "" + msg.getDescription()); +- // first, check if we either sent or forwarded this search before +- if (forwardedSearches.containsKey(msg.getSearchID()) || sentSearches.containsKey(msg.getSearchID()) || delayedSearchQueue.isQueued(msg)) { +- return; +- } +- } finally { +- lock.unlock(); +- } +- +- boolean shouldForward = true; +- // second, check if we actually can do something about this +- if (msg instanceof OSF2FHashSearch) { +- shouldForward = handleHashSearch(source, (OSF2FHashSearch) msg); +- } else if (msg instanceof OSF2FTextSearch) { +- shouldForward = handleTextSearch(source, (OSF2FTextSearch) msg); +- } else { +- logger.warning(""received unrecgonized search type: "" + msg.getID() + "" / "" + msg.getClass().getCanonicalName()); +- } +- +- /* +- * check if we are at full capacity +- */ +- if (canForwardSearch() == false) { +- shouldForward = false; +- } +- +- if (shouldForward) { +- // ok, seems like we should attempt to forward this, put it in +- // the queue +- delayedSearchQueue.add(source, msg); +- } +- +- } +- +- public void handleIncomingSearchCancel(FriendConnection source, OSF2FSearchCancel msg) { +- +- boolean forward = false; +- lock.lock(); +- try { +- +- /* +- * if this is the first time we see the cancel, check if we +- * forwarded this search, if we did, send a cancel +- */ +- if (!canceledSearches.containsKey(msg.getSearchID())) { +- canceledSearches.put(msg.getSearchID(), System.currentTimeMillis()); +- /* +- * we only forward the cancel if we already sent the search +- */ +- if (forwardedSearches.containsKey(msg.getSearchID())) { +- 
forward = true; +- } else { +- logger.fine(""got search cancel for unknown search id""); +- } +- } +- } finally { +- lock.unlock(); +- } +- if (forward) { +- overlayManager.forwardSearchOrCancel(source, msg); +- } +- } +- +- /** +- * There are 2 possible explanations for getting a search response, either +- * we got a response for a search we sent ourselves, or we got a response +- * for a search we forwarded +- * +- * @param source +- * connection from where we got the setup +- * @param msg +- * the channel setup message +- */ +- public void handleIncomingSearchResponse(FriendConnection source, OSF2FSearchResp msg) { +- SentSearch sentSearch; +- lock.lock(); +- try { +- sentSearch = sentSearches.get(msg.getSearchID()); +- } finally { +- lock.unlock(); +- } +- // first, if might be a search we sent +- if (sentSearch != null) { +- logger.finest(""got response to search: "" + sentSearch.getSearch().getDescription()); +- OSF2FSearch search = sentSearch.getSearch(); +- // update response stats +- sentSearch.gotResponse(); +- /* +- * check if we got enough search responses to cancel this search +- * +- * we will still use the data, even if the search is canceled. I +- * mean, since it already made it here why not use it... +- */ +- if (sentSearch.getResponseNum() > mMaxSearchResponsesBeforeCancel) { +- /* +- * only send a cancel message once +- */ +- boolean sendCancel = false; +- lock.lock(); +- try { +- if (!canceledSearches.containsKey(msg.getSearchID())) { +- canceledSearches.put(msg.getSearchID(), System.currentTimeMillis()); +- logger.finer(""canceling search "" + msg); +- sendCancel = true; +- } +- } finally { +- lock.unlock(); +- } +- if (sendCancel) { +- overlayManager.sendSearchOrCancel(new OSF2FSearchCancel(OSF2FMessage.CURRENT_VERSION, msg.getSearchID()), true, false); +- } +- } +- if (search instanceof OSF2FHashSearch) { +- // ok, it was a hash search that we sent +- handleIncomingHashSearchResponse((OSF2FHashSearch) search, source, (OSF2FHashSearchResp) msg); +- } else if (search instanceof OSF2FTextSearch) { +- // this was from a text search we sent +- FileList fileList; +- try { +- OSF2FTextSearchResp textSearchResp = (OSF2FTextSearchResp) msg; +- fileList = FileListManager.decode_basic(textSearchResp.getFileList()); +- +- textSearchManager.gotSearchResponse(search.getSearchID(), source.getRemoteFriend(), fileList, textSearchResp.getChannelID(), source.hashCode()); +- +- logger.fine(""results so far:""); +- List res = getSearchResult(search.getSearchID()); +- for (TextSearchResult textSearchResult : res) { +- logger.fine(textSearchResult.toString()); +- } +- } catch (IOException e) { +- logger.warning(""got malformed search response""); +- } +- } else { +- logger.warning(""unknown search response type""); +- } +- } +- // sentsearch == null +- else { +- // ok, this is for a search we forwarded +- ForwardedSearch search; +- lock.lock(); +- try { +- search = forwardedSearches.get(msg.getSearchID()); +- if (search == null) { +- logger.warning(""got response for unknown search:"" + source + "":"" + msg.getDescription()); +- return; +- } +- +- logger.finest(""got response to forwarded search: "" + search.getSearch().getDescription()); +- +- if (canceledSearches.containsKey(msg.getSearchID())) { +- logger.finer(""not forwarding search, it is already canceled, "" + msg.getSearchID()); +- return; +- } +- } finally { +- lock.unlock(); +- } +- +- FriendConnection searcher = search.getSource(); +- FriendConnection responder = source; +- +- if (search.getResponseNum() > 
mMaxSearchResponsesBeforeCancel) { +- /* +- * we really shouldn't cancel other peoples searches, but if +- * they don't do it we have to +- */ +- lock.lock(); +- try { +- canceledSearches.put(msg.getSearchID(), System.currentTimeMillis()); +- } finally { +- lock.unlock(); +- } +- logger.finest(""Sending cancel for someone elses search!, searcher="" + searcher.getRemoteFriend() + "" responder="" + responder.getRemoteFriend() + "":\t"" + search); +- overlayManager.forwardSearchOrCancel(source, new OSF2FSearchCancel(OSF2FMessage.CURRENT_VERSION, msg.getSearchID())); +- } else { +- search.gotResponse(); +- // register the forwarding +- logger.finest(""registering overlay forward: "" + searcher.getRemoteFriend().getNick() + ""<->"" + responder.getRemoteFriend().getNick()); +- try { +- responder.registerOverlayForward(msg, searcher, search.getSearch(), false); +- searcher.registerOverlayForward(msg, responder, search.getSearch(), true); +- } catch (FriendConnection.OverlayRegistrationError e) { +- String direction = ""'"" + responder.getRemoteFriend().getNick() + ""'->'"" + searcher.getRemoteFriend().getNick() + ""'""; +- e.direction = direction; +- e.setupMessageSource = responder.getRemoteFriend().getNick(); +- logger.warning(""not forwarding overlay setup request "" + direction + e.message); +- debugChannelIdErrorSetupErrorStats.add(e); +- return; +- } +- +- // and send out the search +- if (msg instanceof OSF2FHashSearchResp) { +- searcher.sendChannelSetup((OSF2FHashSearchResp) msg.clone(), true); +- } else if (msg instanceof OSF2FTextSearchResp) { +- searcher.sendTextSearchResp((OSF2FTextSearchResp) msg.clone(), true); +- } else { +- Debug.out(""got unknown message: "" + msg.getDescription()); +- } +- } +- } +- +- } +- +- /** +- * +- * @param source +- * @param msg +- * @return +- */ +- private boolean handleTextSearch(final FriendConnection source, final OSF2FTextSearch msg) { +- +- boolean shouldForward = true; +- +- if (logger.isLoggable(Level.FINER)) { +- logger.finer(""handleTextSearch: "" + msg.getSearchString() + "" from "" + source.getRemoteFriend().getNick()); +- } +- +- String searchString = msg.getSearchString(); +- +- // common case is no filtering. +- if (filteredKeywords.length > 0) { +- StringTokenizer toks = new StringTokenizer(searchString); - - /** -- * Finds the k most similar words in the semantic space according to -- * the cosine similarity, returning a mapping from their similarity to the -- * word itself. 
-- * -- * @return the most similar words to the vector -+ * {@inheritDoc} - */ - public SortedMultiMap getMostSimilar( - final Vector v, int numberOfSimilarWords) { -@@ -434,5 +439,4 @@ private void writeObject(ObjectOutputStream out) throws IOException { - out.writeObject(copy); - } - } +- for (String filter : filteredKeywords) { +- if (searchString.contains(filter)) { +- logger.fine(""Blocking search due to filter: "" + searchString + "" matched by: "" + filter); +- return false; +- } +- } +- } - - } -\ No newline at end of file -diff --git a/src/edu/ucla/sspace/util/ReflectionUtil.java b/src/edu/ucla/sspace/util/ReflectionUtil.java -index a3d4ffb7..583f3b2b 100644 ---- a/src/edu/ucla/sspace/util/ReflectionUtil.java -+++ b/src/edu/ucla/sspace/util/ReflectionUtil.java -@@ -48,8 +48,4 @@ public static T getObjectInstance(String className) { - throw new Error(e); - } - } +- List results = filelistManager.handleSearch(source.getRemoteFriend(), searchString); +- +- if (results.size() > 0) { +- if (canRespondToSearch()) { +- logger.finer(""found matches: "" + results.size()); +- // long fileListSize = results.getFileNum(); +- +- List delayedExecutionTasks = new LinkedList(); +- long time = System.currentTimeMillis(); +- for (FileCollection c : results) { +- // send back a response +- int channelId = random.nextInt(); +- LinkedList list = new LinkedList(); +- list.add(c); +- byte[] encoded = FileListManager.encode_basic(new FileList(list), false); +- +- final OSF2FTextSearchResp resp = new OSF2FTextSearchResp(OSF2FMessage.CURRENT_VERSION, OSF2FMessage.FILE_LIST_TYPE_PARTIAL, msg.getSearchID(), channelId, encoded); +- int delay = overlayManager.getSearchDelayForInfohash(source.getRemoteFriend(), c.getUniqueIdBytes()); +- delayedExecutionTasks.add(new DelayedExecutionEntry(time + delay, 0, new TimerTask() { +- @Override +- public void run() { +- /* +- * check if the search got canceled while we were +- * sleeping +- */ +- if (!isSearchCanceled(msg.getSearchID())) { +- source.sendTextSearchResp(resp, false); +- } +- } +- })); +- } +- delayedExecutor.queue(delayedExecutionTasks); - --// public static T[] newArrayInstance(Class c, int length) { --// return Arrays.newInstance(c, length) --// } - } -diff --git a/src/edu/ucla/sspace/util/primitive/IntIntHashMultiMap.java b/src/edu/ucla/sspace/util/primitive/IntIntHashMultiMap.java -index bcd6e9d3..6577e230 100644 ---- a/src/edu/ucla/sspace/util/primitive/IntIntHashMultiMap.java -+++ b/src/edu/ucla/sspace/util/primitive/IntIntHashMultiMap.java -@@ -42,7 +42,11 @@ - import gnu.trove.procedure.TIntProcedure; - - /** -- * -+ * A {@link MultiMap} implementation for mapping {@code int} primitives as both -+ * keys and values using a hashing strategy. This class offers a noticeable -+ * performance improvement over the equivalent {@code -+ * HashMultiMap<Integer,Integer>} by operating and representing the keys -+ * and values only in their primitive state. - */ - public class IntIntHashMultiMap implements IntIntMultiMap { - -diff --git a/src/edu/ucla/sspace/util/primitive/IntIntMultiMap.java b/src/edu/ucla/sspace/util/primitive/IntIntMultiMap.java -index 499a7527..f511b3df 100644 ---- a/src/edu/ucla/sspace/util/primitive/IntIntMultiMap.java -+++ b/src/edu/ucla/sspace/util/primitive/IntIntMultiMap.java -@@ -35,7 +35,8 @@ - - - /** -- * -+ * A {@link MultiMap} subinterface for mapping {@code int} primitives as both -+ * keys and values. 
- */ - public interface IntIntMultiMap extends MultiMap { - -diff --git a/test/edu/ucla/sspace/dependency/BreadthFirstPathIteratorTest.java b/test/edu/ucla/sspace/dependency/BreadthFirstPathIteratorTest.java -index 2d956032..bd88c5fc 100644 ---- a/test/edu/ucla/sspace/dependency/BreadthFirstPathIteratorTest.java -+++ b/test/edu/ucla/sspace/dependency/BreadthFirstPathIteratorTest.java -@@ -38,29 +38,29 @@ - public class BreadthFirstPathIteratorTest extends PathIteratorTestBase { - - String conll = -- ""Anarchism anarchism NN 1 2 SBJ\n"" + -- ""is be VBZ 2 0 ROOT\n"" + -- ""a a DT 3 5 NMOD\n"" + -- ""political political JJ 4 5 NMOD\n"" + -- ""philosophy philosophy NN 5 2 PRD\n"" + -- ""encompassing encompass VVG 6 5 NMOD\n"" + -- ""theories theory NNS 7 6 OBJ\n"" + -- ""and and CC 8 7 CC\n"" + -- ""attitudes attitude NNS 9 7 COORD\n"" + -- ""which which WDT 10 11 SBJ\n"" + -- ""consider consider VVP 11 9 NMOD\n"" + -- ""the the DT 12 13 NMOD\n"" + -- ""state state NN 13 15 SBJ\n"" + -- ""to t TO 14 15 VMOD\n"" + -- ""be be VB 15 11 OBJ\n"" + -- ""unnecessary unnecessary JJ 16 15 PRD\n"" + -- "", , , 17 16 P\n"" + -- ""harmul harmful JJ 18 16 COORD\n"" + -- "", , , 19 16 P\n"" + -- ""and/ ad/ JJ 20 16 COORD\n"" + -- ""or or CC 21 16 CC\n"" + -- ""undesirable undesirable JJ 22 16 COORD\n"" + -- "". . SENT 23 2 P\n""; -+ ""Anarchism\tanarchism\tNN\t1\t2\tSBJ\n"" + -+ ""is\tbe\tVBZ\t2\t0\tROOT\n"" + -+ ""a\ta\tDT\t3\t5\tNMOD\n"" + -+ ""political\tpolitical\tJJ\t4\t5\tNMOD\n"" + -+ ""philosophy\tphilosophy\tNN\t5\t2\tPRD\n"" + -+ ""encompassing\tencompass\tVVG\t6\t5\tNMOD\n"" + -+ ""theories\ttheory\tNNS\t7\t6\tOBJ\n"" + -+ ""and\tand\tCC\t8\t7\tCC\n""+ -+ ""attitudes\tattitude\tNNS\t9\t7\tCOORD\n""+ -+ ""which\twhich\tWDT\t10\t11\tSBJ\n""+ -+ ""consider\tconsider\tVVP\t11\t9\tNMOD\n""+ -+ ""the\tthe\tDT\t12\t13\tNMOD\n""+ -+ ""state\tstate\tNN\t13\t15\tSBJ\n""+ -+ ""to\tt\tTO\t14\t15\tVMOD\n""+ -+ ""be\tbe\tVB\t15\t11\tOBJ\n""+ -+ ""unnecessary\tunnecessary\tJJ\t16\t15\tPRD\n""+ -+ "",\t,\t,\t17\t16\tP\n""+ -+ ""harmul\tharmful\tJJ\t18\t16\tCOORD\n""+ -+ "",\t,\t,\t19\t16\tP\n""+ -+ ""and/\tad/\tJJ\t20\t16\tCOORD\n""+ -+ ""or\tor\tCC\t21\t16\tCC\n""+ -+ ""undesirable\tundesirable\tJJ\t22\t16\tCOORD\n""+ -+ "".\t.\tSENT\t23\t2\tP\n""; - - static final Map PATH_START_COUNTS - = new TreeMap(); -diff --git a/test/edu/ucla/sspace/dependency/CoNLLDependencyExtractorTest.java b/test/edu/ucla/sspace/dependency/CoNLLDependencyExtractorTest.java -index f0667223..06e9ae30 100644 ---- a/test/edu/ucla/sspace/dependency/CoNLLDependencyExtractorTest.java -+++ b/test/edu/ucla/sspace/dependency/CoNLLDependencyExtractorTest.java -@@ -24,6 +24,8 @@ - import edu.ucla.sspace.text.StringDocument; - import edu.ucla.sspace.text.Document; - -+import java.io.BufferedReader; -+ - import java.util.Arrays; - import java.util.HashSet; - import java.util.LinkedList; -@@ -99,6 +101,8 @@ protected void evaluateRelations(DependencyTreeNode node, - // Check that all the neighbors are in the e - for (DependencyRelation rel : node.neighbors()) { - System.out.println(""relation: "" + rel); -+ if (!expectedRelations.contains(rel)) -+ System.out.printf(""FAIL: %s does not contain %s%n"", expectedRelations, rel); - assertTrue(expectedRelations.contains(rel)); - // Remove the relation from the list to double check that the - // neighbors are a list of duplicate relations. 
-@@ -120,7 +124,7 @@ protected void testFirstRoot(DependencyTreeNode[] relations, int index) { - DependencyRelation[] expectedRelations = new DependencyRelation[] { - new SimpleDependencyRelation(new SimpleDependencyTreeNode(""is"", ""VBZ""), - ""SBJ"", -- new SimpleDependencyTreeNode(""holt"", ""NNP"")), -+ new SimpleDependencyTreeNode(""Holt"", ""NNP"")), - new SimpleDependencyRelation(new SimpleDependencyTreeNode(""is"", ""VBZ""), - ""PRD"", - new SimpleDependencyTreeNode(""columnist"", ""NN"")), -@@ -145,7 +149,7 @@ protected void testSecondRoot(DependencyTreeNode[] relations, int index) { - DependencyRelation[] expectedRelations = new DependencyRelation[] { - new SimpleDependencyRelation(new SimpleDependencyTreeNode(""beskattning"", ""N""), - ""AT"", -- new SimpleDependencyTreeNode(""individuell"", ""AJ"")), -+ new SimpleDependencyTreeNode(""Individuell"", ""AJ"")), - new SimpleDependencyRelation(new SimpleDependencyTreeNode(""beskattning"", ""N""), - ""ET"", - new SimpleDependencyTreeNode(""av"", ""PR"")) -@@ -157,29 +161,29 @@ protected void testSecondRoot(DependencyTreeNode[] relations, int index) { - - @Test public void testSingleExtraction() throws Exception { - DependencyExtractor extractor = new CoNLLDependencyExtractor(); -- Document doc = new StringDocument(SINGLE_PARSE); -+ Document doc = new StringDocument(toTabs(SINGLE_PARSE)); - DependencyTreeNode[] nodes = extractor.readNextTree(doc.reader()); - - assertEquals(12, nodes.length); - - // Check the basics of the node. -- assertEquals(""review"", nodes[8].word()); -+ assertEquals(""Review"", nodes[8].word()); - assertEquals(""NNP"", nodes[8].pos()); - - // Test expected relation for each of the links for ""Review"". - DependencyRelation[] expectedRelations = new DependencyRelation[] { -- new SimpleDependencyRelation(new SimpleDependencyTreeNode(""review"", ""NNP""), -+ new SimpleDependencyRelation(new SimpleDependencyTreeNode(""Review"", ""NNP""), - ""NMOD"", - new SimpleDependencyTreeNode(""the"", ""DT"")), -- new SimpleDependencyRelation(new SimpleDependencyTreeNode(""review"", ""NNP""), -+ new SimpleDependencyRelation(new SimpleDependencyTreeNode(""Review"", ""NNP""), - ""NMOD"", -- new SimpleDependencyTreeNode(""literary"", ""NNP"")), -- new SimpleDependencyRelation(new SimpleDependencyTreeNode(""review"", ""NNP""), -+ new SimpleDependencyTreeNode(""Literary"", ""NNP"")), -+ new SimpleDependencyRelation(new SimpleDependencyTreeNode(""Review"", ""NNP""), - ""ADV"", - new SimpleDependencyTreeNode(""in"", ""IN"")), - new SimpleDependencyRelation(new SimpleDependencyTreeNode(""for"", ""IN""), - ""PMOD"", -- new SimpleDependencyTreeNode(""review"", ""NNP"")) -+ new SimpleDependencyTreeNode(""Review"", ""NNP"")) - }; - - evaluateRelations(nodes[8], new LinkedList(Arrays.asList(expectedRelations))); -@@ -187,14 +191,18 @@ protected void testSecondRoot(DependencyTreeNode[] relations, int index) { - - @Test public void testDoubleExtraction() throws Exception { - DependencyExtractor extractor = new CoNLLDependencyExtractor(); -- Document doc = new StringDocument(DOUBLE_PARSE); -- DependencyTreeNode[] relations = extractor.readNextTree(doc.reader()); -+ Document doc = new StringDocument(""\n\n"" + -+ toTabs(SINGLE_PARSE) + -+ ""\n"" + -+ toTabs(SECOND_PARSE)); -+ BufferedReader reader = doc.reader(); -+ DependencyTreeNode[] relations = extractor.readNextTree(reader); - assertTrue(relations != null); - assertEquals(12, relations.length); - - testFirstRoot(relations, 2); - -- relations = extractor.readNextTree(doc.reader()); -+ 
relations = extractor.readNextTree(reader); - assertTrue(relations != null); - assertEquals(4, relations.length); - -@@ -203,7 +211,7 @@ protected void testSecondRoot(DependencyTreeNode[] relations, int index) { - - @Test public void testRootNode() throws Exception { - DependencyExtractor extractor = new CoNLLDependencyExtractor(); -- Document doc = new StringDocument(SINGLE_PARSE); -+ Document doc = new StringDocument(toTabs(SINGLE_PARSE)); - DependencyTreeNode[] relations = extractor.readNextTree(doc.reader()); - - assertEquals(12, relations.length); -@@ -213,7 +221,7 @@ protected void testSecondRoot(DependencyTreeNode[] relations, int index) { - - @Test public void testConcatonatedTrees() throws Exception { - DependencyExtractor extractor = new CoNLLDependencyExtractor(); -- Document doc = new StringDocument(CONCATONATED_PARSE); -+ Document doc = new StringDocument(toTabs(CONCATONATED_PARSE)); - DependencyTreeNode[] relations = extractor.readNextTree(doc.reader()); - - assertEquals(16, relations.length); -@@ -223,11 +231,26 @@ protected void testSecondRoot(DependencyTreeNode[] relations, int index) { - - @Test public void testConcatonatedTreesZeroOffset() throws Exception { - DependencyExtractor extractor = new CoNLLDependencyExtractor(); -- Document doc = new StringDocument(DOUBLE_ZERO_OFFSET_PARSE); -+ Document doc = new StringDocument(toTabs(DOUBLE_ZERO_OFFSET_PARSE)); - DependencyTreeNode[] relations = extractor.readNextTree(doc.reader()); - - assertEquals(16, relations.length); - testFirstRoot(relations, 2); - testSecondRoot(relations, 13); - } -+ -+ static String toTabs(String doc) { -+ StringBuilder sb = new StringBuilder(); -+ String[] arr = doc.split(""\n""); -+ for (String line : arr) { -+ String[] cols = line.split(""\\s+""); -+ for (int i = 0; i < cols.length; ++i) { -+ sb.append(cols[i]); -+ if (i + 1 < cols.length) -+ sb.append('\t'); -+ } -+ sb.append('\n'); -+ } -+ return sb.toString(); -+ } - } -diff --git a/test/edu/ucla/sspace/dependency/WaCKyDependencyExtractorTest.java b/test/edu/ucla/sspace/dependency/WaCKyDependencyExtractorTest.java -index 8f466cdc..74a1ebbe 100644 ---- a/test/edu/ucla/sspace/dependency/WaCKyDependencyExtractorTest.java -+++ b/test/edu/ucla/sspace/dependency/WaCKyDependencyExtractorTest.java -@@ -24,6 +24,8 @@ - import edu.ucla.sspace.text.StringDocument; - import edu.ucla.sspace.text.Document; - -+import java.io.BufferedReader; -+ - import java.util.Arrays; - import java.util.HashSet; - import java.util.LinkedList; -@@ -90,29 +92,29 @@ public class WaCKyDependencyExtractorTest extends CoNLLDependencyExtractorTest { - - @Test public void testSingleExtraction() throws Exception { - DependencyExtractor extractor = new WaCKyDependencyExtractor(); -- Document doc = new StringDocument(SINGLE_PARSE); -+ Document doc = new StringDocument(toTabs(SINGLE_PARSE)); - DependencyTreeNode[] nodes = extractor.readNextTree(doc.reader()); - - assertEquals(12, nodes.length); - - // Check the basics of the node. -- assertEquals(""review"", nodes[8].word()); -+ assertEquals(""Review"", nodes[8].word()); - assertEquals(""NNP"", nodes[8].pos()); - - // Test expected relation for each of the links for ""Review"". 
- DependencyRelation[] expectedRelations = new DependencyRelation[] { -- new SimpleDependencyRelation(new SimpleDependencyTreeNode(""review"", ""NNP""), -+ new SimpleDependencyRelation(new SimpleDependencyTreeNode(""Review"", ""NNP""), - ""NMOD"", - new SimpleDependencyTreeNode(""the"", ""DT"")), -- new SimpleDependencyRelation(new SimpleDependencyTreeNode(""review"", ""NNP""), -+ new SimpleDependencyRelation(new SimpleDependencyTreeNode(""Review"", ""NNP""), - ""NMOD"", -- new SimpleDependencyTreeNode(""literary"", ""NNP"")), -- new SimpleDependencyRelation(new SimpleDependencyTreeNode(""review"", ""NNP""), -+ new SimpleDependencyTreeNode(""Literary"", ""NNP"")), -+ new SimpleDependencyRelation(new SimpleDependencyTreeNode(""Review"", ""NNP""), - ""ADV"", - new SimpleDependencyTreeNode(""in"", ""IN"")), - new SimpleDependencyRelation(new SimpleDependencyTreeNode(""for"", ""IN""), - ""PMOD"", -- new SimpleDependencyTreeNode(""review"", ""NNP"")) -+ new SimpleDependencyTreeNode(""Review"", ""NNP"")) - }; - - evaluateRelations(nodes[8], new LinkedList(Arrays.asList(expectedRelations))); -@@ -120,14 +122,15 @@ public class WaCKyDependencyExtractorTest extends CoNLLDependencyExtractorTest { - - @Test public void testDoubleExtraction() throws Exception { - DependencyExtractor extractor = new WaCKyDependencyExtractor(); -- Document doc = new StringDocument(DOUBLE_PARSE); -- DependencyTreeNode[] relations = extractor.readNextTree(doc.reader()); -+ Document doc = new StringDocument(toTabs(DOUBLE_PARSE)); -+ BufferedReader reader = doc.reader(); -+ DependencyTreeNode[] relations = extractor.readNextTree(reader); - assertTrue(relations != null); - assertEquals(12, relations.length); - - testFirstRoot(relations, 2); - -- relations = extractor.readNextTree(doc.reader()); -+ relations = extractor.readNextTree(reader); - assertTrue(relations != null); - assertEquals(4, relations.length); - -@@ -136,7 +139,7 @@ public class WaCKyDependencyExtractorTest extends CoNLLDependencyExtractorTest { - - @Test public void testRootNode() throws Exception { - DependencyExtractor extractor = new WaCKyDependencyExtractor(); -- Document doc = new StringDocument(SINGLE_PARSE); -+ Document doc = new StringDocument(toTabs(SINGLE_PARSE)); - DependencyTreeNode[] relations = extractor.readNextTree(doc.reader()); - - assertEquals(12, relations.length); -@@ -146,7 +149,7 @@ public class WaCKyDependencyExtractorTest extends CoNLLDependencyExtractorTest { - - @Test public void testConcatonatedTrees() throws Exception { - DependencyExtractor extractor = new WaCKyDependencyExtractor(); -- Document doc = new StringDocument(CONCATONATED_PARSE); -+ Document doc = new StringDocument(toTabs(CONCATONATED_PARSE)); - DependencyTreeNode[] relations = extractor.readNextTree(doc.reader()); - - assertEquals(16, relations.length); -@@ -156,11 +159,26 @@ public class WaCKyDependencyExtractorTest extends CoNLLDependencyExtractorTest { - - @Test public void testConcatonatedTreesZeroOffset() throws Exception { - DependencyExtractor extractor = new WaCKyDependencyExtractor(); -- Document doc = new StringDocument(DOUBLE_ZERO_OFFSET_PARSE); -+ Document doc = new StringDocument(toTabs(DOUBLE_ZERO_OFFSET_PARSE)); - DependencyTreeNode[] relations = extractor.readNextTree(doc.reader()); - - assertEquals(16, relations.length); - testFirstRoot(relations, 2); - testSecondRoot(relations, 13); - } --} -+ -+ static String toTabs(String doc) { -+ StringBuilder sb = new StringBuilder(); -+ String[] arr = doc.split(""\n""); -+ for (String line : arr) { -+ 
String[] cols = line.split(""\\s+""); -+ for (int i = 0; i < cols.length; ++i) { -+ sb.append(cols[i]); -+ if (i + 1 < cols.length) -+ sb.append('\t'); -+ } -+ sb.append('\n'); -+ } -+ return sb.toString(); -+ } -+} -\ No newline at end of file -diff --git a/test/edu/ucla/sspace/graph/DirectedMultigraphTests.java b/test/edu/ucla/sspace/graph/DirectedMultigraphTests.java -index eb6102f5..73424070 100644 ---- a/test/edu/ucla/sspace/graph/DirectedMultigraphTests.java -+++ b/test/edu/ucla/sspace/graph/DirectedMultigraphTests.java -@@ -36,1182 +36,1182 @@ - */ - public class DirectedMultigraphTests { - --// @Test public void testConstructor() { --// Set vertices = new HashSet(); --// DirectedMultigraph g = new DirectedMultigraph(); --// assertEquals(0, g.order()); --// assertEquals(0, g.size()); --// } +- } else { +- // not enough capacity :-( +- shouldForward = false; +- } +- } +- +- return shouldForward; +- } +- +- public void sendDirectedHashSearch(FriendConnection target, byte[] infoHash) { +- +- long metainfohashhash = filelistManager.getInfoHashhash(infoHash); +- +- int newSearchId = 0; +- while (newSearchId == 0) { +- newSearchId = random.nextInt(); +- } +- OSF2FHashSearch search = new OSF2FHashSearch(OSF2FMessage.CURRENT_VERSION, newSearchId, metainfohashhash); +- lock.lock(); +- try { +- sentSearches.put(newSearchId, new SentSearch(search)); +- } finally { +- lock.unlock(); +- } +- overlayManager.sendDirectedSearch(target, search); +- +- } +- +- public long getInfoHashHashFromSearchId(int searchId) { +- lock.lock(); +- try { +- SentSearch sentSearch = sentSearches.get(searchId); +- if (sentSearch != null && sentSearch.search instanceof OSF2FHashSearch) { +- return ((OSF2FHashSearch) sentSearch.search).getInfohashhash(); +- } +- } finally { +- lock.unlock(); +- } +- return -1; +- } +- +- public void sendHashSearch(byte[] infoHash) { +- long metainfohashhash = filelistManager.getInfoHashhash(infoHash); +- +- int newSearchId = 0; +- while (newSearchId == 0) { +- newSearchId = random.nextInt(); +- } +- OSF2FSearch search = new OSF2FHashSearch(OSF2FMessage.CURRENT_VERSION, newSearchId, metainfohashhash); +- +- // these should go in the slow (forward) path so to route around slow +- // nodes +- sendSearch(newSearchId, search, false); +- } +- +- private void sendSearch(int newSearchId, OSF2FSearch search, boolean skipQueue) { +- lock.lock(); +- try { +- sentSearches.put(newSearchId, new SentSearch(search)); +- } finally { +- lock.unlock(); +- } +- overlayManager.sendSearchOrCancel(search, skipQueue, false); +- } +- +- public int sendTextSearch(String searchString, TextSearchListener listener) { +- int newSearchId = 0; +- while (newSearchId == 0) { +- newSearchId = random.nextInt(); +- } +- +- if (FileCollection.containsKeyword(searchString)) { +- searchString = searchString.replaceAll("":"", "";""); +- searchString = handleKeyWords(searchString); +- } +- +- OSF2FSearch search = new OSF2FTextSearch(OSF2FMessage.CURRENT_VERSION, OSF2FMessage.FILE_LIST_TYPE_PARTIAL, newSearchId, searchString); +- textSearchManager.sentSearch(newSearchId, searchString, listener); +- sendSearch(newSearchId, search, false); +- return newSearchId; +- } +- +- private static String handleKeyWords(String searchString) { +- searchString = FileCollection.removeWhiteSpaceAfteKeyChars(searchString); +- String[] interestingKeyWords = new String[] { ""id"", ""sha1"", ""ed2k"" }; +- int[] interestingKeyWordExectedKeyLen = { 20, 20, 16 }; +- StringBuilder b = new StringBuilder(); +- String[] split = searchString.split("" ""); 
+- for (String s : split) { +- // check for id +- String toAdd = s; +- for (int i = 0; i < interestingKeyWords.length; i++) { +- String fromId = convertToBase64(s, interestingKeyWords[i], interestingKeyWordExectedKeyLen[i]); +- if (fromId != null) { +- toAdd = fromId; +- } +- } +- b.append(toAdd + "" ""); +- if (!toAdd.equals(s)) { +- logger.fine(""converted search: "" + s + ""->"" + toAdd); +- } +- } +- return b.toString().trim(); +- } +- +- private static String convertToBase64(String searchTerm, String _keyword, int expectedBytes) { +- for (String sep : FileCollection.KEYWORDENDINGS) { +- String keyword = _keyword + sep; +- if (searchTerm.contains(keyword)) { +- logger.finer(""converting base: "" + searchTerm); +- try { +- String baseXHash = searchTerm.substring(keyword.length()); +- logger.finer(""basex hash: "" + baseXHash); +- String hash = ShareManagerTools.baseXtoBase64(baseXHash, expectedBytes); +- String toAdd = keyword + hash; +- logger.finer(""new string: "" + toAdd); +- return toAdd; +- } catch (UnsupportedEncodingException e) { +- // TODO Auto-generated catch block +- e.printStackTrace(); +- } +- } +- } +- return null; +- } +- +- static class DebugChannelIdEntry implements Comparable { +- final int count; +- final String name; +- +- public DebugChannelIdEntry(String name, int count) { +- super(); +- this.name = name; +- this.count = count; +- } +- +- public int compareTo(DebugChannelIdEntry o) { +- if (o.count > count) { +- return 1; +- } else if (o.count == count) { +- return 0; +- } else { +- return -1; +- } +- } +- } +- +- private static class DebugChannelSetupErrorStats { +- private final LinkedList errorList = new LinkedList(); +- +- int MAX_SIZE = 10000; +- +- public void add(FriendConnection.OverlayRegistrationError error) { +- lock.lock(); +- try { +- if (errorList.size() > MAX_SIZE) { +- errorList.removeLast(); +- } +- errorList.addFirst(error); +- } finally { +- lock.unlock(); +- } +- } +- +- public String getDebugStats() { +- StringBuilder b = new StringBuilder(); +- HashMap errorsPerFriend = new HashMap(); +- HashMap errorsPerPair = new HashMap(); +- lock.lock(); +- try { +- +- for (FriendConnection.OverlayRegistrationError error : errorList) { +- final String s = error.setupMessageSource; +- if (!errorsPerFriend.containsKey(s)) { +- errorsPerFriend.put(s, 0); +- } +- errorsPerFriend.put(s, errorsPerFriend.get(s) + 1); +- +- String d = error.direction; +- if (!errorsPerPair.containsKey(d)) { +- errorsPerPair.put(d, 0); +- } +- errorsPerPair.put(d, errorsPerPair.get(d) + 1); +- } +- +- ArrayList friendTotalOrder = new ArrayList(); +- for (String f : errorsPerFriend.keySet()) { +- friendTotalOrder.add(new DebugChannelIdEntry(f, errorsPerFriend.get(f))); +- } +- Collections.sort(friendTotalOrder); +- b.append(""by source:\n""); +- for (DebugChannelIdEntry e : friendTotalOrder) { +- b.append("" "" + e.name + "" "" + e.count + ""\n""); +- } +- +- ArrayList byPairOrder = new ArrayList(); +- for (String f : errorsPerPair.keySet()) { +- byPairOrder.add(new DebugChannelIdEntry(f, errorsPerPair.get(f))); +- } +- Collections.sort(byPairOrder); +- b.append(""by pair:\n""); +- for (DebugChannelIdEntry e : byPairOrder) { +- b.append("" "" + e.name + "" "" + e.count + ""\n""); +- } +- +- } finally { +- lock.unlock(); +- } +- return b.toString(); +- } +- } +- +- class DelayedSearchQueue { +- +- long lastSearchesPerSecondLogTime = 0; +- long lastBytesPerSecondCount = 0; +- int searchCount = 0; +- +- private long mDelay; +- private final LinkedBlockingQueue queue = new 
LinkedBlockingQueue(); +- private final HashMap queuedSearches = new HashMap(); +- +- public DelayedSearchQueue(long delay) { +- this.mDelay = delay; +- Thread t = new Thread(new DelayedSearchQueueThread()); +- t.setDaemon(true); +- t.setName(SEARCH_QUEUE_THREAD_NAME); +- t.start(); +- } +- +- /** +- * Warning -- changing this won't re-order things already in the queue, so if you add something with +- * a much smaller delay than the current head of the queue, it will wait until that's removed before +- * sending the new message. +- */ +- public void setDelay( long inDelay ) { +- this.mDelay = inDelay; +- } +- +- public void add(FriendConnection source, OSF2FSearch search) { +- +- if (lastSearchesPerSecondLogTime + 1000 < System.currentTimeMillis()) { +- +- lock.lock(); +- try { +- logger.fine(""Searches/sec: "" + searchCount + "" bytes: "" +- + lastBytesPerSecondCount + "" searchQueueSize: "" +- + queuedSearches.size()); +- } finally { +- lock.unlock(); +- } +- +- lastSearchesPerSecondLogTime = System.currentTimeMillis(); +- searchCount = 0; +- lastBytesPerSecondCount = 0; +- } +- +- searchCount++; +- lastBytesPerSecondCount += FriendConnectionQueue.getMessageLen(search); +- +- lock.lock(); +- try { +- +- // Flush the accounting info every 60 seconds +- if (SearchManager.this.lastSearchAccountingFlush + 60*1000 < System.currentTimeMillis()) { +- lastSearchAccountingFlush = System.currentTimeMillis(); +- searchesPerFriend.clear(); +- } +- +- // If the search queue is more than half full, start dropping searches +- // proportional to how much of the total queue each person is +- // consuming +- if (queuedSearches.size() > 0.25 * MAX_SEARCH_QUEUE_LENGTH) { +- if (searchesPerFriend.containsKey(source.getRemoteFriend())) { +- int outstanding = searchesPerFriend.get(source.getRemoteFriend()).v; +- +- // We add a hard limit on the number of searches from any one person. +- if (outstanding > 0.15 * MAX_SEARCH_QUEUE_LENGTH) { +- logger.fine(""Dropping due to 25% of total queue consumption "" + source.getRemoteFriend().getNick() + "" "" + outstanding + "" / "" + MAX_SEARCH_QUEUE_LENGTH); +- return; +- } +- +- // In other cases, we drop proportional to the consumption of the overall queue. +- double acceptProb = (double) outstanding / (double) queuedSearches.size(); +- if (random.nextDouble() < acceptProb) { +- if (logger.isLoggable(Level.FINE)) { +- logger.fine(""*** RED for search from "" + source + "" outstanding: "" +- + outstanding + "" total: "" + queuedSearches.size()); +- } +- return; +- } +- } +- } +- +- if (queuedSearches.size() > MAX_SEARCH_QUEUE_LENGTH) { +- if (logger.isLoggable(Level.FINER)) { +- logger.finer(""not forwarding search, queue length too large. 
id: "" +- + search.getSearchID()); +- } +- return; +- } +- if (!queuedSearches.containsKey(search.getSearchID())) { +- logger.finest(""adding search to forward queue, will forward in "" + mDelay +- + "" ms""); +- DelayedSearchQueueEntry entry = new DelayedSearchQueueEntry(search, source, +- System.currentTimeMillis() + mDelay); +- +- if (searchesPerFriend.containsKey(source.getRemoteFriend()) == false) { +- searchesPerFriend.put(source.getRemoteFriend(), new SearchManager.MutableInteger()); +- } +- searchesPerFriend.get(source.getRemoteFriend()).v++; +- logger.finest(""Search for friend: "" + source.getRemoteFriend().getNick() + "" "" +- + searchesPerFriend.get(source.getRemoteFriend()).v); +- +- queuedSearches.put(search.getSearchID(), entry); +- queue.add(entry); +- +- } else { +- logger.finer(""search already in queue, not adding""); +- } +- } finally { +- lock.unlock(); +- } +- } - --// @Test(expected=NullPointerException.class) public void testConstructor2NullArg() { --// Graph g = new SparseUndirectedGraph((Graph>)null); --// } +- /* +- * make sure to already have the lock when calling this +- */ +- public boolean isQueued(OSF2FSearch search) { +- return queuedSearches.containsKey(search.getSearchID()); +- } - --// @Test public void testAdd() { --// DirectedMultigraph g = new DirectedMultigraph(); --// assertTrue(g.add(0)); --// assertEquals(1, g.order()); --// assertTrue(g.contains(0)); --// // second add should have no effect --// assertFalse(g.add(0)); --// assertEquals(1, g.order()); --// assertTrue(g.contains(0)); +- class DelayedSearchQueueThread implements Runnable { - --// assertTrue(g.add(1)); --// assertEquals(2, g.order()); --// assertTrue(g.contains(1)); --// } +- public void run() { +- while (true) { +- try { +- DelayedSearchQueueEntry e = queue.take(); +- long timeUntilSend = e.dontSendBefore - System.currentTimeMillis(); +- if (timeUntilSend > 0) { +- logger.finer(""got search ("" + e.search.getDescription() + "") to forward, waiting "" + timeUntilSend + "" ms until sending""); +- Thread.sleep(timeUntilSend); +- } +- forwardSearch(e.source, e.search); +- /* +- * remove the search from the queuedSearchesMap +- */ +- lock.lock(); +- try { +- queuedSearches.remove(e.search.getSearchID()); +- // If searchesPerFriend was flushed while this search was in the +- // queue, the get() call will return null. 
+- if (searchesPerFriend.containsKey(e.source.getRemoteFriend())) { +- searchesPerFriend.get(e.source.getRemoteFriend()).v--; +- } +- } finally { +- lock.unlock(); +- } +- /* +- * if we didn't sleep at all, sleep the min time between +- * searches +- */ +- if (timeUntilSend < 1) { +- double ms = 1000.0 / FriendConnection.MAX_OUTGOING_SEARCH_RATE; +- int msFloor = (int) Math.floor(ms); +- int nanosLeft = (int) Math.round((ms - msFloor) * 1000000.0); +- logger.finest(""sleeping "" + msFloor + ""ms + "" + nanosLeft + "" ns""); +- Thread.sleep(msFloor, Math.min(999999, nanosLeft)); +- } - --// @Test public void testEquals() { --// DirectedMultigraph g1 = new DirectedMultigraph(); --// DirectedMultigraph g2 = new DirectedMultigraph(); --// for (int i = 0; i < 10; ++i) { --// for (int j = i + 1; j < 10; ++j) { --// g1.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); --// g2.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); --// } --// } --// assertEquals(g1, g2); +- } catch (Exception e1) { +- logger.warning(""*** Delayed search queue thread error: "" + e1.toString()); +- e1.printStackTrace(); +- BackendErrorLog.get().logException(e1); +- } +- } +- } +- } +- } - --// g1 = new DirectedMultigraph(); --// g2 = new DirectedMultigraph(); --// for (int i = 0; i < 10; ++i) { --// for (int j = i + 1; j < 10; ++j) { --// g1.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); --// g2.add(new SimpleDirectedTypedEdge(""type-1"",j, i)); --// } --// } +- static class DelayedSearchQueueEntry { +- final long dontSendBefore; +- final OSF2FSearch search; +- final FriendConnection source; +- final long insertionTime; +- +- public DelayedSearchQueueEntry(OSF2FSearch search, FriendConnection source, long dontSendBefore) { +- this.insertionTime = System.currentTimeMillis(); +- this.search = search; +- this.source = source; +- this.dontSendBefore = dontSendBefore; +- } +- } - --// assertFalse(g1.equals(g2)); --// assertFalse(g2.equals(g1)); --// } +- class ForwardedSearch { +- private int responsesForwarded = 0; +- private final OSF2FSearch search; +- private final FriendConnection source; +- private final long time; - --// @Test public void testEqualGeneric() { --// DirectedMultigraph g1 = new DirectedMultigraph(); --// Graph> g2 = new GenericGraph>(); --// for (int i = 0; i < 10; ++i) { --// for (int j = i + 1; j < 10; ++j) { --// g1.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); --// g2.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); --// } --// } --// assertEquals(g1, g2); --// } +- public ForwardedSearch(FriendConnection source, OSF2FSearch search) { +- this.time = System.currentTimeMillis(); +- this.source = source; +- this.search = search; - --// @Test public void testContainsEdge() { --// DirectedMultigraph g = new DirectedMultigraph(); --// for (int i = 0; i < 100; ++i) --// for (int j = i + 1; j < 100; ++j) --// g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); +- } - --// for (int i = 0; i < 100; ++i) { --// for (int j = i + 1; j < 100; ++j) { --// g.contains(new SimpleDirectedTypedEdge(""type-1"",i, j)); --// g.contains(new SimpleDirectedTypedEdge(""type-1"",j, i)); --// g.contains(i, j); --// g.contains(j, i); --// } --// } --// } +- public long getAge() { +- return System.currentTimeMillis() - this.time; +- } - --// @Test public void testAddEdge() { --// DirectedMultigraph g = new DirectedMultigraph(); --// assertTrue(g.add(new SimpleDirectedTypedEdge(""type-1"",0, 1))); --// assertEquals(2, g.order()); --// assertEquals(1, g.size()); --// assertTrue(g.contains(new 
SimpleDirectedTypedEdge(""type-1"",0, 1))); +- public int getResponseNum() { +- return responsesForwarded; +- } - --// g.add(new SimpleDirectedTypedEdge(""type-1"",0, 2)); --// assertEquals(3, g.order()); --// assertEquals(2, g.size()); --// assertTrue(g.contains(new SimpleDirectedTypedEdge(""type-1"",0, 2))); +- public OSF2FSearch getSearch() { +- return search; +- } - --// g.add(new SimpleDirectedTypedEdge(""type-1"",3, 4)); --// assertEquals(5, g.order()); --// assertEquals(3, g.size()); --// assertTrue(g.contains(new SimpleDirectedTypedEdge(""type-1"",3, 4))); --// } +- public int getSearchId() { +- return search.getSearchID(); +- } - --// @Test public void testRemoveLesserVertexWithEdges() { --// DirectedMultigraph g = new DirectedMultigraph(); +- public FriendConnection getSource() { +- return source; +- } - --// for (int i = 1; i < 100; ++i) { --// DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",0, i); --// g.add(e); --// } -+ @Test public void testConstructor() { -+ Set vertices = new HashSet(); -+ DirectedMultigraph g = new DirectedMultigraph(); -+ assertEquals(0, g.order()); -+ assertEquals(0, g.size()); +- public void gotResponse() { +- responsesForwarded++; +- } +- +- public boolean isTimedOut() { +- return getAge() > MAX_SEARCH_AGE; +- } +- } +- +- static class RotatingBloomFilter { +- private static final int OBJECTS_TO_STORE = 1000000; +- +- private static final int SIZE_IN_BITS = 10240 * 1024; +- +- private long currentFilterCreated; +- private final LinkedList filters = new LinkedList(); +- private final int maxBuckets; +- private final long maxFilterAge; +- +- public RotatingBloomFilter(long totalAge, int buckets) { +- this.maxBuckets = buckets; +- this.maxFilterAge = (totalAge / buckets) + 1; +- rotate(); +- } +- +- public boolean contains(int searchId, int searchValue) { +- byte[] bytes = bytesFromInts(searchId, searchValue); +- for (BloomFilter f : filters) { +- if (f.test(bytes)) { +- return true; +- } +- } +- +- return false; +- } +- +- public double getPrevFilterFalsePositiveEst() { +- if (filters.size() > 1) { +- return filters.get(1).getPredictedFalsePositiveRate(); +- } else { +- return filters.getFirst().getPredictedFalsePositiveRate(); +- } +- } +- +- public int getPrevFilterNumElements() { +- if (filters.size() > 1) { +- return filters.get(1).getUniqueObjectsStored(); +- } else { +- return filters.getFirst().getUniqueObjectsStored(); +- } +- } +- +- public void insert(int searchId, int searchValue) { +- byte[] bytes = bytesFromInts(searchId, searchValue); +- filters.getFirst().insert(bytes); +- } +- +- private void rotate() { +- +- if (filters.size() > 0) { +- BloomFilter prevFilter = filters.getFirst(); +- String str = ""Rotating bloom filter: objects="" + prevFilter.getUniqueObjectsStored() + "" predicted false positive rate="" + (100 * prevFilter.getPredictedFalsePositiveRate() + ""%""); +- logger.info(str); +- } +- currentFilterCreated = System.currentTimeMillis(); +- try { +- filters.addFirst(new BloomFilter(SIZE_IN_BITS, OBJECTS_TO_STORE)); +- } catch (NoSuchAlgorithmException e) { +- // TODO Auto-generated catch block +- e.printStackTrace(); +- } +- if (filters.size() > maxBuckets) { +- filters.removeLast(); +- } +- } +- +- public boolean rotateIfNeeded() { +- long currentFilterAge = System.currentTimeMillis() - currentFilterCreated; +- if (currentFilterAge > maxFilterAge) { +- rotate(); +- return true; +- } +- return false; +- } +- +- private static byte[] bytesFromInts(int int1, int int2) { +- byte[] bytes = new byte[8]; +- +- bytes[0] = 
(byte) (int1 >>> 24); +- bytes[1] = (byte) (int1 >>> 16); +- bytes[2] = (byte) (int1 >>> 8); +- bytes[3] = (byte) int1; +- +- bytes[4] = (byte) (int2 >>> 24); +- bytes[5] = (byte) (int2 >>> 16); +- bytes[6] = (byte) (int2 >>> 8); +- bytes[7] = (byte) int2; +- return bytes; +- } +- +- public static void main(String[] args) { +- OSF2FMain.getSingelton(); +- logger.setLevel(Level.FINE); +- Random rand = new Random(); +- +- RotatingBloomFilter bf = new RotatingBloomFilter(60 * 1000, 4); +- +- Set inserts = new HashSet(); +- for (int j = 0; j < 8; j++) { +- for (int i = 0; i < 20000; i++) { +- int r1 = rand.nextInt(); +- int r2 = rand.nextInt(); +- byte[] bytes = bytesFromInts(r1, r2); +- inserts.add(new String(Base64.encode(bytes))); +- bf.insert(r1, r2); +- if (!bf.contains(r1, r2)) { +- System.err.println(""insert failes (does not contain it anymore)""); +- } +- } +- bf.rotate(); +- } +- +- int fps = 0, to_check = 200000; +- for (int i = 0; i < to_check; i++) { +- int int1; +- int int2; +- byte[] bytes; +- do { +- int1 = rand.nextInt(); +- int2 = rand.nextInt(); +- bytes = bytesFromInts(int1, int2); +- } while (inserts.contains(new String(Base64.encode(bytes))) == true); +- if (bf.contains(int1, int2) == true) +- fps++; +- } +- +- System.out.println(""false positive check, "" + fps + ""/"" + to_check); +- +- System.out.println(""mem: "" + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory())); +- +- } +- +- } +- +- class SentSearch { +- private int responses = 0; +- private final OSF2FSearch search; +- +- private final long time; +- +- public SentSearch(OSF2FSearch search) { +- this.search = search; +- this.time = System.currentTimeMillis(); +- } +- +- public long getAge() { +- return System.currentTimeMillis() - this.time; +- } +- +- public int getResponseNum() { +- return responses; +- } +- +- public OSF2FSearch getSearch() { +- return search; +- } +- +- public void gotResponse() { +- responses++; +- } +- +- public boolean isTimedOut() { +- return getAge() > MAX_SEARCH_AGE; +- } +- +- } +- +- public interface TextSearchListener { +- public void searchResponseReceived(TextSearchResponseItem r); +- } +- +- class TextSearchManager { +- private final ConcurrentHashMap responses; +- private final ConcurrentHashMap listeners; +- +- public TextSearchManager() { +- responses = new ConcurrentHashMap(); +- listeners = new ConcurrentHashMap(); +- } +- +- public List getResults(int searchId) { +- TextSearchResponse resps = responses.get(searchId); +- +- HashMap result = new HashMap(); +- +- if (resps != null) { +- /* +- * group into file collections +- */ +- for (TextSearchResponseItem item : resps.getItems()) { +- for (FileCollection collection : item.getFileList().getElements()) { +- if (result.containsKey(collection.getUniqueID())) { +- TextSearchResult existing = result.get(collection.getUniqueID()); +- existing.merge(item, collection); +- } else { +- // mark stuff that we already have +- boolean alreadyInLibrary = true; +- GlobalManager globalManager = AzureusCoreImpl.getSingleton().getGlobalManager(); +- DownloadManager dm = globalManager.getDownloadManager(new HashWrapper(collection.getUniqueIdBytes())); +- if (dm == null) { +- alreadyInLibrary = false; +- } +- result.put(collection.getUniqueID(), new TextSearchResult(item, collection, alreadyInLibrary)); +- } +- } +- } +- +- // /* +- // * verify that we didn't get any bad data +- // */ +- // for (TextSearchResult item : result.values()) { +- // FileCollection collection = item.getCollection(); +- // String searchString = 
resps.getSearchString(); +- // boolean collectionMatch = collection.nameMatch(searchString); +- // +- // Set filteredFiles = new +- // HashSet(); +- // List allChildren = collection.getChildren(); +- // for (int i = 0; i < allChildren.size(); i++) { +- // FileListFile f = allChildren.get(i); +- // if (filteredFiles.contains(f)) { +- // continue; +- // } +- // if (collectionMatch) { +- // filteredFiles.add(f); +- // } else if (f.searchMatch(searchString)) { +- // filteredFiles.add(f); +- // } else { +- // logger.fine(""got search result that doesn't match search: "" + +- // f.getFileName() + "" ! "" + searchString); +- // } +- // } +- // logger.fine(collection.getName() + "" totalResp: "" + +- // allChildren.size() + "" afterFiler="" + filteredFiles.size()); +- // collection.setChildren(new +- // ArrayList(filteredFiles)); +- // } +- +- return new ArrayList(result.values()); +- } +- logger.fine(""no responses for searchId="" + searchId); +- return new ArrayList(); +- } +- +- public void gotSearchResponse(int searchId, Friend throughFriend, FileList fileList, int channelId, int connectionId) { +- TextSearchResponse r = responses.get(searchId); +- if (r != null) { +- long age = System.currentTimeMillis() - r.getTime(); +- TextSearchResponseItem item = new TextSearchResponseItem(throughFriend, fileList, age, channelId, connectionId); +- r.add(item); +- TextSearchListener listener = listeners.get(searchId); +- if (listener != null) { +- listener.searchResponseReceived(item); +- } +- } else { +- logger.warning(""got response for unknown search""); +- } +- } +- +- public void sentSearch(int searchId, String searchString, TextSearchListener listener) { +- responses.put(searchId, new TextSearchResponse(searchString)); +- if (listener != null) { +- listeners.put(searchId, listener); +- } +- } +- +- public void clearOldResponses() { +- for (Iterator iterator = responses.keySet().iterator(); iterator.hasNext();) { +- Integer key = iterator.next(); +- TextSearchResponse response = responses.get(key); +- if (System.currentTimeMillis() - response.getTime() > 10 * 60 * 1000) { +- iterator.remove(); +- listeners.remove(key); +- } +- +- } +- } +- } +- +- public boolean isSearchInBloomFilter(OSF2FSearch search) { +- lock.lock(); +- try { +- int searchID = search.getSearchID(); +- int valueID = search.getValueID(); +- if (recentSearches.contains(searchID, valueID)) { +- bloomSearchesBlockedCurr++; +- } +- } finally { +- lock.unlock(); +- } +- return false; +- } ++ public static final String SEARCH_QUEUE_THREAD_NAME = ""DelayedSearchQueue""; ++ ++ private final static BigFatLock lock = OverlayManager.lock; ++ private static Logger logger = Logger.getLogger(SearchManager.class.getName()); ++ // search sources are remembered for 1 minute, any replies after this will ++ // be dropped ++ public static final long MAX_SEARCH_AGE = 60 * 1000; ++ public static final int MAX_SEARCH_QUEUE_LENGTH = 100; ++ // private static final int MAX_SEARCH_RESP_BEFORE_CANCEL = ++ // COConfigurationManager.getIntParameter(""f2f_search_max_paths""); ++ ++ protected int mMaxSearchResponsesBeforeCancel = COConfigurationManager ++ .getIntParameter(""f2f_search_max_paths""); ++ ++ // don't respond if average torrent upload rate is less than 10K/s ++ private static final double NO_RESPONSE_TORRENT_AVERAGE_RATE = 10000; ++ ++ private static final double NO_RESPONSE_TOTAL_FRAC_OF_MAX_UPLOAD = 0.9; ++ ++ private static final double NO_RESPONSE_TRANSPORT_FRAC_OF_MAX_UPLOAD = 0.75; ++ /* ++ * this is to avoid searches living forever, search uid 
are remembered for ++ * 45min-1h, there are 4 bloom filter buckets that are rotating, each one ++ * containing 15minutes worth of searches ++ */ ++ private static final int RECENT_SEARCH_BUCKETS = 4; ++ ++ private static final long RECENT_SEARCH_MEMORY = 20 * 60 * 1000; ++ // static final int SEARCH_DELAY = ++ // COConfigurationManager.getIntParameter(""f2f_search_forward_delay""); ++ protected int mSearchDelay = COConfigurationManager.getIntParameter(""f2f_search_forward_delay""); ++ ++ /** ++ * This Map is protected by the BigFatLock: lock. We use this to drop ++ * searches from friends that are crowding the outgoing search queue early, ++ * thus allowing friends that send searches more rarely to get through. ++ * ++ * This map is emptied once every 60 seconds to deal with accounting errors ++ * that may accumulate. ++ */ ++ class MutableInteger { ++ public int v = 0; ++ } ++ ++ long lastSearchAccountingFlush = System.currentTimeMillis(); ++ private final Map searchesPerFriend = new HashMap(); ++ ++ private int bloomSearchesBlockedCurr = 0; ++ ++ private int bloomSearchesBlockedPrev = 0; ++ private int bloomSearchesSentCurr = 0; ++ private int bloomSearchesSentPrev = 0; ++ ++ private final HashMap canceledSearches; ++ private final DebugChannelSetupErrorStats debugChannelIdErrorSetupErrorStats = new DebugChannelSetupErrorStats(); ++ ++ private final DelayedSearchQueue delayedSearchQueue; ++ ++ // private final DeterministicDelayResponseQueue delayedResponseQueue; ++ ++ private final FileListManager filelistManager; ++ ++ private final HashMap forwardedSearches; ++ private int forwardedSearchNum = 0; ++ private List hashSearchStats = new LinkedList(); ++ ++ private boolean includeLanUploads; ++ private final double NO_FORWARD_FRAC_OF_MAX_UPLOAD = 0.9; ++ private final OverlayManager overlayManager; ++ ++ private final Random random = new Random(); ++ private final RandomnessManager randomnessManager; ++ ++ private int rateLimitInKBps; ++ ++ private final RotatingBloomFilter recentSearches; ++ private final HashMap sentSearches; ++ private final GlobalManagerStats stats; ++ private final TextSearchManager textSearchManager; ++ ++ private List textSearchStats = new LinkedList(); ++ ++ private final DelayedExecutor delayedExecutor; ++ ++ private String[] filteredKeywords = new String[0]; ++ ++ public SearchManager(OverlayManager overlayManager, FileListManager filelistManager, ++ RandomnessManager randomnessManager, GlobalManagerStats stats) { ++ this.stats = stats; ++ this.delayedExecutor = DelayedExecutorService.getInstance().getVariableDelayExecutor(); ++ // this.delayedResponseQueue = new DeterministicDelayResponseQueue(); ++ this.overlayManager = overlayManager; ++ this.sentSearches = new HashMap(); ++ this.forwardedSearches = new HashMap(); ++ this.canceledSearches = new HashMap(); ++ this.filelistManager = filelistManager; ++ this.randomnessManager = randomnessManager; ++ this.textSearchManager = new TextSearchManager(); ++ this.recentSearches = new RotatingBloomFilter(RECENT_SEARCH_MEMORY, RECENT_SEARCH_BUCKETS); ++ this.delayedSearchQueue = new DelayedSearchQueue(mSearchDelay); ++ COConfigurationManager.addAndFireParameterListeners(new String[] { ""LAN Speed Enabled"", ++ ""Max Upload Speed KBs"", ""oneswarm.search.filter.keywords"", ""f2f_search_max_paths"", ++ ""f2f_search_forward_delay"" }, new ParameterListener() { ++ public void parameterChanged(String parameterName) { ++ includeLanUploads = !COConfigurationManager ++ .getBooleanParameter(""LAN Speed Enabled""); ++ 
rateLimitInKBps = COConfigurationManager.getIntParameter(""Max Upload Speed KBs""); ++ ++ StringList keywords = COConfigurationManager ++ .getStringListParameter(""oneswarm.search.filter.keywords""); ++ if (keywords != null) { ++ String[] neu = new String[keywords.size()]; ++ for (int i = 0; i < keywords.size(); i++) { ++ String firstTok = (new StringTokenizer(keywords.get(i))).nextToken(); ++ neu[i] = firstTok; ++ } ++ filteredKeywords = neu; ++ logger.fine(""Updated filtered keywords "" + keywords.size()); ++ } ++ ++ mMaxSearchResponsesBeforeCancel = COConfigurationManager ++ .getIntParameter(""f2f_search_max_paths""); ++ ++ mSearchDelay = COConfigurationManager.getIntParameter(""f2f_search_forward_delay""); ++ delayedSearchQueue.setDelay(mSearchDelay); ++ } ++ }); ++ } ++ ++ private boolean canForwardSearch() { ++ double util = fracUpload(); ++ if (util == -1 || util < NO_FORWARD_FRAC_OF_MAX_UPLOAD) { ++ return true; ++ } else { ++ logger.finest(""not forwarding search (overloaded, util="" + util + "")""); ++ return false; ++ } + } + -+ @Test(expected=NullPointerException.class) public void testConstructor2NullArg() { -+ Graph g = new SparseUndirectedGraph((Graph>)null); -+ } ++ private boolean canRespondToSearch() { ++ double totalUtil = fracUpload(); ++ if (totalUtil == -1) { ++ return true; ++ } ++ // ok, check if we are using more than 90% of total ++ if (totalUtil < NO_RESPONSE_TOTAL_FRAC_OF_MAX_UPLOAD) { ++ return true; ++ } ++ double transUtil = fracTransportUpload(); ++ // check if we are using more than 75% for transports ++ if (transUtil < NO_RESPONSE_TRANSPORT_FRAC_OF_MAX_UPLOAD) { ++ return true; ++ } ++ ++ double torrentAvgSpeed = getAverageUploadPerRunningTorrent(); ++ if (torrentAvgSpeed == -1) { ++ return true; ++ } ++ if (torrentAvgSpeed > NO_RESPONSE_TORRENT_AVERAGE_RATE) { ++ return true; ++ } ++ if (logger.isLoggable(Level.FINER)) { ++ logger.finer(""not responding to search (overloaded, util="" + transUtil + "")""); ++ } ++ return false; ++ } ++ ++ public void clearTimedOutSearches() { ++ lock.lock(); ++ try { ++ /* ++ * check if we need to rotate the bloom filter of recent searches ++ */ ++ boolean rotated = recentSearches.rotateIfNeeded(); ++ if (rotated) { ++ bloomSearchesBlockedPrev = bloomSearchesBlockedCurr; ++ bloomSearchesBlockedCurr = 0; ++ bloomSearchesSentPrev = bloomSearchesSentCurr; ++ bloomSearchesSentCurr = 0; ++ } ++ ++ for (Iterator iterator = forwardedSearches.values().iterator(); iterator ++ .hasNext();) { ++ ForwardedSearch fs = iterator.next(); ++ if (fs.isTimedOut()) { ++ iterator.remove(); ++ } ++ } ++ ++ for (Iterator iterator = sentSearches.values().iterator(); iterator ++ .hasNext();) { ++ SentSearch sentSearch = iterator.next(); ++ if (sentSearch.isTimedOut()) { ++ iterator.remove(); ++ if (sentSearch.getSearch() instanceof OSF2FHashSearch) { ++ hashSearchStats.add(sentSearch.getResponseNum()); ++ } else if (sentSearch.getSearch() instanceof OSF2FTextSearch) { ++ textSearchStats.add(sentSearch.getResponseNum()); ++ } ++ } ++ } ++ ++ /* ++ * Delete any expired canceled searches ++ */ ++ LinkedList toDelete = new LinkedList(); ++ for (Integer key : canceledSearches.keySet()) { ++ long age = System.currentTimeMillis() - canceledSearches.get(key); ++ if (age > MAX_SEARCH_AGE) { ++ toDelete.add(key); ++ } ++ } ++ ++ for (Integer key : toDelete) { ++ canceledSearches.remove(key); ++ } ++ ++ textSearchManager.clearOldResponses(); ++ } finally { ++ lock.unlock(); ++ } ++ } ++ ++ public List debugCanceledSearches() { ++ List l = new LinkedList(); 
++ lock.lock(); ++ try { ++ for (Integer s : canceledSearches.keySet()) { ++ l.add(""search="" + Integer.toHexString(s) + "" age="" ++ + ((System.currentTimeMillis() - canceledSearches.get(s)) / 1000) + ""s""); ++ } ++ } finally { ++ lock.unlock(); ++ } ++ return l; ++ } ++ ++ public List debugForwardedSearches() { ++ List l = new LinkedList(); ++ lock.lock(); ++ try { ++ for (ForwardedSearch f : forwardedSearches.values()) { ++ l.add(""search="" + Integer.toHexString(f.getSearchId()) + "" responses="" ++ + f.getResponseNum() + "" age="" + (f.getAge() / 1000) + ""s""); ++ } ++ } finally { ++ lock.unlock(); ++ } ++ return l; ++ } ++ ++ public List debugSentSearches() { ++ List l = new LinkedList(); ++ lock.lock(); ++ try { ++ for (SentSearch s : sentSearches.values()) { ++ l.add(""search="" + Integer.toHexString(s.getSearch().getSearchID()) + "" responses="" ++ + s.getResponseNum() + "" age="" + (s.getAge() / 1000) + ""s""); ++ } ++ } finally { ++ lock.unlock(); ++ } ++ return l; ++ } ++ ++ private void forwardSearch(FriendConnection source, OSF2FSearch search) { ++ lock.lock(); ++ try { ++ ++ // check if search is canceled or forwarded first ++ int searchID = search.getSearchID(); ++ if (forwardedSearches.containsKey(searchID)) { ++ logger.finest(""not forwarding search, already forwarded. id: "" + searchID); ++ return; ++ } ++ ++ if (canceledSearches.containsKey(searchID)) { ++ logger.finest(""not forwarding search, cancel received. id: "" + searchID); ++ return; ++ } ++ ++ int valueID = search.getValueID(); ++ if (recentSearches.contains(searchID, valueID)) { ++ bloomSearchesBlockedCurr++; ++ logger.finest(""not forwarding search, in recent filter. id: "" + searchID); ++ return; ++ } ++ bloomSearchesSentCurr++; ++ forwardedSearchNum++; ++ if (logger.isLoggable(Level.FINEST)) { ++ logger.finest(""forwarding search "" + search.getDescription() + "" id: "" + searchID); ++ } ++ forwardedSearches.put(searchID, new ForwardedSearch(source, search)); ++ recentSearches.insert(searchID, valueID); ++ } finally { ++ lock.unlock(); ++ } ++ ++ overlayManager.forwardSearchOrCancel(source, search.clone()); ++ } ++ ++ private double fracTransportUpload() { ++ ++ if (rateLimitInKBps < 1) { ++ return -1; ++ } ++ long uploadRate = overlayManager.getTransportSendRate(includeLanUploads); ++ ++ double util = uploadRate / (rateLimitInKBps * 1024.0); ++ return util; ++ } ++ ++ private double fracUpload() { ++ ++ if (rateLimitInKBps < 1) { ++ return -1; ++ } ++ long uploadRate; ++ if (!includeLanUploads) { ++ uploadRate = stats.getProtocolSendRateNoLAN() + stats.getDataSendRateNoLAN(); ++ } else { ++ uploadRate = stats.getProtocolSendRate() + stats.getDataSendRate(); ++ } ++ ++ double util = uploadRate / (rateLimitInKBps * 1024.0); ++ return util; ++ } ++ ++ public int getAndClearForwardedSearchNum() { ++ lock.lock(); ++ try { ++ int ret = forwardedSearchNum; ++ forwardedSearchNum = 0; ++ return ret; ++ } finally { ++ lock.unlock(); ++ } ++ } ++ ++ public List getAndClearHashSearchStats() { ++ lock.lock(); ++ try { ++ List ret = hashSearchStats; ++ hashSearchStats = new LinkedList(); ++ return ret; ++ } finally { ++ lock.unlock(); ++ } ++ } ++ ++ public List getAndClearTextSearchStats() { ++ lock.lock(); ++ try { ++ List ret = textSearchStats; ++ textSearchStats = new LinkedList(); ++ return ret; ++ } finally { ++ lock.unlock(); ++ } ++ } ++ ++ @SuppressWarnings(""unchecked"") ++ private double getAverageUploadPerRunningTorrent() { ++ LinkedList dms = new LinkedList(); ++ final List downloadManagers = 
AzureusCoreImpl.getSingleton() ++ .getGlobalManager().getDownloadManagers(); ++ dms.addAll(downloadManagers); ++ ++ long total = 0; ++ int num = 0; ++ ++ for (DownloadManager dm : dms) { ++ final DownloadManagerStats s = dm.getStats(); ++ if (s == null) { ++ continue; ++ } ++ final PEPeerManager p = dm.getPeerManager(); ++ if (p == null) { ++ continue; ++ } ++ ++ if (p.getNbPeers() == 0 && p.getNbSeeds() == 0) { ++ continue; ++ } ++ ++ long uploadRate = s.getDataSendRate() + s.getProtocolSendRate(); ++ total += uploadRate; ++ num++; ++ } ++ if (num == 0) { ++ return -1; ++ } ++ ++ return ((double) total) / num; ++ ++ } ++ ++ public String getSearchDebug() { ++ StringBuilder b = new StringBuilder(); ++ b.append(""total_frac="" + fracUpload() + ""\ntransport_frac="" + fracTransportUpload() ++ + ""\ntorrent_avg="" + getAverageUploadPerRunningTorrent()); ++ b.append(""\ncan forward="" + canForwardSearch()); ++ b.append(""\ncan respond="" + canRespondToSearch()); ++ ++ b.append(""\n\nforwarded searches size="" + forwardedSearches.size() + "" canceled size="" ++ + canceledSearches.size() + "" sent size="" + sentSearches.size()); ++ b.append(""\nbloom: stored="" + recentSearches.getPrevFilterNumElements() ++ + "" est false positives="" ++ + (100 * recentSearches.getPrevFilterFalsePositiveEst() + ""%"")); ++ b.append(""\nbloom blocked|sent curr="" + bloomSearchesBlockedCurr + ""|"" ++ + bloomSearchesSentCurr + "" prev="" + bloomSearchesBlockedPrev + ""|"" ++ + bloomSearchesSentPrev); ++ b.append(""\n\n"" + debugChannelIdErrorSetupErrorStats.getDebugStats()); ++ ++ long sum = 0, now = System.currentTimeMillis(), count = 0; ++ ++ // Include per-friend queue stats ++ lock.lock(); ++ try { ++ Map counts = new HashMap(); ++ for (DelayedSearchQueueEntry e : delayedSearchQueue.queuedSearches.values()) { ++ ++ count++; ++ sum += (now - e.insertionTime); ++ ++ String nick = e.source.getRemoteFriend().getNick(); ++ if (counts.containsKey(nick) == false) { ++ counts.put(nick, new MutableInteger()); ++ } ++ counts.get(nick).v++; ++ } ++ for (String nick : counts.keySet()) { ++ b.append(""\n\t"" + nick + "" -> "" + counts.get(nick).v); ++ } ++ b.append(""\n\nQueue size: "" + delayedSearchQueue.queuedSearches.size()); ++ } finally { ++ lock.unlock(); ++ } ++ ++ b.append(""\nAverage queued search delay: "" + (double) sum / (double) count); ++ ++ return b.toString(); ++ } ++ ++ public List getSearchResult(int searchId) { ++ return textSearchManager.getResults(searchId); ++ } ++ ++ private boolean handleHashSearch(final FriendConnection source, final OSF2FHashSearch msg) { ++ ++ // we might actually have this data ++ final byte[] infohash = filelistManager.getMetainfoHash(msg.getInfohashhash()); ++ ++ // Check if this is a service ++ SharedService service = ServiceSharingManager.getInstance().handleSearch(msg); ++ if (service != null) { ++ try { ++ // TODO: support artificial delays and merge with normal search ++ // handling code ++ final int newChannelId = random.nextInt(); ++ final int transportFakePathId = random.nextInt(); ++ final int pathID = randomnessManager.getDeterministicRandomInt((int) msg ++ .getInfohashhash()); ++ final OSF2FHashSearchResp response = new OSF2FHashSearchResp( ++ OSF2FMessage.CURRENT_VERSION, msg.getSearchID(), newChannelId, pathID); ++ ++ ServiceConnection conn = new ServiceConnection(service, source, newChannelId, ++ transportFakePathId, true); ++ // register it with the friendConnection ++ source.registerOverlayTransport(conn); ++ // send the channel setup message ++ 
source.sendChannelSetup(response, false); ++ } catch (OverlayRegistrationError e) { ++ Debug.out(""got an error when registering incoming transport to '"" ++ + source.getRemoteFriend().getNick() + ""': "" + e.message); ++ } ++ return false; ++ } else if (infohash != null) { ++ DownloadManager dm = AzureusCoreImpl.getSingleton().getGlobalManager() ++ .getDownloadManager(new HashWrapper(infohash)); ++ ++ if (dm != null) { ++ logger.fine(""found match: "" + new String(Base64.encode(infohash))); ++ ++ // check if the torrent allow osf2f search peers ++ boolean allowed = OverlayTransport.checkOSF2FAllowed(dm.getDownloadState() ++ .getPeerSources(), dm.getDownloadState().getNetworks()); ++ if (!allowed) { ++ logger.warning(""got search match for torrent "" ++ + ""that does not allow osf2f peers""); ++ return true; ++ } ++ ++ boolean completedOrDownloading = FileListManager.completedOrDownloading(dm); ++ if (!completedOrDownloading) { ++ return true; ++ } ++ ++ // check if we have the capacity to respond ++ if (canRespondToSearch() == false) { ++ return false; ++ } ++ ++ // yeah, we actually have this stuff and we have spare capacity ++ // create an overlay transport ++ final int newChannelId = random.nextInt(); ++ final int transportFakePathId = random.nextInt(); ++ // set the path id for the overlay transport for something ++ // random (since otherwise all transports for this infohash will ++ // get the same pathid, which will limit it to be only one. The ++ // path id set in the channel setup message will be ++ // deterministic. It is the responsibility of the source to ++ // monitor for duplicate paths ++ ++ // set the path id to something that will persist between ++ // searches, for example a deterministic random seeded with ++ // the infohashhash ++ final int pathID = randomnessManager.getDeterministicRandomInt((int) msg ++ .getInfohashhash()); ++ ++ // get the delay for this overlaytranport, that is the latency ++ // component of the delay ++ final int overlayDelay = overlayManager.getLatencyDelayForInfohash( ++ source.getRemoteFriend(), infohash); ++ ++ TimerTask task = new TimerTask() { ++ @Override ++ public void run() { ++ try { ++ /* ++ * check if the search got canceled while we were ++ * sleeping ++ */ ++ if (!isSearchCanceled(msg.getSearchID())) { ++ final OSF2FHashSearchResp response = new OSF2FHashSearchResp( ++ OSF2FMessage.CURRENT_VERSION, msg.getSearchID(), ++ newChannelId, pathID); ++ ++ final OverlayTransport transp = new OverlayTransport(source, ++ newChannelId, infohash, transportFakePathId, false, ++ overlayDelay); ++ // register it with the friendConnection ++ source.registerOverlayTransport(transp); ++ // send the channel setup message ++ source.sendChannelSetup(response, false); ++ } ++ } catch (OverlayRegistrationError e) { ++ Debug.out(""got an error when registering incoming transport to '"" ++ + source.getRemoteFriend().getNick() + ""': "" + e.message); ++ } ++ } ++ ++ }; ++ delayedExecutor.queue(overlayDelay, task); ++ ++ // we are still forwarding if there are files in the torrent ++ // that we chose not to download ++ DiskManagerFileInfo[] diskManagerFileInfo = dm.getDiskManagerFileInfo(); ++ for (DiskManagerFileInfo d : diskManagerFileInfo) { ++ if (d.isSkipped()) { ++ return true; ++ } ++ } ++ /* ++ * ok, we shouldn't forward this, already sent a hash response ++ * and we have/are downloading all the files ++ */ ++ return false; ++ } ++ } ++ ++ return true; ++ } ++ ++ private boolean isSearchCanceled(int searchId) { ++ boolean canceled = false; ++ 
lock.lock(); ++ try { ++ if (canceledSearches.containsKey(searchId)) { ++ canceled = true; ++ } ++ } finally { ++ lock.unlock(); ++ } ++ return canceled; ++ } ++ ++ /** ++ * Returns the probability of rejecting a search from this friend given the ++ * share of the overall queue ++ */ ++ public double getFriendSearchDropProbability(Friend inFriend) { ++ ++ lock.lock(); ++ try { ++ ++ // Always accept if we don't have any searches from friend. ++ if (searchesPerFriend.get(inFriend) == null) { ++ return 0; ++ } ++ ++ // Reject proportionally to recent rate. Do not admit more than ++ // X/sec. ++ // Also, proportional to processing queue size. ++ double rateBound = delayedSearchQueue.searchCount / 80.0; ++ double queueBound = (double) delayedSearchQueue.queuedSearches.size() ++ / (double) MAX_SEARCH_QUEUE_LENGTH; ++ ++ return Math.max(rateBound, queueBound); ++ ++ } finally { ++ lock.unlock(); ++ } ++ } ++ ++ private void handleIncomingHashSearchResponse(OSF2FHashSearch hashSearch, ++ FriendConnection source, OSF2FHashSearchResp msg) { ++ // great, we found someone that has what we searched for! ++ // create the overlay transport ++ byte[] infoHash = filelistManager.getMetainfoHash(hashSearch.getInfohashhash()); ++ if (infoHash == null) { ++ logger.warning(""got channel setup request, "" + ""but the infohash we searched for "" ++ + ""is not in filelistmananger""); ++ return; ++ } ++ ++ DownloadManager dm = AzureusCoreImpl.getSingleton().getGlobalManager() ++ .getDownloadManager(new HashWrapper(infoHash)); ++ if (dm == null) { ++ logger.warning(""got channel setup request, "" + ""but the downloadmanager is null""); ++ return; ++ } ++ ++ if (source.hasRegisteredPath(msg.getPathID())) { ++ logger.finer(""got channel setup response, "" ++ + ""but path is already used: sending back a reset""); ++ source.sendChannelRst(new OSF2FChannelReset(OSF2FMessage.CURRENT_VERSION, msg ++ .getChannelID())); ++ return; ++ } ++ ++ OverlayTransport overlayTransport = new OverlayTransport(source, msg.getChannelID(), ++ infoHash, msg.getPathID(), true, overlayManager.getLatencyDelayForInfohash( ++ source.getRemoteFriend(), infoHash)); ++ // register it with the friendConnection ++ try { ++ source.registerOverlayTransport(overlayTransport); ++ // safe to start it since we know that the other party is interested ++ overlayTransport.start(); ++ } catch (OverlayRegistrationError e) { ++ Debug.out(""got an error when registering outgoing transport: "" + e.message); ++ return; ++ } ++ ++ } ++ ++ public void handleIncomingSearch(FriendConnection source, OSF2FSearch msg) { ++ lock.lock(); ++ try { ++ logger.finest(""got search: "" + msg.getDescription()); ++ // first, check if we either sent or forwarded this search before ++ if (forwardedSearches.containsKey(msg.getSearchID()) ++ || sentSearches.containsKey(msg.getSearchID()) ++ || delayedSearchQueue.isQueued(msg)) { ++ return; ++ } ++ } finally { ++ lock.unlock(); ++ } ++ ++ boolean shouldForward = true; ++ // second, check if we actually can do something about this ++ if (msg instanceof OSF2FHashSearch) { ++ shouldForward = handleHashSearch(source, (OSF2FHashSearch) msg); ++ } else if (msg instanceof OSF2FTextSearch) { ++ shouldForward = handleTextSearch(source, (OSF2FTextSearch) msg); ++ } else { ++ logger.warning(""received unrecgonized search type: "" + msg.getID() + "" / "" ++ + msg.getClass().getCanonicalName()); ++ } ++ ++ /* ++ * check if we are at full capacity ++ */ ++ if (canForwardSearch() == false) { ++ shouldForward = false; ++ } ++ ++ if (shouldForward) { 
++ // ok, seems like we should attempt to forward this, put it in ++ // the queue ++ delayedSearchQueue.add(source, msg); ++ } ++ ++ } ++ ++ public void handleIncomingSearchCancel(FriendConnection source, OSF2FSearchCancel msg) { ++ ++ boolean forward = false; ++ lock.lock(); ++ try { ++ ++ /* ++ * if this is the first time we see the cancel, check if we ++ * forwarded this search, if we did, send a cancel ++ */ ++ if (!canceledSearches.containsKey(msg.getSearchID())) { ++ canceledSearches.put(msg.getSearchID(), System.currentTimeMillis()); ++ /* ++ * we only forward the cancel if we already sent the search ++ */ ++ if (forwardedSearches.containsKey(msg.getSearchID())) { ++ forward = true; ++ } else { ++ logger.fine(""got search cancel for unknown search id""); ++ } ++ } ++ } finally { ++ lock.unlock(); ++ } ++ if (forward) { ++ overlayManager.forwardSearchOrCancel(source, msg); ++ } ++ } ++ ++ /** ++ * There are 2 possible explanations for getting a search response, either ++ * we got a response for a search we sent ourselves, or we got a response ++ * for a search we forwarded ++ * ++ * @param source ++ * connection from where we got the setup ++ * @param msg ++ * the channel setup message ++ */ ++ public void handleIncomingSearchResponse(FriendConnection source, OSF2FSearchResp msg) { ++ SentSearch sentSearch; ++ lock.lock(); ++ try { ++ sentSearch = sentSearches.get(msg.getSearchID()); ++ } finally { ++ lock.unlock(); ++ } ++ // first, if might be a search we sent ++ if (sentSearch != null) { ++ logger.finest(""got response to search: "" + sentSearch.getSearch().getDescription()); ++ OSF2FSearch search = sentSearch.getSearch(); ++ // update response stats ++ sentSearch.gotResponse(); ++ /* ++ * check if we got enough search responses to cancel this search ++ * ++ * we will still use the data, even if the search is canceled. I ++ * mean, since it already made it here why not use it... 
++ */ ++ if (sentSearch.getResponseNum() > mMaxSearchResponsesBeforeCancel) { ++ /* ++ * only send a cancel message once ++ */ ++ boolean sendCancel = false; ++ lock.lock(); ++ try { ++ if (!canceledSearches.containsKey(msg.getSearchID())) { ++ canceledSearches.put(msg.getSearchID(), System.currentTimeMillis()); ++ logger.finer(""canceling search "" + msg); ++ sendCancel = true; ++ } ++ } finally { ++ lock.unlock(); ++ } ++ if (sendCancel) { ++ overlayManager.sendSearchOrCancel(new OSF2FSearchCancel( ++ OSF2FMessage.CURRENT_VERSION, msg.getSearchID()), true, false); ++ } ++ } ++ if (search instanceof OSF2FHashSearch) { ++ // ok, it was a hash search that we sent ++ handleIncomingHashSearchResponse((OSF2FHashSearch) search, source, ++ (OSF2FHashSearchResp) msg); ++ } else if (search instanceof OSF2FTextSearch) { ++ // this was from a text search we sent ++ FileList fileList; ++ try { ++ OSF2FTextSearchResp textSearchResp = (OSF2FTextSearchResp) msg; ++ fileList = FileListManager.decode_basic(textSearchResp.getFileList()); ++ ++ textSearchManager.gotSearchResponse(search.getSearchID(), ++ source.getRemoteFriend(), fileList, textSearchResp.getChannelID(), ++ source.hashCode()); ++ ++ logger.fine(""results so far:""); ++ List res = getSearchResult(search.getSearchID()); ++ for (TextSearchResult textSearchResult : res) { ++ logger.fine(textSearchResult.toString()); ++ } ++ } catch (IOException e) { ++ logger.warning(""got malformed search response""); ++ } ++ } else { ++ logger.warning(""unknown search response type""); ++ } ++ } ++ // sentsearch == null ++ else { ++ // ok, this is for a search we forwarded ++ ForwardedSearch search; ++ lock.lock(); ++ try { ++ search = forwardedSearches.get(msg.getSearchID()); ++ if (search == null) { ++ logger.warning(""got response for unknown search:"" + source + "":"" ++ + msg.getDescription()); ++ return; ++ } ++ ++ logger.finest(""got response to forwarded search: "" ++ + search.getSearch().getDescription()); ++ ++ if (canceledSearches.containsKey(msg.getSearchID())) { ++ logger.finer(""not forwarding search, it is already canceled, "" ++ + msg.getSearchID()); ++ return; ++ } ++ } finally { ++ lock.unlock(); ++ } ++ ++ FriendConnection searcher = search.getSource(); ++ FriendConnection responder = source; + -+ @Test public void testAdd() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ assertTrue(g.add(0)); -+ assertEquals(1, g.order()); -+ assertTrue(g.contains(0)); -+ // second add should have no effect -+ assertFalse(g.add(0)); -+ assertEquals(1, g.order()); -+ assertTrue(g.contains(0)); ++ if (search.getResponseNum() > mMaxSearchResponsesBeforeCancel) { ++ /* ++ * we really shouldn't cancel other peoples searches, but if ++ * they don't do it we have to ++ */ ++ lock.lock(); ++ try { ++ canceledSearches.put(msg.getSearchID(), System.currentTimeMillis()); ++ } finally { ++ lock.unlock(); ++ } ++ logger.finest(""Sending cancel for someone elses search!, searcher="" ++ + searcher.getRemoteFriend() + "" responder="" + responder.getRemoteFriend() ++ + "":\t"" + search); ++ overlayManager.forwardSearchOrCancel(source, new OSF2FSearchCancel( ++ OSF2FMessage.CURRENT_VERSION, msg.getSearchID())); ++ } else { ++ search.gotResponse(); ++ // register the forwarding ++ logger.finest(""registering overlay forward: "" ++ + searcher.getRemoteFriend().getNick() + ""<->"" ++ + responder.getRemoteFriend().getNick()); ++ try { ++ responder.registerOverlayForward(msg, searcher, search.getSearch(), false); ++ searcher.registerOverlayForward(msg, responder, 
search.getSearch(), true); ++ } catch (FriendConnection.OverlayRegistrationError e) { ++ String direction = ""'"" + responder.getRemoteFriend().getNick() + ""'->'"" ++ + searcher.getRemoteFriend().getNick() + ""'""; ++ e.direction = direction; ++ e.setupMessageSource = responder.getRemoteFriend().getNick(); ++ logger.warning(""not forwarding overlay setup request "" + direction + e.message); ++ debugChannelIdErrorSetupErrorStats.add(e); ++ return; ++ } ++ ++ // and send out the search ++ if (msg instanceof OSF2FHashSearchResp) { ++ searcher.sendChannelSetup((OSF2FHashSearchResp) msg.clone(), true); ++ } else if (msg instanceof OSF2FTextSearchResp) { ++ searcher.sendTextSearchResp((OSF2FTextSearchResp) msg.clone(), true); ++ } else { ++ Debug.out(""got unknown message: "" + msg.getDescription()); ++ } ++ } ++ } + -+ assertTrue(g.add(1)); -+ assertEquals(2, g.order()); -+ assertTrue(g.contains(1)); + } + -+ @Test public void testEquals() { -+ DirectedMultigraph g1 = new DirectedMultigraph(); -+ DirectedMultigraph g2 = new DirectedMultigraph(); -+ for (int i = 0; i < 10; ++i) { -+ for (int j = i + 1; j < 10; ++j) { -+ g1.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); -+ g2.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); ++ /** ++ * ++ * @param source ++ * @param msg ++ * @return ++ */ ++ private boolean handleTextSearch(final FriendConnection source, final OSF2FTextSearch msg) { ++ ++ boolean shouldForward = true; ++ ++ if (logger.isLoggable(Level.FINER)) { ++ logger.finer(""handleTextSearch: "" + msg.getSearchString() + "" from "" ++ + source.getRemoteFriend().getNick()); ++ } ++ ++ String searchString = msg.getSearchString(); ++ ++ // common case is no filtering. ++ if (filteredKeywords.length > 0) { ++ StringTokenizer toks = new StringTokenizer(searchString); ++ ++ for (String filter : filteredKeywords) { ++ if (searchString.contains(filter)) { ++ logger.fine(""Blocking search due to filter: "" + searchString + "" matched by: "" ++ + filter); ++ return false; ++ } + } + } -+ assertEquals(g1, g2); + -+ g1 = new DirectedMultigraph(); -+ g2 = new DirectedMultigraph(); -+ for (int i = 0; i < 10; ++i) { -+ for (int j = i + 1; j < 10; ++j) { -+ g1.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); -+ g2.add(new SimpleDirectedTypedEdge(""type-1"",j, i)); ++ List results = filelistManager.handleSearch(source.getRemoteFriend(), ++ searchString); ++ ++ if (results.size() > 0) { ++ if (canRespondToSearch()) { ++ logger.finer(""found matches: "" + results.size()); ++ // long fileListSize = results.getFileNum(); ++ ++ List delayedExecutionTasks = new LinkedList(); ++ long time = System.currentTimeMillis(); ++ for (FileCollection c : results) { ++ // send back a response ++ int channelId = random.nextInt(); ++ LinkedList list = new LinkedList(); ++ list.add(c); ++ byte[] encoded = FileListManager.encode_basic(new FileList(list), false); ++ ++ final OSF2FTextSearchResp resp = new OSF2FTextSearchResp( ++ OSF2FMessage.CURRENT_VERSION, OSF2FMessage.FILE_LIST_TYPE_PARTIAL, ++ msg.getSearchID(), channelId, encoded); ++ int delay = overlayManager.getSearchDelayForInfohash(source.getRemoteFriend(), ++ c.getUniqueIdBytes()); ++ delayedExecutionTasks.add(new DelayedExecutionEntry(time + delay, 0, ++ new TimerTask() { ++ @Override ++ public void run() { ++ /* ++ * check if the search got canceled while we ++ * were sleeping ++ */ ++ if (!isSearchCanceled(msg.getSearchID())) { ++ source.sendTextSearchResp(resp, false); ++ } ++ } ++ })); ++ } ++ delayedExecutor.queue(delayedExecutionTasks); ++ ++ } else { ++ 
// not enough capacity :-( ++ shouldForward = false; + } + } + -+ assertFalse(g1.equals(g2)); -+ assertFalse(g2.equals(g1)); ++ return shouldForward; + } + -+ @Test public void testEqualGeneric() { -+ DirectedMultigraph g1 = new DirectedMultigraph(); -+ Graph> g2 = new GenericGraph>(); -+ for (int i = 0; i < 10; ++i) { -+ for (int j = i + 1; j < 10; ++j) { -+ g1.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); -+ g2.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); ++ public void sendDirectedHashSearch(FriendConnection target, byte[] infoHash) { ++ ++ long metainfohashhash = filelistManager.getInfoHashhash(infoHash); ++ ++ int newSearchId = 0; ++ while (newSearchId == 0) { ++ newSearchId = random.nextInt(); ++ } ++ OSF2FHashSearch search = new OSF2FHashSearch(OSF2FMessage.CURRENT_VERSION, newSearchId, ++ metainfohashhash); ++ lock.lock(); ++ try { ++ sentSearches.put(newSearchId, new SentSearch(search)); ++ } finally { ++ lock.unlock(); ++ } ++ overlayManager.sendDirectedSearch(target, search); ++ ++ } ++ ++ public long getInfoHashHashFromSearchId(int searchId) { ++ lock.lock(); ++ try { ++ SentSearch sentSearch = sentSearches.get(searchId); ++ if (sentSearch != null && sentSearch.search instanceof OSF2FHashSearch) { ++ return ((OSF2FHashSearch) sentSearch.search).getInfohashhash(); + } ++ } finally { ++ lock.unlock(); + } -+ assertEquals(g1, g2); ++ return -1; + } + -+ @Test public void testContainsEdge() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ for (int i = 0; i < 100; ++i) -+ for (int j = i + 1; j < 100; ++j) -+ g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); ++ public void sendHashSearch(byte[] infoHash) { ++ long metainfohashhash = filelistManager.getInfoHashhash(infoHash); + -+ for (int i = 0; i < 100; ++i) { -+ for (int j = i + 1; j < 100; ++j) { -+ g.contains(new SimpleDirectedTypedEdge(""type-1"",i, j)); -+ g.contains(new SimpleDirectedTypedEdge(""type-1"",j, i)); -+ g.contains(i, j); -+ g.contains(j, i); -+ } ++ int newSearchId = 0; ++ while (newSearchId == 0) { ++ newSearchId = random.nextInt(); + } ++ OSF2FSearch search = new OSF2FHashSearch(OSF2FMessage.CURRENT_VERSION, newSearchId, ++ metainfohashhash); ++ ++ // these should go in the slow (forward) path so to route around slow ++ // nodes ++ sendSearch(newSearchId, search, false); + } + -+ @Test public void testAddEdge() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ assertTrue(g.add(new SimpleDirectedTypedEdge(""type-1"",0, 1))); -+ assertEquals(2, g.order()); -+ assertEquals(1, g.size()); -+ assertTrue(g.contains(new SimpleDirectedTypedEdge(""type-1"",0, 1))); ++ private void sendSearch(int newSearchId, OSF2FSearch search, boolean skipQueue) { ++ lock.lock(); ++ try { ++ sentSearches.put(newSearchId, new SentSearch(search)); ++ } finally { ++ lock.unlock(); ++ } ++ overlayManager.sendSearchOrCancel(search, skipQueue, false); ++ } + -+ g.add(new SimpleDirectedTypedEdge(""type-1"",0, 2)); -+ assertEquals(3, g.order()); -+ assertEquals(2, g.size()); -+ assertTrue(g.contains(new SimpleDirectedTypedEdge(""type-1"",0, 2))); ++ public int sendTextSearch(String searchString, TextSearchListener listener) { ++ int newSearchId = 0; ++ while (newSearchId == 0) { ++ newSearchId = random.nextInt(); ++ } + -+ g.add(new SimpleDirectedTypedEdge(""type-1"",3, 4)); -+ assertEquals(5, g.order()); -+ assertEquals(3, g.size()); -+ assertTrue(g.contains(new SimpleDirectedTypedEdge(""type-1"",3, 4))); ++ if (FileCollection.containsKeyword(searchString)) { ++ searchString = searchString.replaceAll("":"", "";""); 
++ searchString = handleKeyWords(searchString); ++ } ++ ++ OSF2FSearch search = new OSF2FTextSearch(OSF2FMessage.CURRENT_VERSION, ++ OSF2FMessage.FILE_LIST_TYPE_PARTIAL, newSearchId, searchString); ++ textSearchManager.sentSearch(newSearchId, searchString, listener); ++ sendSearch(newSearchId, search, false); ++ return newSearchId; + } + -+ @Test public void testRemoveLesserVertexWithEdges() { -+ DirectedMultigraph g = new DirectedMultigraph(); ++ private static String handleKeyWords(String searchString) { ++ searchString = FileCollection.removeWhiteSpaceAfteKeyChars(searchString); ++ String[] interestingKeyWords = new String[] { ""id"", ""sha1"", ""ed2k"" }; ++ int[] interestingKeyWordExectedKeyLen = { 20, 20, 16 }; ++ StringBuilder b = new StringBuilder(); ++ String[] split = searchString.split("" ""); ++ for (String s : split) { ++ // check for id ++ String toAdd = s; ++ for (int i = 0; i < interestingKeyWords.length; i++) { ++ String fromId = convertToBase64(s, interestingKeyWords[i], ++ interestingKeyWordExectedKeyLen[i]); ++ if (fromId != null) { ++ toAdd = fromId; ++ } ++ } ++ b.append(toAdd + "" ""); ++ if (!toAdd.equals(s)) { ++ logger.fine(""converted search: "" + s + ""->"" + toAdd); ++ } ++ } ++ return b.toString().trim(); ++ } + -+ for (int i = 1; i < 100; ++i) { -+ DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",0, i); -+ g.add(e); ++ private static String convertToBase64(String searchTerm, String _keyword, int expectedBytes) { ++ for (String sep : FileCollection.KEYWORDENDINGS) { ++ String keyword = _keyword + sep; ++ if (searchTerm.contains(keyword)) { ++ logger.finer(""converting base: "" + searchTerm); ++ try { ++ String baseXHash = searchTerm.substring(keyword.length()); ++ logger.finer(""basex hash: "" + baseXHash); ++ String hash = ShareManagerTools.baseXtoBase64(baseXHash, expectedBytes); ++ String toAdd = keyword + hash; ++ logger.finer(""new string: "" + toAdd); ++ return toAdd; ++ } catch (UnsupportedEncodingException e) { ++ // TODO Auto-generated catch block ++ e.printStackTrace(); ++ } ++ } + } - --// assertTrue(g.contains(0)); --// assertTrue(g.remove(0)); --// assertEquals(99, g.order()); --// assertEquals(0, g.size()); --// } -- --// @Test public void testRemoveHigherVertexWithEdges() { --// DirectedMultigraph g = new DirectedMultigraph(); -- --// for (int i = 0; i < 99; ++i) { --// DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",100, i); --// g.add(e); --// } -+ assertTrue(g.contains(0)); -+ assertTrue(g.remove(0)); -+ assertEquals(99, g.order()); -+ assertEquals(0, g.size()); ++ return null; + } + -+ @Test public void testRemoveHigherVertexWithEdges() { -+ DirectedMultigraph g = new DirectedMultigraph(); ++ static class DebugChannelIdEntry implements Comparable { ++ final int count; ++ final String name; + -+ for (int i = 0; i < 99; ++i) { -+ DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",100, i); -+ g.add(e); ++ public DebugChannelIdEntry(String name, int count) { ++ super(); ++ this.name = name; ++ this.count = count; ++ } ++ ++ public int compareTo(DebugChannelIdEntry o) { ++ if (o.count > count) { ++ return 1; ++ } else if (o.count == count) { ++ return 0; ++ } else { ++ return -1; ++ } + } - --// assertTrue(g.contains(100)); --// assertTrue(g.remove(100)); --// assertEquals(99, g.order()); --// assertEquals(0, g.size()); --// } -- -- --// @Test public void testRemoveVertex() { --// DirectedMultigraph g = new DirectedMultigraph(); --// for (int i = 0; i < 100; ++i) { --// g.add(i); --// } -- --// for (int i = 99; 
i >= 0; --i) { --// assertTrue(g.remove(i)); --// assertEquals(i, g.order()); --// assertFalse(g.contains(i)); --// assertFalse(g.remove(i)); --// } --// } -- --// @Test public void testRemoveEdge() { --// DirectedMultigraph g = new DirectedMultigraph(); -- --// for (int i = 1; i < 100; ++i) { --// DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",0, i); --// g.add(e); --// } -+ assertTrue(g.contains(100)); -+ assertTrue(g.remove(100)); -+ assertEquals(99, g.order()); -+ assertEquals(0, g.size()); + } + ++ private static class DebugChannelSetupErrorStats { ++ private final LinkedList errorList = new LinkedList(); + -+ @Test public void testRemoveVertex() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ for (int i = 0; i < 100; ++i) { -+ g.add(i); ++ int MAX_SIZE = 10000; ++ ++ public void add(FriendConnection.OverlayRegistrationError error) { ++ lock.lock(); ++ try { ++ if (errorList.size() > MAX_SIZE) { ++ errorList.removeLast(); ++ } ++ errorList.addFirst(error); ++ } finally { ++ lock.unlock(); ++ } + } + -+ for (int i = 99; i >= 0; --i) { -+ assertTrue(g.remove(i)); -+ assertEquals(i, g.order()); -+ assertFalse(g.contains(i)); -+ assertFalse(g.remove(i)); ++ public String getDebugStats() { ++ StringBuilder b = new StringBuilder(); ++ HashMap errorsPerFriend = new HashMap(); ++ HashMap errorsPerPair = new HashMap(); ++ lock.lock(); ++ try { ++ ++ for (FriendConnection.OverlayRegistrationError error : errorList) { ++ final String s = error.setupMessageSource; ++ if (!errorsPerFriend.containsKey(s)) { ++ errorsPerFriend.put(s, 0); ++ } ++ errorsPerFriend.put(s, errorsPerFriend.get(s) + 1); ++ ++ String d = error.direction; ++ if (!errorsPerPair.containsKey(d)) { ++ errorsPerPair.put(d, 0); ++ } ++ errorsPerPair.put(d, errorsPerPair.get(d) + 1); ++ } ++ ++ ArrayList friendTotalOrder = new ArrayList(); ++ for (String f : errorsPerFriend.keySet()) { ++ friendTotalOrder.add(new DebugChannelIdEntry(f, errorsPerFriend.get(f))); ++ } ++ Collections.sort(friendTotalOrder); ++ b.append(""by source:\n""); ++ for (DebugChannelIdEntry e : friendTotalOrder) { ++ b.append("" "" + e.name + "" "" + e.count + ""\n""); ++ } ++ ++ ArrayList byPairOrder = new ArrayList(); ++ for (String f : errorsPerPair.keySet()) { ++ byPairOrder.add(new DebugChannelIdEntry(f, errorsPerPair.get(f))); ++ } ++ Collections.sort(byPairOrder); ++ b.append(""by pair:\n""); ++ for (DebugChannelIdEntry e : byPairOrder) { ++ b.append("" "" + e.name + "" "" + e.count + ""\n""); ++ } ++ ++ } finally { ++ lock.unlock(); ++ } ++ return b.toString(); + } + } + -+ @Test public void testRemoveEdge() { -+ DirectedMultigraph g = new DirectedMultigraph(); ++ class DelayedSearchQueue { + -+ for (int i = 1; i < 100; ++i) { -+ DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",0, i); -+ g.add(e); ++ long lastSearchesPerSecondLogTime = 0; ++ long lastBytesPerSecondCount = 0; ++ int searchCount = 0; ++ ++ private long mDelay; ++ private final LinkedBlockingQueue queue = new LinkedBlockingQueue(); ++ private final HashMap queuedSearches = new HashMap(); ++ ++ public DelayedSearchQueue(long delay) { ++ this.mDelay = delay; ++ Thread t = new Thread(new DelayedSearchQueueThread()); ++ t.setDaemon(true); ++ t.setName(SEARCH_QUEUE_THREAD_NAME); ++ t.start(); + } - --// for (int i = 99; i > 0; --i) { --// DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",0, i); --// assertTrue(g.remove(e)); --// assertEquals(i-1, g.size()); --// assertFalse(g.contains(e)); --// assertFalse(g.remove(e)); --// } --// } -- --// @Test 
public void testVertexIterator() { --// DirectedMultigraph g = new DirectedMultigraph(); --// Set control = new HashSet(); --// for (int i = 0; i < 100; ++i) { --// g.add(i); --// control.add(i); --// } --// assertEquals(control.size(), g.order()); --// for (Integer i : g.vertices()) --// assertTrue(control.contains(i)); --// } -- --// @Test public void testEdgeIterator() { --// DirectedMultigraph g = new DirectedMultigraph(); --// Set> control = new HashSet>(); --// for (int i = 1; i <= 100; ++i) { --// DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",0, i); --// g.add(e); --// control.add(e); --// } -- --// assertEquals(control.size(), g.size()); --// assertEquals(control.size(), g.edges().size()); --// int returned = 0; --// for (Edge e : g.edges()) { --// assertTrue(control.remove(e)); --// returned++; --// } --// assertEquals(g.size(), returned); --// assertEquals(0, control.size()); --// } -- --// @Test public void testEdgeIteratorSmall() { --// DirectedMultigraph g = new DirectedMultigraph(); --// Set> control = new HashSet>(); --// for (int i = 1; i <= 5; ++i) { --// DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",0, i); --// assertTrue(g.add(e)); --// control.add(e); --// } -- --// assertEquals(control.size(), g.size()); --// assertEquals(control.size(), g.edges().size()); --// int returned = 0; --// for (Edge e : g.edges()) { --// System.out.println(e); --// assertTrue(control.contains(e)); --// returned++; --// } --// assertEquals(control.size(), returned); --// } -- --// @Test public void testEdgeIteratorSmallReverse() { --// DirectedMultigraph g = new DirectedMultigraph(); --// Set> control = new HashSet>(); --// for (int i = 1; i <= 5; ++i) { --// DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, 0); --// g.add(e); --// control.add(e); --// } -- --// assertEquals(control.size(), g.size()); --// assertEquals(control.size(), g.edges().size()); --// int returned = 0; --// for (Edge e : g.edges()) { --// System.out.println(e); --// assertTrue(control.contains(e)); --// returned++; --// } --// assertEquals(control.size(), returned); --// } -- -- --// @Test public void testAdjacentEdges() { --// DirectedMultigraph g = new DirectedMultigraph(); --// Set> control = new HashSet>(); --// for (int i = 1; i <= 100; ++i) { --// DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",0, i); --// g.add(e); --// control.add(e); --// } -+ for (int i = 99; i > 0; --i) { -+ DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",0, i); -+ assertTrue(g.remove(e)); -+ assertEquals(i-1, g.size()); -+ assertFalse(g.contains(e)); -+ assertFalse(g.remove(e)); ++ ++ /** ++ * Warning -- changing this won't re-order things already in the queue, ++ * so if you add something with a much smaller delay than the current ++ * head of the queue, it will wait until that's removed before sending ++ * the new message. 
++ */ ++ public void setDelay(long inDelay) { ++ this.mDelay = inDelay; ++ } ++ ++ public void add(FriendConnection source, OSF2FSearch search) { ++ ++ if (lastSearchesPerSecondLogTime + 1000 < System.currentTimeMillis()) { ++ ++ lock.lock(); ++ try { ++ logger.fine(""Searches/sec: "" + searchCount + "" bytes: "" ++ + lastBytesPerSecondCount + "" searchQueueSize: "" ++ + queuedSearches.size()); ++ } finally { ++ lock.unlock(); ++ } ++ ++ lastSearchesPerSecondLogTime = System.currentTimeMillis(); ++ searchCount = 0; ++ lastBytesPerSecondCount = 0; ++ } ++ ++ searchCount++; ++ lastBytesPerSecondCount += FriendConnectionQueue.getMessageLen(search); ++ ++ lock.lock(); ++ try { ++ ++ // Flush the accounting info every 60 seconds ++ if (SearchManager.this.lastSearchAccountingFlush + 60 * 1000 < System ++ .currentTimeMillis()) { ++ lastSearchAccountingFlush = System.currentTimeMillis(); ++ searchesPerFriend.clear(); ++ } ++ ++ // If the search queue is more than half full, start dropping ++ // searches ++ // proportional to how much of the total queue each person is ++ // consuming ++ if (queuedSearches.size() > 0.25 * MAX_SEARCH_QUEUE_LENGTH) { ++ if (searchesPerFriend.containsKey(source.getRemoteFriend())) { ++ int outstanding = searchesPerFriend.get(source.getRemoteFriend()).v; ++ ++ // We add a hard limit on the number of searches from ++ // any one person. ++ if (outstanding > 0.15 * MAX_SEARCH_QUEUE_LENGTH) { ++ logger.fine(""Dropping due to 25% of total queue consumption "" ++ + source.getRemoteFriend().getNick() + "" "" + outstanding ++ + "" / "" + MAX_SEARCH_QUEUE_LENGTH); ++ return; ++ } ++ ++ // In other cases, we drop proportional to the ++ // consumption of the overall queue. ++ double acceptProb = (double) outstanding / (double) queuedSearches.size(); ++ if (random.nextDouble() < acceptProb) { ++ if (logger.isLoggable(Level.FINE)) { ++ logger.fine(""*** RED for search from "" + source + "" outstanding: "" ++ + outstanding + "" total: "" + queuedSearches.size()); ++ } ++ return; ++ } ++ } ++ } ++ ++ if (queuedSearches.size() > MAX_SEARCH_QUEUE_LENGTH) { ++ if (logger.isLoggable(Level.FINER)) { ++ logger.finer(""not forwarding search, queue length too large. 
id: "" ++ + search.getSearchID()); ++ } ++ return; ++ } ++ if (!queuedSearches.containsKey(search.getSearchID())) { ++ logger.finest(""adding search to forward queue, will forward in "" + mDelay ++ + "" ms""); ++ DelayedSearchQueueEntry entry = new DelayedSearchQueueEntry(search, source, ++ System.currentTimeMillis() + mDelay); ++ ++ if (searchesPerFriend.containsKey(source.getRemoteFriend()) == false) { ++ searchesPerFriend.put(source.getRemoteFriend(), ++ new SearchManager.MutableInteger()); ++ } ++ searchesPerFriend.get(source.getRemoteFriend()).v++; ++ logger.finest(""Search for friend: "" + source.getRemoteFriend().getNick() + "" "" ++ + searchesPerFriend.get(source.getRemoteFriend()).v); ++ ++ queuedSearches.put(search.getSearchID(), entry); ++ queue.add(entry); ++ ++ } else { ++ logger.finer(""search already in queue, not adding""); ++ } ++ } finally { ++ lock.unlock(); ++ } ++ } ++ ++ /* ++ * make sure to already have the lock when calling this ++ */ ++ public boolean isQueued(OSF2FSearch search) { ++ return queuedSearches.containsKey(search.getSearchID()); ++ } ++ ++ class DelayedSearchQueueThread implements Runnable { ++ ++ public void run() { ++ while (true) { ++ try { ++ DelayedSearchQueueEntry e = queue.take(); ++ long timeUntilSend = e.dontSendBefore - System.currentTimeMillis(); ++ if (timeUntilSend > 0) { ++ logger.finer(""got search ("" + e.search.getDescription() ++ + "") to forward, waiting "" + timeUntilSend ++ + "" ms until sending""); ++ Thread.sleep(timeUntilSend); ++ } ++ forwardSearch(e.source, e.search); ++ /* ++ * remove the search from the queuedSearchesMap ++ */ ++ lock.lock(); ++ try { ++ queuedSearches.remove(e.search.getSearchID()); ++ // If searchesPerFriend was flushed while this ++ // search was in the ++ // queue, the get() call will return null. 
++ if (searchesPerFriend.containsKey(e.source.getRemoteFriend())) { ++ searchesPerFriend.get(e.source.getRemoteFriend()).v--; ++ } ++ } finally { ++ lock.unlock(); ++ } ++ /* ++ * if we didn't sleep at all, sleep the min time between ++ * searches ++ */ ++ if (timeUntilSend < 1) { ++ double ms = 1000.0 / FriendConnection.MAX_OUTGOING_SEARCH_RATE; ++ int msFloor = (int) Math.floor(ms); ++ int nanosLeft = (int) Math.round((ms - msFloor) * 1000000.0); ++ logger.finest(""sleeping "" + msFloor + ""ms + "" + nanosLeft + "" ns""); ++ Thread.sleep(msFloor, Math.min(999999, nanosLeft)); ++ } ++ ++ } catch (Exception e1) { ++ logger.warning(""*** Delayed search queue thread error: "" + e1.toString()); ++ e1.printStackTrace(); ++ BackendErrorLog.get().logException(e1); ++ } ++ } ++ } + } + } + -+ @Test public void testVertexIterator() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ Set control = new HashSet(); -+ for (int i = 0; i < 100; ++i) { -+ g.add(i); -+ control.add(i); ++ static class DelayedSearchQueueEntry { ++ final long dontSendBefore; ++ final OSF2FSearch search; ++ final FriendConnection source; ++ final long insertionTime; ++ ++ public DelayedSearchQueueEntry(OSF2FSearch search, FriendConnection source, ++ long dontSendBefore) { ++ this.insertionTime = System.currentTimeMillis(); ++ this.search = search; ++ this.source = source; ++ this.dontSendBefore = dontSendBefore; + } -+ assertEquals(control.size(), g.order()); -+ for (Integer i : g.vertices()) -+ assertTrue(control.contains(i)); + } + -+ @Test public void testEdgeIterator() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ Set> control = new HashSet>(); -+ for (int i = 1; i <= 100; ++i) { -+ DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",0, i); -+ g.add(e); -+ control.add(e); ++ class ForwardedSearch { ++ private int responsesForwarded = 0; ++ private final OSF2FSearch search; ++ private final FriendConnection source; ++ private final long time; ++ ++ public ForwardedSearch(FriendConnection source, OSF2FSearch search) { ++ this.time = System.currentTimeMillis(); ++ this.source = source; ++ this.search = search; ++ + } + -+ assertEquals(control.size(), g.size()); -+ assertEquals(control.size(), g.edges().size()); -+ int returned = 0; -+ for (Edge e : g.edges()) { -+ assertTrue(control.remove(e)); -+ returned++; ++ public long getAge() { ++ return System.currentTimeMillis() - this.time; + } -+ assertEquals(g.size(), returned); -+ assertEquals(0, control.size()); -+ } + -+ @Test public void testEdgeIteratorSmall() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ Set> control = new HashSet>(); -+ for (int i = 1; i <= 5; ++i) { -+ DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",0, i); -+ assertTrue(g.add(e)); -+ control.add(e); ++ public int getResponseNum() { ++ return responsesForwarded; + } + -+ assertEquals(control.size(), g.size()); -+ assertEquals(control.size(), g.edges().size()); -+ int returned = 0; -+ for (Edge e : g.edges()) { -+ System.out.println(e); -+ assertTrue(control.contains(e)); -+ returned++; ++ public OSF2FSearch getSearch() { ++ return search; + } -+ assertEquals(control.size(), returned); -+ } + -+ @Test public void testEdgeIteratorSmallReverse() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ Set> control = new HashSet>(); -+ for (int i = 1; i <= 5; ++i) { -+ DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, 0); -+ g.add(e); -+ control.add(e); ++ public int getSearchId() { ++ return search.getSearchID(); + } + -+ 
assertEquals(control.size(), g.size()); -+ assertEquals(control.size(), g.edges().size()); -+ int returned = 0; -+ for (Edge e : g.edges()) { -+ System.out.println(e); -+ assertTrue(control.contains(e)); -+ returned++; ++ public FriendConnection getSource() { ++ return source; ++ } ++ ++ public void gotResponse() { ++ responsesForwarded++; ++ } ++ ++ public boolean isTimedOut() { ++ return getAge() > MAX_SEARCH_AGE; + } -+ assertEquals(control.size(), returned); + } + ++ static class RotatingBloomFilter { ++ private static final int OBJECTS_TO_STORE = 1000000; + -+ @Test public void testAdjacentEdges() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ Set> control = new HashSet>(); -+ for (int i = 1; i <= 100; ++i) { -+ DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",0, i); -+ g.add(e); -+ control.add(e); ++ private static final int SIZE_IN_BITS = 10240 * 1024; ++ ++ private long currentFilterCreated; ++ private final LinkedList filters = new LinkedList(); ++ private final int maxBuckets; ++ private final long maxFilterAge; ++ ++ public RotatingBloomFilter(long totalAge, int buckets) { ++ this.maxBuckets = buckets; ++ this.maxFilterAge = (totalAge / buckets) + 1; ++ rotate(); + } - --// Set> test = g.getAdjacencyList(0); --// assertEquals(control, test); --// } -- --// @Test public void testAdjacencyListSize() { --// DirectedMultigraph g = new DirectedMultigraph(); -- --// // fully connected --// for (int i = 0; i < 10; i++) { --// for (int j = i+1; j < 10; ++j) --// g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); --// } -- --// // (n * (n-1)) / 2 --// assertEquals( (10 * 9) / 2, g.size()); --// assertEquals(10, g.order()); -+ Set> test = g.getAdjacencyList(0); -+ assertEquals(control, test); ++ ++ public boolean contains(int searchId, int searchValue) { ++ byte[] bytes = bytesFromInts(searchId, searchValue); ++ for (BloomFilter f : filters) { ++ if (f.test(bytes)) { ++ return true; ++ } ++ } ++ ++ return false; ++ } ++ ++ public double getPrevFilterFalsePositiveEst() { ++ if (filters.size() > 1) { ++ return filters.get(1).getPredictedFalsePositiveRate(); ++ } else { ++ return filters.getFirst().getPredictedFalsePositiveRate(); ++ } ++ } ++ ++ public int getPrevFilterNumElements() { ++ if (filters.size() > 1) { ++ return filters.get(1).getUniqueObjectsStored(); ++ } else { ++ return filters.getFirst().getUniqueObjectsStored(); ++ } ++ } ++ ++ public void insert(int searchId, int searchValue) { ++ byte[] bytes = bytesFromInts(searchId, searchValue); ++ filters.getFirst().insert(bytes); ++ } ++ ++ private void rotate() { ++ ++ if (filters.size() > 0) { ++ BloomFilter prevFilter = filters.getFirst(); ++ String str = ""Rotating bloom filter: objects="" ++ + prevFilter.getUniqueObjectsStored() + "" predicted false positive rate="" ++ + (100 * prevFilter.getPredictedFalsePositiveRate() + ""%""); ++ logger.info(str); ++ } ++ currentFilterCreated = System.currentTimeMillis(); ++ try { ++ filters.addFirst(new BloomFilter(SIZE_IN_BITS, OBJECTS_TO_STORE)); ++ } catch (NoSuchAlgorithmException e) { ++ // TODO Auto-generated catch block ++ e.printStackTrace(); ++ } ++ if (filters.size() > maxBuckets) { ++ filters.removeLast(); ++ } ++ } ++ ++ public boolean rotateIfNeeded() { ++ long currentFilterAge = System.currentTimeMillis() - currentFilterCreated; ++ if (currentFilterAge > maxFilterAge) { ++ rotate(); ++ return true; ++ } ++ return false; ++ } ++ ++ private static byte[] bytesFromInts(int int1, int int2) { ++ byte[] bytes = new byte[8]; ++ ++ bytes[0] = (byte) (int1 >>> 
24); ++ bytes[1] = (byte) (int1 >>> 16); ++ bytes[2] = (byte) (int1 >>> 8); ++ bytes[3] = (byte) int1; ++ ++ bytes[4] = (byte) (int2 >>> 24); ++ bytes[5] = (byte) (int2 >>> 16); ++ bytes[6] = (byte) (int2 >>> 8); ++ bytes[7] = (byte) int2; ++ return bytes; ++ } ++ ++ public static void main(String[] args) { ++ OSF2FMain.getSingelton(); ++ logger.setLevel(Level.FINE); ++ Random rand = new Random(); ++ ++ RotatingBloomFilter bf = new RotatingBloomFilter(60 * 1000, 4); ++ ++ Set inserts = new HashSet(); ++ for (int j = 0; j < 8; j++) { ++ for (int i = 0; i < 20000; i++) { ++ int r1 = rand.nextInt(); ++ int r2 = rand.nextInt(); ++ byte[] bytes = bytesFromInts(r1, r2); ++ inserts.add(new String(Base64.encode(bytes))); ++ bf.insert(r1, r2); ++ if (!bf.contains(r1, r2)) { ++ System.err.println(""insert failes (does not contain it anymore)""); ++ } ++ } ++ bf.rotate(); ++ } ++ ++ int fps = 0, to_check = 200000; ++ for (int i = 0; i < to_check; i++) { ++ int int1; ++ int int2; ++ byte[] bytes; ++ do { ++ int1 = rand.nextInt(); ++ int2 = rand.nextInt(); ++ bytes = bytesFromInts(int1, int2); ++ } while (inserts.contains(new String(Base64.encode(bytes))) == true); ++ if (bf.contains(int1, int2) == true) ++ fps++; ++ } ++ ++ System.out.println(""false positive check, "" + fps + ""/"" + to_check); ++ ++ System.out.println(""mem: "" ++ + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory())); ++ ++ } ++ + } + -+ @Test public void testAdjacencyListSize() { -+ DirectedMultigraph g = new DirectedMultigraph(); ++ class SentSearch { ++ private int responses = 0; ++ private final OSF2FSearch search; + -+ // fully connected -+ for (int i = 0; i < 10; i++) { -+ for (int j = i+1; j < 10; ++j) -+ g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); -+ } ++ private final long time; + -+ // (n * (n-1)) / 2 -+ assertEquals( (10 * 9) / 2, g.size()); -+ assertEquals(10, g.order()); - --// Set> adjList = g.getAdjacencyList(0); --// assertEquals(9, adjList.size()); -+ Set> adjList = g.getAdjacencyList(0); -+ assertEquals(9, adjList.size()); - --// adjList = g.getAdjacencyList(1); --// assertEquals(9, adjList.size()); -+ adjList = g.getAdjacencyList(1); -+ assertEquals(9, adjList.size()); - --// adjList = g.getAdjacencyList(2); --// assertEquals(9, adjList.size()); -+ adjList = g.getAdjacencyList(2); -+ assertEquals(9, adjList.size()); - --// adjList = g.getAdjacencyList(3); --// assertEquals(9, adjList.size()); -+ adjList = g.getAdjacencyList(3); -+ assertEquals(9, adjList.size()); - --// adjList = g.getAdjacencyList(5); --// assertEquals(9, adjList.size()); --// } -+ adjList = g.getAdjacencyList(5); -+ assertEquals(9, adjList.size()); -+ } - - --// @Test public void testAdjacentEdgesRemove() { --// DirectedMultigraph g = new DirectedMultigraph(); --// Set> control = new HashSet>(); --// for (int i = 1; i <= 100; ++i) { --// DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",0, i); --// g.add(e); --// control.add(e); --// } -+ @Test public void testAdjacentEdgesRemove() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ Set> control = new HashSet>(); -+ for (int i = 1; i <= 100; ++i) { -+ DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",0, i); -+ g.add(e); -+ control.add(e); ++ public SentSearch(OSF2FSearch search) { ++ this.search = search; ++ this.time = System.currentTimeMillis(); + } - --// Set> test = g.getAdjacencyList(0); --// assertEquals(control, test); -- --// Edge removed = new SimpleDirectedTypedEdge(""type-1"",0, 1); --// assertTrue(test.remove(removed)); --// 
assertTrue(control.remove(removed)); --// assertEquals(control, test); --// assertEquals(99, g.size()); --// } -- --// @Test public void testAdjacentEdgesAdd() { --// DirectedMultigraph g = new DirectedMultigraph(); --// Set> control = new HashSet>(); --// for (int i = 1; i <= 100; ++i) { --// DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",0, i); --// g.add(e); --// control.add(e); --// } -+ Set> test = g.getAdjacencyList(0); -+ assertEquals(control, test); + -+ Edge removed = new SimpleDirectedTypedEdge(""type-1"",0, 1); -+ assertTrue(test.remove(removed)); -+ assertTrue(control.remove(removed)); -+ assertEquals(control, test); -+ assertEquals(99, g.size()); -+ } ++ public long getAge() { ++ return System.currentTimeMillis() - this.time; ++ } + -+ @Test public void testAdjacentEdgesAdd() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ Set> control = new HashSet>(); -+ for (int i = 1; i <= 100; ++i) { -+ DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",0, i); -+ g.add(e); -+ control.add(e); ++ public int getResponseNum() { ++ return responses; + } - --// Set> test = g.getAdjacencyList(0); --// assertEquals(control, test); -- --// DirectedTypedEdge added = new SimpleDirectedTypedEdge(""type-1"",0, 101); --// assertTrue(test.add(added)); --// assertTrue(control.add(added)); --// assertEquals(control, test); --// assertEquals(101, g.size()); --// assertTrue(g.contains(added)); --// assertTrue(g.contains(101)); --// assertEquals(102, g.order()); --// } -- --// @Test public void testClear() { --// DirectedMultigraph g = new DirectedMultigraph(); -- --// // fully connected --// for (int i = 0; i < 10; i++) { --// for (int j = i+1; j < 10; ++j) --// g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); --// } -- --// // (n * (n-1)) / 2 --// assertEquals( (10 * 9) / 2, g.size()); --// assertEquals(10, g.order()); -- --// g.clear(); --// assertEquals(0, g.size()); --// assertEquals(0, g.order()); --// assertEquals(0, g.vertices().size()); --// assertEquals(0, g.edges().size()); -+ Set> test = g.getAdjacencyList(0); -+ assertEquals(control, test); + -+ DirectedTypedEdge added = new SimpleDirectedTypedEdge(""type-1"",0, 101); -+ assertTrue(test.add(added)); -+ assertTrue(control.add(added)); -+ assertEquals(control, test); -+ assertEquals(101, g.size()); -+ assertTrue(g.contains(added)); -+ assertTrue(g.contains(101)); -+ assertEquals(102, g.order()); -+ } ++ public OSF2FSearch getSearch() { ++ return search; ++ } + -+ @Test public void testClear() { -+ DirectedMultigraph g = new DirectedMultigraph(); ++ public void gotResponse() { ++ responses++; ++ } + -+ // fully connected -+ for (int i = 0; i < 10; i++) { -+ for (int j = i+1; j < 10; ++j) -+ g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); -+ } ++ public boolean isTimedOut() { ++ return getAge() > MAX_SEARCH_AGE; ++ } + -+ // (n * (n-1)) / 2 -+ assertEquals( (10 * 9) / 2, g.size()); -+ assertEquals(10, g.order()); ++ } + -+ g.clear(); -+ assertEquals(0, g.size()); -+ assertEquals(0, g.order()); -+ assertEquals(0, g.vertices().size()); -+ assertEquals(0, g.edges().size()); - --// // Error checking case for double-clear --// g.clear(); --// } -- --// @Test public void testClearEdges() { --// DirectedMultigraph g = new DirectedMultigraph(); -- --// // fully connected --// for (int i = 0; i < 10; i++) { --// for (int j = i+1; j < 10; ++j) --// g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); --// } -- --// // (n * (n-1)) / 2 --// assertEquals( (10 * 9) / 2, g.size()); --// assertEquals(10, g.order()); -- --// 
g.clearEdges(); --// assertEquals(0, g.size()); --// assertEquals(10, g.order()); --// assertEquals(10, g.vertices().size()); --// assertEquals(0, g.edges().size()); -+ // Error checking case for double-clear -+ g.clear(); ++ public interface TextSearchListener { ++ public void searchResponseReceived(TextSearchResponseItem r); + } + -+ @Test public void testClearEdges() { -+ DirectedMultigraph g = new DirectedMultigraph(); ++ class TextSearchManager { ++ private final ConcurrentHashMap responses; ++ private final ConcurrentHashMap listeners; + -+ // fully connected -+ for (int i = 0; i < 10; i++) { -+ for (int j = i+1; j < 10; ++j) -+ g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); -+ } ++ public TextSearchManager() { ++ responses = new ConcurrentHashMap(); ++ listeners = new ConcurrentHashMap(); ++ } + -+ // (n * (n-1)) / 2 -+ assertEquals( (10 * 9) / 2, g.size()); -+ assertEquals(10, g.order()); ++ public List getResults(int searchId) { ++ TextSearchResponse resps = responses.get(searchId); + -+ g.clearEdges(); -+ assertEquals(0, g.size()); -+ assertEquals(10, g.order()); -+ assertEquals(10, g.vertices().size()); -+ assertEquals(0, g.edges().size()); - --// // Error checking case for double-clear --// g.clearEdges(); --// } -- --// @Test public void testToString() { --// DirectedMultigraph g = new DirectedMultigraph(); --// for (int i = 0; i < 10; ++i) --// for (int j = i + 1; j < 10; ++j) --// g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); --// g.toString(); -- --// // only vertices --// g.clearEdges(); --// g.toString(); -- --// // empty graph --// g.clear(); --// g.toString(); -+ // Error checking case for double-clear -+ g.clearEdges(); -+ } ++ HashMap result = new HashMap(); + -+ @Test public void testToString() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ for (int i = 0; i < 10; ++i) -+ for (int j = i + 1; j < 10; ++j) -+ g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); -+ g.toString(); ++ if (resps != null) { ++ /* ++ * group into file collections ++ */ ++ for (TextSearchResponseItem item : resps.getItems()) { ++ for (FileCollection collection : item.getFileList().getElements()) { ++ if (result.containsKey(collection.getUniqueID())) { ++ TextSearchResult existing = result.get(collection.getUniqueID()); ++ existing.merge(item, collection); ++ } else { ++ // mark stuff that we already have ++ boolean alreadyInLibrary = true; ++ GlobalManager globalManager = AzureusCoreImpl.getSingleton() ++ .getGlobalManager(); ++ DownloadManager dm = globalManager.getDownloadManager(new HashWrapper( ++ collection.getUniqueIdBytes())); ++ if (dm == null) { ++ alreadyInLibrary = false; ++ } ++ result.put(collection.getUniqueID(), new TextSearchResult(item, ++ collection, alreadyInLibrary)); ++ } ++ } ++ } + -+ // only vertices -+ g.clearEdges(); -+ g.toString(); ++ // /* ++ // * verify that we didn't get any bad data ++ // */ ++ // for (TextSearchResult item : result.values()) { ++ // FileCollection collection = item.getCollection(); ++ // String searchString = resps.getSearchString(); ++ // boolean collectionMatch = collection.nameMatch(searchString); ++ // ++ // Set filteredFiles = new ++ // HashSet(); ++ // List allChildren = collection.getChildren(); ++ // for (int i = 0; i < allChildren.size(); i++) { ++ // FileListFile f = allChildren.get(i); ++ // if (filteredFiles.contains(f)) { ++ // continue; ++ // } ++ // if (collectionMatch) { ++ // filteredFiles.add(f); ++ // } else if (f.searchMatch(searchString)) { ++ // filteredFiles.add(f); ++ // } else { ++ // 
logger.fine(""got search result that doesn't match search: "" + ++ // f.getFileName() + "" ! "" + searchString); ++ // } ++ // } ++ // logger.fine(collection.getName() + "" totalResp: "" + ++ // allChildren.size() + "" afterFiler="" + filteredFiles.size()); ++ // collection.setChildren(new ++ // ArrayList(filteredFiles)); ++ // } ++ ++ return new ArrayList(result.values()); ++ } ++ logger.fine(""no responses for searchId="" + searchId); ++ return new ArrayList(); ++ } ++ ++ public void gotSearchResponse(int searchId, Friend throughFriend, FileList fileList, ++ int channelId, int connectionId) { ++ TextSearchResponse r = responses.get(searchId); ++ if (r != null) { ++ long age = System.currentTimeMillis() - r.getTime(); ++ TextSearchResponseItem item = new TextSearchResponseItem(throughFriend, fileList, ++ age, channelId, connectionId); ++ r.add(item); ++ TextSearchListener listener = listeners.get(searchId); ++ if (listener != null) { ++ listener.searchResponseReceived(item); ++ } ++ } else { ++ logger.warning(""got response for unknown search""); ++ } ++ } + -+ // empty graph -+ g.clear(); -+ g.toString(); - --// } -- --// /****************************************************************** --// * --// * --// * VertexSet tests --// * --// * --// ******************************************************************/ -- --// @Test(expected=UnsupportedOperationException.class) public void testVertexSetAdd() { --// DirectedMultigraph g = new DirectedMultigraph(); --// Set control = new HashSet(); --// for (int i = 0; i < 100; ++i) { --// g.add(i); --// control.add(i); --// } -- --// Set vertices = g.vertices(); --// assertEquals(control.size(), vertices.size()); --// assertTrue(vertices.add(100)); --// assertTrue(g.contains(100)); --// assertEquals(101, vertices.size()); --// assertEquals(101, g.order()); -+ } ++ public void sentSearch(int searchId, String searchString, TextSearchListener listener) { ++ responses.put(searchId, new TextSearchResponse(searchString)); ++ if (listener != null) { ++ listeners.put(searchId, listener); ++ } ++ } + -+ /****************************************************************** -+ * -+ * -+ * VertexSet tests -+ * -+ * -+ ******************************************************************/ ++ public void clearOldResponses() { ++ for (Iterator iterator = responses.keySet().iterator(); iterator.hasNext();) { ++ Integer key = iterator.next(); ++ TextSearchResponse response = responses.get(key); ++ if (System.currentTimeMillis() - response.getTime() > 10 * 60 * 1000) { ++ iterator.remove(); ++ listeners.remove(key); ++ } + -+ @Test(expected=UnsupportedOperationException.class) public void testVertexSetAdd() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ Set control = new HashSet(); -+ for (int i = 0; i < 100; ++i) { -+ g.add(i); -+ control.add(i); ++ } + } -+ -+ Set vertices = g.vertices(); -+ assertEquals(control.size(), vertices.size()); -+ assertTrue(vertices.add(100)); -+ assertTrue(g.contains(100)); -+ assertEquals(101, vertices.size()); -+ assertEquals(101, g.order()); - --// // dupe --// assertFalse(vertices.add(100)); --// assertEquals(101, vertices.size()); --// } -- --// @Test(expected=UnsupportedOperationException.class) public void testVertexSetAddFromGraph() { --// DirectedMultigraph g = new DirectedMultigraph(); --// Set control = new HashSet(); --// for (int i = 0; i < 100; ++i) { --// g.add(i); --// control.add(i); --// } -- --// Set vertices = g.vertices(); --// assertEquals(control.size(), vertices.size()); --// assertTrue(g.add(100)); --// 
assertTrue(g.contains(100)); --// assertTrue(vertices.contains(100)); --// assertEquals(101, vertices.size()); --// assertEquals(101, g.order()); -+ // dupe -+ assertFalse(vertices.add(100)); -+ assertEquals(101, vertices.size()); + } + -+ @Test(expected=UnsupportedOperationException.class) public void testVertexSetAddFromGraph() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ Set control = new HashSet(); -+ for (int i = 0; i < 100; ++i) { -+ g.add(i); -+ control.add(i); ++ public boolean isSearchInBloomFilter(OSF2FSearch search) { ++ lock.lock(); ++ try { ++ int searchID = search.getSearchID(); ++ int valueID = search.getValueID(); ++ if (recentSearches.contains(searchID, valueID)) { ++ bloomSearchesBlockedCurr++; ++ } ++ } finally { ++ lock.unlock(); + } ++ return false; ++ } + + } +diff --git a/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/network/ServiceConnection.java b/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/network/ServiceConnection.java +index e1112dfd..b410524f 100644 +--- a/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/network/ServiceConnection.java ++++ b/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/network/ServiceConnection.java +@@ -1,10 +1,188 @@ + package edu.washington.cs.oneswarm.f2f.network; + +-public class ServiceConnection extends OverlayTransport { ++import java.nio.ByteBuffer; ++import java.util.LinkedList; ++import java.util.logging.Logger; + +- public ServiceConnection(FriendConnection connection, int channelId, byte[] infohash, +- int pathID, boolean outgoing, long overlayDelayMs) { +- super(connection, channelId, infohash, pathID, outgoing, overlayDelayMs); ++import org.gudy.azureus2.core3.util.DirectByteBuffer; + -+ Set vertices = g.vertices(); -+ assertEquals(control.size(), vertices.size()); -+ assertTrue(g.add(100)); -+ assertTrue(g.contains(100)); -+ assertTrue(vertices.contains(100)); -+ assertEquals(101, vertices.size()); -+ assertEquals(101, g.order()); - --// // dupe --// assertFalse(vertices.add(100)); --// assertEquals(101, vertices.size()); --// } -- --// @Test(expected=UnsupportedOperationException.class) public void testVertexSetRemove() { --// DirectedMultigraph g = new DirectedMultigraph(); --// Set control = new HashSet(); --// for (int i = 0; i < 100; ++i) { --// g.add(i); --// control.add(i); --// } -- --// Set vertices = g.vertices(); --// assertEquals(control.size(), vertices.size()); --// assertTrue(g.contains(99)); --// assertTrue(vertices.remove(99)); --// assertFalse(g.contains(99)); --// assertEquals(99, vertices.size()); --// assertEquals(99, g.order()); -+ // dupe -+ assertFalse(vertices.add(100)); -+ assertEquals(101, vertices.size()); ++import com.aelitis.azureus.core.networkmanager.IncomingMessageQueue.MessageQueueListener; ++import com.aelitis.azureus.core.networkmanager.NetworkConnection; ++import com.aelitis.azureus.core.networkmanager.NetworkConnection.ConnectionListener; ++import com.aelitis.azureus.core.peermanager.messaging.Message; ++ ++import edu.washington.cs.oneswarm.f2f.messaging.OSF2FChannelDataMsg; ++import edu.washington.cs.oneswarm.f2f.servicesharing.DataMessage; ++import edu.washington.cs.oneswarm.f2f.servicesharing.ServiceSharingManager.SharedService; ++ ++public class ServiceConnection extends OverlayEndpoint { ++ /** ++ ** High level ** ++ * ++ * Searcher: Register service, enter local port and searchkey, Open local ++ * port, On incoming connection: Search ++ * ++ * ++ * Service host: Search hit->Search reply->contact server On server timeout ++ * or any other error: send channel reset ++ 
* ++ * Searcher: On search reply: send any incoming data to the channel ++ * ++ */ ++ ++ /** ++ ** Details ** ++ * ++ * For service host: ++ * ++ * Create: Create new network connection, register the network connection to ++ * handle rate limited reads/writes. ++ * ++ * Server->Overlay: On incoming message: move the payload into a new overlay ++ * message with the proper channel id and queue on the friend connection. ++ * The new message now owns the payload and is responsible for destroying ++ * it. ++ * ++ * Overlay->Server: put the message in the outgoing queue on the server ++ * connection. ++ */ ++ ++ private final static Logger logger = Logger.getLogger(ServiceConnection.class.getName()); ++ // all operations on this object must be in a synchronized block ++ private final LinkedList bufferedMessages; ++ private NetworkConnection serverConnection; ++ ++ private final SharedService service; ++ private final boolean serverSide; ++ ++ public ServiceConnection(SharedService service, FriendConnection connection, int channelId, ++ int pathID, boolean serverSide) { ++ super(connection, channelId, pathID, 0); ++ this.service = service; ++ this.bufferedMessages = new LinkedList(); ++ this.serverSide = true; + } + -+ @Test(expected=UnsupportedOperationException.class) public void testVertexSetRemove() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ Set control = new HashSet(); -+ for (int i = 0; i < 100; ++i) { -+ g.add(i); -+ control.add(i); ++ @Override ++ public void cleanup() { ++ serverConnection.close(); ++ } ++ ++ @Override ++ protected void destroyBufferedMessages() { ++ synchronized (bufferedMessages) { ++ while (bufferedMessages.size() > 0) { ++ bufferedMessages.removeFirst().destroy(); ++ } + } ++ }; + -+ Set vertices = g.vertices(); -+ assertEquals(control.size(), vertices.size()); -+ assertTrue(g.contains(99)); -+ assertTrue(vertices.remove(99)); -+ assertFalse(g.contains(99)); -+ assertEquals(99, vertices.size()); -+ assertEquals(99, g.order()); - --// // dupe --// assertFalse(vertices.remove(99)); --// assertEquals(99, vertices.size()); --// } -- --// @Test public void testVertexSetRemoveFromGraph() { --// DirectedMultigraph g = new DirectedMultigraph(); --// Set control = new HashSet(); --// for (int i = 0; i < 100; ++i) { --// g.add(i); --// control.add(i); --// } -- --// Set vertices = g.vertices(); --// assertEquals(control.size(), vertices.size()); --// assertTrue(g.remove(99)); -- --// assertFalse(g.contains(99)); --// assertFalse(vertices.contains(99)); --// assertEquals(99, vertices.size()); --// assertEquals(99, g.order()); --// } -- --// @Test(expected=UnsupportedOperationException.class) public void testVertexSetIteratorRemove() { --// DirectedMultigraph g = new DirectedMultigraph(); --// Set control = new HashSet(); --// for (int i = 0; i < 100; ++i) { --// g.add(i); --// control.add(i); --// } -- --// Set vertices = g.vertices(); --// assertEquals(control.size(), vertices.size()); --// Iterator iter = vertices.iterator(); --// assertTrue(iter.hasNext()); --// Integer toRemove = iter.next(); --// assertTrue(g.contains(toRemove)); --// assertTrue(vertices.contains(toRemove)); --// iter.remove(); --// assertFalse(g.contains(toRemove)); --// assertFalse(vertices.contains(toRemove)); --// assertEquals(g.order(), vertices.size()); --// } -- --// @Test(expected=NoSuchElementException.class) public void testVertexSetIteratorTooFar() { --// DirectedMultigraph g = new DirectedMultigraph(); --// Set control = new HashSet(); --// for (int i = 0; i < 100; ++i) { --// 
g.add(i); --// control.add(i); --// } -- --// Set vertices = g.vertices(); --// Iterator iter = vertices.iterator(); --// int i = 0; --// while (iter.hasNext()) { --// i++; --// iter.next(); --// } --// assertEquals(vertices.size(), i); --// iter.next(); --// } -- --// @Test(expected=UnsupportedOperationException.class) public void testVertexSetIteratorRemoveTwice() { --// DirectedMultigraph g = new DirectedMultigraph(); --// Set control = new HashSet(); --// for (int i = 0; i < 100; ++i) { --// g.add(i); --// control.add(i); --// } -- --// Set vertices = g.vertices(); --// Iterator iter = vertices.iterator(); --// assertTrue(iter.hasNext()); --// Integer toRemove = iter.next(); --// assertTrue(g.contains(toRemove)); --// assertTrue(vertices.contains(toRemove)); --// iter.remove(); --// iter.remove(); --// } -- --// @Test(expected=UnsupportedOperationException.class) public void testVertexSetIteratorRemoveEarly() { --// DirectedMultigraph g = new DirectedMultigraph(); --// Set control = new HashSet(); --// for (int i = 0; i < 100; ++i) { --// g.add(i); --// control.add(i); --// } -- --// Set vertices = g.vertices(); --// Iterator iter = vertices.iterator(); --// iter.remove(); --// } -+ // dupe -+ assertFalse(vertices.remove(99)); -+ assertEquals(99, vertices.size()); ++ @Override ++ public String getDescription() { ++ return super.getDescription() + "" "" + service.toString() + "" serverside="" + serverSide; + } + -+ @Test public void testVertexSetRemoveFromGraph() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ Set control = new HashSet(); -+ for (int i = 0; i < 100; ++i) { -+ g.add(i); -+ control.add(i); ++ @Override ++ protected void handleDelayedOverlayMessage(OSF2FChannelDataMsg msg) { ++ if (closed) { ++ return; ++ } ++ if (!started) { ++ start(); ++ } ++ if (!serverConnection.isConnected()) { ++ synchronized (bufferedMessages) { ++ bufferedMessages.add(msg); ++ } ++ return; ++ } ++ writeMessageToServerConnection(msg.getData()); ++ } ++ ++ /** ++ * Currently we connect to the server when we get the first message data. ++ */ ++ // TODO (isdal): connect to the server on incoming search message and send ++ // reply on successful connect? 
++ @Override ++ public void start() { ++ logger.fine(getDescription() + "" starting""); ++ if (isStarted()) { ++ logger.warning(""Tried to start already started service""); ++ return; + } ++ if (!service.isEnabled()) { ++ logger.fine(""Tried to start disabled connection""); ++ return; ++ } ++ serverConnection = service.createConnection(new ConnectionListener() { ++ @Override ++ public void connectFailure(Throwable failure_msg) { ++ logger.fine(ServiceConnection.this.getDescription() + "" connection failure""); ++ ServiceConnection.this.close(""Exception during connect""); ++ } + -+ Set vertices = g.vertices(); -+ assertEquals(control.size(), vertices.size()); -+ assertTrue(g.remove(99)); ++ @Override ++ public void connectStarted() { ++ logger.fine(ServiceConnection.this.getDescription() + "" connect started""); ++ } + -+ assertFalse(g.contains(99)); -+ assertFalse(vertices.contains(99)); -+ assertEquals(99, vertices.size()); -+ assertEquals(99, g.order()); ++ @Override ++ public void connectSuccess(ByteBuffer remaining_initial_data) { ++ logger.fine(ServiceConnection.this.getDescription() + "" connected""); ++ serverConnection.getIncomingMessageQueue().registerQueueListener( ++ new ServerIncomingMessageListener()); ++ synchronized (bufferedMessages) { ++ for (OSF2FChannelDataMsg msg : bufferedMessages) { ++ logger.finest(""sending queued message: "" + msg.getDescription()); ++ writeMessageToServerConnection(msg.getData()); ++ } ++ } ++ } ++ ++ @Override ++ public void exceptionThrown(Throwable error) { ++ ServiceConnection.this.close(""Exception during connect""); ++ } ++ ++ @Override ++ public String getDescription() { ++ return ServiceConnection.this.getDescription() + "" connect listener""; ++ } ++ }); ++ started = true; + } + ++ private void writeMessageToServerConnection(DirectByteBuffer[] data) { ++ for (DirectByteBuffer directByteBuffer : data) { ++ DataMessage msg = new DataMessage(directByteBuffer); ++ logger.finest(""writing message to server queue: "" + msg.getDescription()); ++ serverConnection.getOutgoingMessageQueue().addMessage(msg, false); ++ } + } + -+ @Test(expected=UnsupportedOperationException.class) public void testVertexSetIteratorRemove() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ Set control = new HashSet(); -+ for (int i = 0; i < 100; ++i) { -+ g.add(i); -+ control.add(i); ++ private class ServerIncomingMessageListener implements MessageQueueListener { ++ ++ @Override ++ public void dataBytesReceived(int byte_count) { + } + -+ Set vertices = g.vertices(); -+ assertEquals(control.size(), vertices.size()); -+ Iterator iter = vertices.iterator(); -+ assertTrue(iter.hasNext()); -+ Integer toRemove = iter.next(); -+ assertTrue(g.contains(toRemove)); -+ assertTrue(vertices.contains(toRemove)); -+ iter.remove(); -+ assertFalse(g.contains(toRemove)); -+ assertFalse(vertices.contains(toRemove)); -+ assertEquals(g.order(), vertices.size()); -+ } ++ @Override ++ public boolean messageReceived(Message message) { ++ logger.finest(""Message from server: "" + message.getDescription()); ++ if (!(message instanceof DataMessage)) { ++ String msg = ""got wrong message type from server: ""; ++ logger.warning(msg + message.getDescription()); ++ ServiceConnection.this.close(msg); ++ return false; ++ } ++ DataMessage dataMessage = (DataMessage) message; ++ ServiceConnection.this.writeMessageToFriendConnection(dataMessage.transferPayload()); ++ return true; ++ } + -+ @Test(expected=NoSuchElementException.class) public void testVertexSetIteratorTooFar() { -+ 
DirectedMultigraph g = new DirectedMultigraph(); -+ Set control = new HashSet(); -+ for (int i = 0; i < 100; ++i) { -+ g.add(i); -+ control.add(i); ++ @Override ++ public void protocolBytesReceived(int byte_count) { + } ++ } + } +diff --git a/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/servicesharing/DataMessage.java b/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/servicesharing/DataMessage.java +index 11fc62b7..2070626c 100644 +--- a/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/servicesharing/DataMessage.java ++++ b/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/servicesharing/DataMessage.java +@@ -15,16 +15,26 @@ + import com.aelitis.azureus.core.peermanager.messaging.MessageStreamDecoder; + import com.aelitis.azureus.core.peermanager.messaging.MessageStreamEncoder; + ++import edu.washington.cs.oneswarm.f2f.messaging.OSF2FChannelDataMsg; + import edu.washington.cs.oneswarm.f2f.messaging.OSF2FMessage; + +-class DataMessage implements Message { ++public class DataMessage extends OSF2FChannelDataMsg { ++ private static final byte SS = DirectByteBuffer.SS_MSG; + + private DirectByteBuffer buffer = null; + private static String ID = ""RAW_MESSAGE""; +- private final String DESC = ""Raw message""; ++ private final String desc; ++ private final int size; + + public DataMessage(DirectByteBuffer _buffer) { +- buffer = _buffer; ++ super(OSF2FMessage.CURRENT_VERSION, 1, _buffer); ++ size = _buffer.remaining(SS); ++ desc = ""Raw message: "" + size + "" bytes""; ++ } + -+ Set vertices = g.vertices(); -+ Iterator iter = vertices.iterator(); -+ int i = 0; -+ while (iter.hasNext()) { -+ i++; -+ iter.next(); ++ @Override ++ public int getMessageSize() { ++ return size; + } + + public String getID() { +@@ -48,7 +58,7 @@ public int getType() { + } + + public String getDescription() { +- return DESC; ++ return desc; + } + + public byte getVersion() { +@@ -64,11 +74,26 @@ public DirectByteBuffer[] getData() { + } + + public Message deserialize(DirectByteBuffer data, byte version) throws MessageException { +- throw (new MessageException(""not imp"")); ++ throw (new MessageException(""not implemented"")); + } + + public void destroy() { +- buffer.returnToPool(); ++ if (buffer != null) { ++ buffer.returnToPool(); + } -+ assertEquals(vertices.size(), i); -+ iter.next(); + } + -+ @Test(expected=UnsupportedOperationException.class) public void testVertexSetIteratorRemoveTwice() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ Set control = new HashSet(); -+ for (int i = 0; i < 100; ++i) { -+ g.add(i); -+ control.add(i); ++ /** ++ * Retrieve the payload from this message for transfer into a new message. ++ * ++ * The new message is responsible for returning the buffer on destroy. 
++ * ++ * @return ++ */ ++ public DirectByteBuffer transferPayload() { ++ DirectByteBuffer data = buffer; ++ buffer = null; ++ return data; + } + + static class RawMessageEncoder implements MessageStreamEncoder { +@@ -81,7 +106,6 @@ public RawMessage[] encodeMessage(Message base_message) { + } + + static class RawMessageDecoder implements MessageStreamDecoder { +- private static final byte SS = DirectByteBuffer.SS_MSG; + + private static int MAX_PAYLOAD = OSF2FMessage.MAX_PAYLOAD_SIZE; + DirectByteBuffer payload_buffer; +@@ -111,6 +135,9 @@ public int performStreamDecode(Transport transport, int max_bytes) throws IOExce + if (payload_buffer == null) { + payload_buffer = DirectByteBufferPool.getBuffer(SS, MAX_PAYLOAD); + } ++ if (paused) { ++ break; ++ } + long read = transport.read(new ByteBuffer[] { payload_buffer.getBuffer(SS) }, 0, + bytes_left); + bytes_left -= read; +@@ -119,11 +146,12 @@ public int performStreamDecode(Transport transport, int max_bytes) throws IOExce + // * transport has no more data + if (payload_buffer.remaining(SS) == 0 || read == 0) { + if (payload_buffer.position(SS) > 0) { ++ payload_buffer.position(SS, 0); + Message msg = new DataMessage(payload_buffer); + messages_last_read.add(msg); + payload_buffer = null; + } +- // If transport has no more data, break ++ // If we read all from transport, break + if (read == 0) { + break; + } +@@ -169,5 +197,4 @@ public ByteBuffer destroy() { + return ByteBuffer.allocate(0); + } + } +- + } +\ No newline at end of file +diff --git a/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/servicesharing/ServiceSharingManager.java b/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/servicesharing/ServiceSharingManager.java +index 4a920db9..1d3d544e 100644 +--- a/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/servicesharing/ServiceSharingManager.java ++++ b/oneswarm_f2f/src/edu/washington/cs/oneswarm/f2f/servicesharing/ServiceSharingManager.java +@@ -1,40 +1,78 @@ + package edu.washington.cs.oneswarm.f2f.servicesharing; + + import java.net.InetSocketAddress; +-import java.util.concurrent.ConcurrentHashMap; ++import java.nio.ByteBuffer; ++import java.util.HashMap; ++import java.util.logging.Logger; + + import com.aelitis.azureus.core.networkmanager.ConnectionEndpoint; + import com.aelitis.azureus.core.networkmanager.NetworkConnection; ++import com.aelitis.azureus.core.networkmanager.NetworkConnection.ConnectionListener; + import com.aelitis.azureus.core.networkmanager.NetworkManager; + ++import edu.washington.cs.oneswarm.f2f.BigFatLock; + import edu.washington.cs.oneswarm.f2f.messaging.OSF2FHashSearch; ++import edu.washington.cs.oneswarm.f2f.network.OverlayManager; + import edu.washington.cs.oneswarm.f2f.servicesharing.DataMessage.RawMessageDecoder; + import edu.washington.cs.oneswarm.f2f.servicesharing.DataMessage.RawMessageEncoder; + + public class ServiceSharingManager { + +- private final static ServiceSharingManager instace = new ServiceSharingManager(); ++ private final static ServiceSharingManager instance = new ServiceSharingManager(); ++ ++ private static BigFatLock lock = OverlayManager.lock; ++ ++ private final static Logger logger = Logger.getLogger(ServiceSharingManager.class.getName()); + + public static ServiceSharingManager getInstance() { +- return instace; ++ return instance; + } + + private ServiceSharingManager() { + } + +- public ConcurrentHashMap services = new ConcurrentHashMap(); ++ public HashMap serverServices = new HashMap(); ++ ++ public void registerServerService(long searchkey, SharedService service) { ++ 
try { ++ lock.lock(); ++ serverServices.put(searchkey, service); ++ } finally { ++ lock.unlock(); + } + -+ Set vertices = g.vertices(); -+ Iterator iter = vertices.iterator(); -+ assertTrue(iter.hasNext()); -+ Integer toRemove = iter.next(); -+ assertTrue(g.contains(toRemove)); -+ assertTrue(vertices.contains(toRemove)); -+ iter.remove(); -+ iter.remove(); + } + -+ @Test(expected=UnsupportedOperationException.class) public void testVertexSetIteratorRemoveEarly() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ Set control = new HashSet(); -+ for (int i = 0; i < 100; ++i) { -+ g.add(i); -+ control.add(i); ++ public void deregisterServerService(long searchKey) { ++ try { ++ lock.lock(); ++ serverServices.remove(searchKey); ++ } finally { ++ lock.unlock(); ++ } + +- public void registerService(long searchkey, Service service) { +- services.put(searchkey, service); + } + +- public Service handleSearch(OSF2FHashSearch search) { +- Service service = services.get(search.getInfohashhash()); ++ public SharedService handleSearch(OSF2FHashSearch search) { ++ SharedService service = null; ++ try { ++ lock.lock(); ++ service = serverServices.get(search.getInfohashhash()); ++ } finally { ++ lock.unlock(); + } + -+ Set vertices = g.vertices(); -+ Iterator iter = vertices.iterator(); -+ iter.remove(); -+ } - ++ if (service == null || !service.isEnabled()) { ++ return null; ++ } + return service; + } --// /****************************************************************** --// * --// * --// * EdgeView tests --// * --// * --// ******************************************************************/ -- --// @Test public void testEdgeViewAdd() { --// DirectedMultigraph g = new DirectedMultigraph(); --// Set> edges = g.edges(); --// assertEquals(g.size(), edges.size()); --// edges.add(new SimpleDirectedTypedEdge(""type-1"",0, 1)); --// assertEquals(2, g.order()); --// assertEquals(1, g.size()); --// assertEquals(1, edges.size()); --// assertTrue(g.contains(new SimpleDirectedTypedEdge(""type-1"",0, 1))); --// assertTrue(edges.contains(new SimpleDirectedTypedEdge(""type-1"",0, 1))); --// } -- --// @Test public void testEdgeViewRemove() { --// DirectedMultigraph g = new DirectedMultigraph(); --// Set> edges = g.edges(); --// assertEquals(g.size(), edges.size()); --// edges.add(new SimpleDirectedTypedEdge(""type-1"",0, 1)); --// edges.remove(new SimpleDirectedTypedEdge(""type-1"",0, 1)); --// assertEquals(2, g.order()); --// assertEquals(0, g.size()); --// assertEquals(0, edges.size()); --// assertFalse(g.contains(new SimpleDirectedTypedEdge(""type-1"",0, 1))); --// assertFalse(edges.contains(new SimpleDirectedTypedEdge(""type-1"",0, 1))); --// } -- --// @Test public void testEdgeViewIterator() { --// DirectedMultigraph g = new DirectedMultigraph(); --// Set> edges = g.edges(); -- --// Set> control = new HashSet>(); --// for (int i = 0; i < 100; i += 2) { --// DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, i+1); --// g.add(e); // all disconnected --// control.add(e); --// } -+ /****************************************************************** -+ * -+ * -+ * EdgeView tests -+ * -+ * -+ ******************************************************************/ +- static class Service { +- public Service(InetSocketAddress address, String name) { ++ public static class SharedService { ++ // Time the service is disabled after a failed connect attempt; ++ public static final long FAILURE_BACKOFF = 60 * 1000; + -+ @Test public void testEdgeViewAdd() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ 
Set> edges = g.edges(); -+ assertEquals(g.size(), edges.size()); -+ edges.add(new SimpleDirectedTypedEdge(""type-1"",0, 1)); -+ assertEquals(2, g.order()); -+ assertEquals(1, g.size()); -+ assertEquals(1, edges.size()); -+ assertTrue(g.contains(new SimpleDirectedTypedEdge(""type-1"",0, 1))); -+ assertTrue(edges.contains(new SimpleDirectedTypedEdge(""type-1"",0, 1))); -+ } ++ public SharedService(InetSocketAddress address, String name) { + super(); + this.address = address; + this.name = name; +@@ -42,13 +80,53 @@ public Service(InetSocketAddress address, String name) { + + private final InetSocketAddress address; + private final String name; ++ private long lastFailedConnect; + +- public NetworkConnection createConnection() { ++ public boolean isEnabled() { ++ long lastFailedAge = System.currentTimeMillis() - lastFailedConnect; ++ boolean enabled = lastFailedAge > FAILURE_BACKOFF; ++ logger.finer(String.format(""Service %s is disabled, last failure: %d seconds ago"", ++ name, lastFailedAge)); ++ return enabled; ++ } + -+ @Test public void testEdgeViewRemove() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ Set> edges = g.edges(); -+ assertEquals(g.size(), edges.size()); -+ edges.add(new SimpleDirectedTypedEdge(""type-1"",0, 1)); -+ edges.remove(new SimpleDirectedTypedEdge(""type-1"",0, 1)); -+ assertEquals(2, g.order()); -+ assertEquals(0, g.size()); -+ assertEquals(0, edges.size()); -+ assertFalse(g.contains(new SimpleDirectedTypedEdge(""type-1"",0, 1))); -+ assertFalse(edges.contains(new SimpleDirectedTypedEdge(""type-1"",0, 1))); -+ } ++ public NetworkConnection createConnection(final ConnectionListener listener) { + ConnectionEndpoint target = new ConnectionEndpoint(address); + NetworkConnection conn = NetworkManager.getSingleton().createConnection(target, + new RawMessageEncoder(), new RawMessageDecoder(), false, false, new byte[0][0]); ++ conn.connect(false, new ConnectionListener() { + -+ @Test public void testEdgeViewIterator() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ Set> edges = g.edges(); ++ @Override ++ public String getDescription() { ++ return name + ""Listener""; ++ } + -+ Set> control = new HashSet>(); -+ for (int i = 0; i < 100; i += 2) { -+ DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, i+1); -+ g.add(e); // all disconnected -+ control.add(e); ++ @Override ++ public void exceptionThrown(Throwable error) { ++ listener.exceptionThrown(error); ++ } ++ ++ @Override ++ public void connectSuccess(ByteBuffer remaining_initial_data) { ++ listener.connectSuccess(remaining_initial_data); ++ } ++ ++ @Override ++ public void connectStarted() { ++ listener.connectStarted(); ++ } ++ ++ @Override ++ public void connectFailure(Throwable failure_msg) { ++ lastFailedConnect = System.currentTimeMillis(); ++ listener.connectFailure(failure_msg); ++ } ++ }); + return conn; + } - --// assertEquals(100, g.order()); --// assertEquals(50, g.size()); --// assertEquals(50, edges.size()); -+ assertEquals(100, g.order()); -+ assertEquals(50, g.size()); -+ assertEquals(50, edges.size()); - --// Set> test = new HashSet>(); --// for (DirectedTypedEdge e : edges) --// test.add(e); --// assertEquals(control.size(), test.size()); --// for (Edge e : test) --// assertTrue(control.contains(e)); --// } -- --// @Test public void testEdgeViewIteratorRemove() { --// DirectedMultigraph g = new DirectedMultigraph(); --// Set> edges = g.edges(); -- --// Set> control = new HashSet>(); --// for (int i = 0; i < 10; i += 2) { --// DirectedTypedEdge e = new 
SimpleDirectedTypedEdge(""type-1"",i, i+1); --// g.add(e); // all disconnected --// control.add(e); --// } -+ Set> test = new HashSet>(); -+ for (DirectedTypedEdge e : edges) -+ test.add(e); -+ assertEquals(control.size(), test.size()); -+ for (Edge e : test) -+ assertTrue(control.contains(e)); ++ public String toString() { ++ return name + "" "" + address; + } + } + }" +f64233fb2d9bcb77e9249d3bc497fd9d110e6a9f,ReactiveX-RxJava,Add Single.fromCallable()--,a,https://github.com/ReactiveX/RxJava,"diff --git a/src/main/java/rx/Single.java b/src/main/java/rx/Single.java +index 4324d32acf..3701d93189 100644 +--- a/src/main/java/rx/Single.java ++++ b/src/main/java/rx/Single.java +@@ -12,6 +12,7 @@ + */ + package rx; + ++import java.util.concurrent.Callable; + import java.util.concurrent.Future; + import java.util.concurrent.TimeUnit; + import java.util.concurrent.TimeoutException; +@@ -605,6 +606,43 @@ public final static Single from(Future future, Scheduler sch + return new Single(OnSubscribeToObservableFuture.toObservableFuture(future)).subscribeOn(scheduler); + } + ++ /** ++ * Returns a {@link Single} that invokes passed function and emits its result for each new Observer that subscribes. ++ *

++ * Allows you to defer execution of passed function until Observer subscribes to the {@link Single}. ++ * It makes passed function ""lazy"". ++ * Result of the function invocation will be emitted by the {@link Single}. ++ *

++ * Scheduler:
++ * {@code fromCallable} does not operate by default on a particular {@link Scheduler}.
++ *
++ * ++ * @param func ++ * function which execution should be deferred, it will be invoked when Observer will subscribe to the {@link Single}. ++ * @param ++ * the type of the item emitted by the {@link Single}. ++ * @return a {@link Single} whose {@link Observer}s' subscriptions trigger an invocation of the given function. ++ */ ++ @Experimental ++ public static Single fromCallable(final Callable func) { ++ return create(new OnSubscribe() { ++ @Override ++ public void call(SingleSubscriber singleSubscriber) { ++ final T value; ++ ++ try { ++ value = func.call(); ++ } catch (Throwable t) { ++ Exceptions.throwIfFatal(t); ++ singleSubscriber.onError(t); ++ return; ++ } ++ ++ singleSubscriber.onSuccess(value); ++ } ++ }); + } + -+ @Test public void testEdgeViewIteratorRemove() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ Set> edges = g.edges(); + /** + * Returns a {@code Single} that emits a specified item. + *

+diff --git a/src/test/java/rx/SingleTest.java b/src/test/java/rx/SingleTest.java +index 7d8fe2dc22..f78151b094 100644 +--- a/src/test/java/rx/SingleTest.java ++++ b/src/test/java/rx/SingleTest.java +@@ -20,8 +20,10 @@ + import static org.mockito.Mockito.mock; + import static org.mockito.Mockito.verify; + import static org.mockito.Mockito.verifyZeroInteractions; ++import static org.mockito.Mockito.when; + + import java.util.Arrays; ++import java.util.concurrent.Callable; + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.TimeUnit; + import java.util.concurrent.TimeoutException; +@@ -530,4 +532,42 @@ public void doOnErrorShouldThrowCompositeExceptionIfOnErrorActionThrows() { + + verify(action).call(error); + } + -+ Set> control = new HashSet>(); -+ for (int i = 0; i < 10; i += 2) { -+ DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, i+1); -+ g.add(e); // all disconnected -+ control.add(e); -+ } - --// assertEquals(10, g.order()); --// assertEquals(5, g.size()); --// assertEquals(5, edges.size()); -+ assertEquals(10, g.order()); -+ assertEquals(5, g.size()); -+ assertEquals(5, edges.size()); ++ @Test ++ public void shouldEmitValueFromCallable() throws Exception { ++ Callable callable = mock(Callable.class); ++ ++ when(callable.call()).thenReturn(""value""); ++ ++ TestSubscriber testSubscriber = new TestSubscriber(); ++ ++ Single ++ .fromCallable(callable) ++ .subscribe(testSubscriber); ++ ++ testSubscriber.assertValue(""value""); ++ testSubscriber.assertNoErrors(); ++ ++ verify(callable).call(); ++ } ++ ++ @Test ++ public void shouldPassErrorFromCallable() throws Exception { ++ Callable callable = mock(Callable.class); ++ ++ Throwable error = new IllegalStateException(); ++ ++ when(callable.call()).thenThrow(error); ++ ++ TestSubscriber testSubscriber = new TestSubscriber(); ++ ++ Single ++ .fromCallable(callable) ++ .subscribe(testSubscriber); ++ ++ testSubscriber.assertNoValues(); ++ testSubscriber.assertError(error); ++ ++ verify(callable).call(); ++ } + }" +e2ffe19d36e25a9d208e53a534f878e8605ed5ab,intellij-community,cleanup--,p,https://github.com/JetBrains/intellij-community,"diff --git a/java/compiler/impl/src/com/intellij/compiler/server/BuildManager.java b/java/compiler/impl/src/com/intellij/compiler/server/BuildManager.java +index a34084fa28eaf..6fb2c80e3d801 100644 +--- a/java/compiler/impl/src/com/intellij/compiler/server/BuildManager.java ++++ b/java/compiler/impl/src/com/intellij/compiler/server/BuildManager.java +@@ -137,7 +137,7 @@ public Boolean fun(String s) { + private final CompileServerClasspathManager myClasspathManager = new CompileServerClasspathManager(); + private final Executor myPooledThreadExecutor = new Executor() { + @Override +- public void execute(Runnable command) { ++ public void execute(@NotNull Runnable command) { + ApplicationManager.getApplication().executeOnPooledThread(command); + } + }; +@@ -478,7 +478,7 @@ public void run() { + globals = buildGlobalSettings(); + myGlobals = globals; + } +- CmdlineRemoteProto.Message.ControllerMessage.FSEvent currentFSChanges = null; ++ CmdlineRemoteProto.Message.ControllerMessage.FSEvent currentFSChanges; + final SequentialTaskExecutor projectTaskQueue; + synchronized (myProjectDataMap) { + ProjectData data = myProjectDataMap.get(projectPath); +@@ -761,21 +761,12 @@ private Process launchBuildProcess(Project project, final int port, final UUID s + cmdLine.addParameter(""-D""+ GlobalOptions.HOSTNAME_OPTION + ""="" + host); + + // javac's VM should use the same default locale that 
IDEA uses in order for javac to print messages in 'correct' language +- final String lang = System.getProperty(""user.language""); +- if (lang != null) { +- //noinspection HardCodedStringLiteral +- cmdLine.addParameter(""-Duser.language="" + lang); +- } +- final String country = System.getProperty(""user.country""); +- if (country != null) { +- //noinspection HardCodedStringLiteral +- cmdLine.addParameter(""-Duser.country="" + country); +- } +- //noinspection HardCodedStringLiteral +- final String region = System.getProperty(""user.region""); +- if (region != null) { +- //noinspection HardCodedStringLiteral +- cmdLine.addParameter(""-Duser.region="" + region); ++ String[] propertyNames = {""user.language"", ""user.country"", ""user.region""}; ++ for (String name : propertyNames) { ++ final String value = System.getProperty(name); ++ if (value != null) { ++ cmdLine.addParameter(""-D"" + name + ""="" + value); ++ } + } + + cmdLine.addParameter(""-classpath""); +diff --git a/jps/jps-builders/src/org/jetbrains/jps/api/CmdlineProtoUtil.java b/jps/jps-builders/src/org/jetbrains/jps/api/CmdlineProtoUtil.java +index 38d0aa113a458..ac843603c68b7 100644 +--- a/jps/jps-builders/src/org/jetbrains/jps/api/CmdlineProtoUtil.java ++++ b/jps/jps-builders/src/org/jetbrains/jps/api/CmdlineProtoUtil.java +@@ -22,7 +22,7 @@ public static CmdlineRemoteProto.Message.ControllerMessage createMakeRequest(Str + List scopes, + final Map userData, + final CmdlineRemoteProto.Message.ControllerMessage.GlobalSettings globals, +- final CmdlineRemoteProto.Message.ControllerMessage.FSEvent event) { ++ final @Nullable CmdlineRemoteProto.Message.ControllerMessage.FSEvent event) { + return createBuildParametersMessage(CmdlineRemoteProto.Message.ControllerMessage.ParametersMessage.Type.MAKE, project, scopes, + userData, Collections.emptyList(), + globals, event); +@@ -33,7 +33,7 @@ public static CmdlineRemoteProto.Message.ControllerMessage createForceCompileReq + Collection paths, + final Map userData, + final CmdlineRemoteProto.Message.ControllerMessage.GlobalSettings globals, +- final CmdlineRemoteProto.Message.ControllerMessage.FSEvent event) { ++ final @Nullable CmdlineRemoteProto.Message.ControllerMessage.FSEvent event) { + return createBuildParametersMessage(CmdlineRemoteProto.Message.ControllerMessage.ParametersMessage.Type.FORCED_COMPILATION, project, + scopes, userData, paths, globals, event); + } +@@ -63,7 +63,7 @@ public static TargetTypeBuildScope createAllTargetsScope(BuildTargetType type + + private static CmdlineRemoteProto.Message.ControllerMessage createBuildParametersMessage(CmdlineRemoteProto.Message.ControllerMessage.ParametersMessage.Type buildType, + String project, +- List scopes, ++ List scopes, + Map userData, + Collection paths, + final CmdlineRemoteProto.Message.ControllerMessage.GlobalSettings globals, +@@ -99,7 +99,7 @@ public static CmdlineRemoteProto.Message.KeyValuePair createPair(String key, Str + } + + +- public static CmdlineRemoteProto.Message.Failure createFailure(String description, Throwable cause) { ++ public static CmdlineRemoteProto.Message.Failure createFailure(String description, @Nullable Throwable cause) { + final CmdlineRemoteProto.Message.Failure.Builder builder = CmdlineRemoteProto.Message.Failure.newBuilder(); + builder.setDescription(description); + if (cause != null) {" +6236c422615cfe33795267214077551f3d9ffa6f,camel,CAMEL-1977: Http based components should filter- out camel internal headers.--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@814567 
13f79535-47bb-0310-9956-ffa450edef68-,c,https://github.com/apache/camel,"diff --git a/components/camel-http/src/main/java/org/apache/camel/component/http/HttpHeaderFilterStrategy.java b/components/camel-http/src/main/java/org/apache/camel/component/http/HttpHeaderFilterStrategy.java +index a32be75c50c52..37e81e5c902f3 100644 +--- a/components/camel-http/src/main/java/org/apache/camel/component/http/HttpHeaderFilterStrategy.java ++++ b/components/camel-http/src/main/java/org/apache/camel/component/http/HttpHeaderFilterStrategy.java +@@ -45,6 +45,7 @@ protected void initialize() { + setLowerCase(true); --// Iterator> iter = edges.iterator(); --// while (iter.hasNext()) { --// iter.next(); --// iter.remove(); --// } --// assertEquals(0, g.size()); --// assertFalse(g.edges().iterator().hasNext()); --// assertEquals(0, edges.size()); --// assertEquals(10, g.order()); --// } -- --// /****************************************************************** --// * --// * --// * AdjacencyListView tests --// * --// * --// ******************************************************************/ -- --// @Test public void testAdjacencyList() { --// DirectedMultigraph g = new DirectedMultigraph(); --// for (int i = 0; i < 10; ++i) --// for (int j = i + 1; j < 10; ++j) --// g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); -- --// for (int i = 0; i < 10; ++i) { --// Set> adjacencyList = g.getAdjacencyList(i); --// assertEquals(9, adjacencyList.size()); -+ Iterator> iter = edges.iterator(); -+ while (iter.hasNext()) { -+ iter.next(); -+ iter.remove(); -+ } -+ assertEquals(0, g.size()); -+ assertFalse(g.edges().iterator().hasNext()); -+ assertEquals(0, edges.size()); -+ assertEquals(10, g.order()); -+ } + // filter headers begin with ""Camel"" or ""org.apache.camel"" +- setOutFilterPattern(""(Camel|org\\.apache\\.camel)[\\.|a-z|A-z|0-9]*""); ++ // must ignore case for Http based transports ++ setOutFilterPattern(""(?i)(Camel|org\\.apache\\.camel)[\\.|a-z|A-z|0-9]*""); + } + } +diff --git a/components/camel-jetty/src/test/java/org/apache/camel/component/jetty/HttpFilterCamelHeadersTest.java b/components/camel-jetty/src/test/java/org/apache/camel/component/jetty/HttpFilterCamelHeadersTest.java +new file mode 100644 +index 0000000000000..40861c7fcc891 +--- /dev/null ++++ b/components/camel-jetty/src/test/java/org/apache/camel/component/jetty/HttpFilterCamelHeadersTest.java +@@ -0,0 +1,80 @@ ++/** ++ * Licensed to the Apache Software Foundation (ASF) under one or more ++ * contributor license agreements. See the NOTICE file distributed with ++ * this work for additional information regarding copyright ownership. ++ * The ASF licenses this file to You under the Apache License, Version 2.0 ++ * (the ""License""); you may not use this file except in compliance with ++ * the License. You may obtain a copy of the License at ++ * ++ * http://www.apache.org/licenses/LICENSE-2.0 ++ * ++ * Unless required by applicable law or agreed to in writing, software ++ * distributed under the License is distributed on an ""AS IS"" BASIS, ++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ++ * See the License for the specific language governing permissions and ++ * limitations under the License. 
++ */ ++package org.apache.camel.component.jetty; + -+ /****************************************************************** -+ * -+ * -+ * AdjacencyListView tests -+ * -+ * -+ ******************************************************************/ ++import java.util.Map; + -+ @Test public void testAdjacencyList() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ for (int i = 0; i < 10; ++i) -+ for (int j = i + 1; j < 10; ++j) -+ g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); ++import org.apache.camel.Exchange; ++import org.apache.camel.Processor; ++import org.apache.camel.builder.RouteBuilder; ++import org.apache.camel.impl.JndiRegistry; ++import org.apache.camel.test.junit4.CamelTestSupport; ++import org.junit.Test; + -+ for (int i = 0; i < 10; ++i) { -+ Set> adjacencyList = g.getAdjacencyList(i); -+ assertEquals(9, adjacencyList.size()); - --// for (int j = 0; j < 10; ++j) { --// DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, j); --// if (i >= j) --// assertFalse(adjacencyList.contains(e)); --// else --// assertTrue(adjacencyList.contains(e)); --// } --// } --// } -- --// @Test public void testAdjacencyListRemoveEdge() { --// DirectedMultigraph g = new DirectedMultigraph(); --// for (int i = 0; i < 10; ++i) --// for (int j = i + 1; j < 10; ++j) --// g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); -- --// Set> adjacencyList = g.getAdjacencyList(0); --// Edge e = new SimpleDirectedTypedEdge(""type-1"",0, 1); --// assertTrue(adjacencyList.contains(e)); --// assertTrue(adjacencyList.remove(e)); --// assertEquals(8, adjacencyList.size()); --// assertEquals( (10 * 9) / 2 - 1, g.size()); --// } -- --// public void testAdjacencyListAddEdge() { --// DirectedMultigraph g = new DirectedMultigraph(); --// for (int i = 0; i < 10; ++i) --// for (int j = i + 2; j < 10; ++j) --// g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); -- --// assertEquals( (10 * 9) / 2 - 9, g.size()); -- --// Set> adjacencyList = g.getAdjacencyList(0); --// DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",0, 1); --// assertFalse(adjacencyList.contains(e)); --// assertFalse(g.contains(e)); -- --// assertTrue(adjacencyList.add(e)); --// assertTrue(g.contains(e)); -- --// assertEquals(9, adjacencyList.size()); --// assertEquals( (10 * 9) / 2 - 8, g.size()); --// } -- --// @Test public void testAdjacencyListIterator() { --// DirectedMultigraph g = new DirectedMultigraph(); --// for (int i = 0; i < 10; ++i) { --// for (int j = i + 1; j < 10; ++j) { --// DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, j); --// g.add(e); --// } --// } -- --// Set> test = new HashSet>(); --// Set> adjacencyList = g.getAdjacencyList(0); --// assertEquals(9, adjacencyList.size()); -- --// Iterator> it = adjacencyList.iterator(); --// int i = 0; --// while (it.hasNext()) --// assertTrue(test.add(it.next())); --// assertEquals(9, test.size()); --// } -- --// @Test public void testAdjacencyListNoVertex() { --// DirectedMultigraph g = new DirectedMultigraph(); --// Set> adjacencyList = g.getAdjacencyList(0); --// assertEquals(0, adjacencyList.size()); --// } -- --// @Test(expected=NoSuchElementException.class) --// public void testAdjacencyListIteratorNextOffEnd() { --// DirectedMultigraph g = new DirectedMultigraph(); --// for (int i = 0; i < 10; ++i) { --// for (int j = i + 1; j < 10; ++j) { --// DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, j); --// g.add(e); --// } --// } -- --// Set> test = new HashSet>(); --// Set> adjacencyList = g.getAdjacencyList(0); --// 
assertEquals(9, adjacencyList.size()); -- --// Iterator> it = adjacencyList.iterator(); --// int i = 0; --// while (it.hasNext()) --// assertTrue(test.add(it.next())); --// assertEquals(9, test.size()); --// it.next(); --// } -- --// @Test(expected=UnsupportedOperationException.class) public void testAdjacencyListIteratorRemove() { --// DirectedMultigraph g = new DirectedMultigraph(); --// for (int i = 0; i < 10; ++i) { --// for (int j = i + 1; j < 10; ++j) { --// DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, j); --// g.add(e); --// } --// } -- --// Set> test = new HashSet>(); --// Set> adjacencyList = g.getAdjacencyList(0); --// assertEquals(9, adjacencyList.size()); -- --// Iterator> it = adjacencyList.iterator(); --// assertTrue(it.hasNext()); --// Edge e = it.next(); --// it.remove(); --// assertFalse(adjacencyList.contains(e)); --// assertEquals(8, adjacencyList.size()); --// assertFalse(g.contains(e)); --// assertEquals( (10 * 9) / 2 - 1, g.size()); --// } -- --// @Test(expected=UnsupportedOperationException.class) --// public void testAdjacencyListIteratorRemoveFirst() { --// DirectedMultigraph g = new DirectedMultigraph(); --// for (int i = 0; i < 10; ++i) { --// for (int j = i + 1; j < 10; ++j) { --// DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, j); --// g.add(e); --// } --// } -- --// Set> test = new HashSet>(); --// Set> adjacencyList = g.getAdjacencyList(0); --// assertEquals(9, adjacencyList.size()); -- --// Iterator> it = adjacencyList.iterator(); --// it.remove(); --// } -- --// @Test(expected=UnsupportedOperationException.class) --// public void testAdjacencyListIteratorRemoveTwice() { --// DirectedMultigraph g = new DirectedMultigraph(); --// for (int i = 0; i < 10; ++i) { --// for (int j = i + 1; j < 10; ++j) { --// DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, j); --// g.add(e); --// } --// } -- --// Set> test = new HashSet>(); --// Set> adjacencyList = g.getAdjacencyList(0); --// assertEquals(9, adjacencyList.size()); -- --// Iterator> it = adjacencyList.iterator(); --// assertTrue(it.hasNext()); --// it.next(); --// it.remove(); --// it.remove(); --// } -- --// /****************************************************************** --// * --// * --// * AdjacentVerticesView tests --// * --// * --// ******************************************************************/ -- -- --// @Test public void testAdjacentVertices() { --// DirectedMultigraph g = new DirectedMultigraph(); --// for (int i = 0; i < 10; ++i) { --// for (int j = i + 1; j < 10; ++j) { --// DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, j); --// g.add(e); --// } --// } -- --// Set test = new HashSet(); --// Set adjacent = g.getNeighbors(0); --// assertEquals(9, adjacent.size()); --// for (int i = 1; i < 10; ++i) --// assertTrue(adjacent.contains(i)); --// assertFalse(adjacent.contains(0)); --// assertFalse(adjacent.contains(10)); --// } -- --// @Test(expected=UnsupportedOperationException.class) public void testAdjacentVerticesAdd() { --// DirectedMultigraph g = new DirectedMultigraph(); --// for (int i = 0; i < 10; ++i) { --// for (int j = i + 1; j < 10; ++j) { --// DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, j); --// g.add(e); --// } --// } -- --// Set test = new HashSet(); --// Set adjacent = g.getNeighbors(0); --// adjacent.add(1); --// } -- --// @Test(expected=UnsupportedOperationException.class) public void testAdjacentVerticesRemove() { --// DirectedMultigraph g = new DirectedMultigraph(); --// for (int i = 0; i < 
10; ++i) { --// for (int j = i + 1; j < 10; ++j) { --// DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, j); --// g.add(e); --// } --// } -- --// Set test = new HashSet(); --// Set adjacent = g.getNeighbors(0); --// adjacent.remove(1); --// } -- --// @Test public void testAdjacentVerticesIterator() { --// DirectedMultigraph g = new DirectedMultigraph(); --// for (int i = 0; i < 10; ++i) { --// for (int j = i + 1; j < 10; ++j) { --// DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, j); --// g.add(e); --// } --// } -- --// Set test = new HashSet(); --// Set adjacent = g.getNeighbors(0); --// Iterator it = adjacent.iterator(); --// while (it.hasNext()) --// assertTrue(test.add(it.next())); --// assertEquals(9, test.size()); --// } -- -- --// @Test(expected=UnsupportedOperationException.class) public void testAdjacentVerticesIteratorRemove() { --// DirectedMultigraph g = new DirectedMultigraph(); --// for (int i = 0; i < 10; ++i) { --// for (int j = i + 1; j < 10; ++j) { --// DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, j); --// g.add(e); --// } --// } -- --// Set test = new HashSet(); --// Set adjacent = g.getNeighbors(0); --// Iterator it = adjacent.iterator(); --// assertTrue(it.hasNext()); --// it.next(); --// it.remove(); --// } -- --// /****************************************************************** --// * --// * --// * Subgraph tests --// * --// * --// ******************************************************************/ -- --// @Test public void testSubgraph() { --// DirectedMultigraph g = new DirectedMultigraph(); -- --// // fully connected --// for (int i = 0; i < 10; i++) { --// for (int j = i+1; j < 10; ++j) --// g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); --// } -- --// // (n * (n-1)) / 2 --// assertEquals( (10 * 9) / 2, g.size()); --// assertEquals(10, g.order()); -- --// Set vertices = new LinkedHashSet(); --// for (int i = 0; i < 5; ++i) --// vertices.add(i); -- --// DirectedMultigraph subgraph = g.subgraph(vertices); --// assertEquals(5, subgraph.order()); --// assertEquals( (5 * 4) / 2, subgraph.size()); --// } -- --// @Test public void testSubgraphContainsVertex() { --// DirectedMultigraph g = new DirectedMultigraph(); -- --// // fully connected --// for (int i = 0; i < 10; i++) { --// for (int j = i+1; j < 10; ++j) --// g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); --// } -- --// // (n * (n-1)) / 2 --// assertEquals( (10 * 9) / 2, g.size()); --// assertEquals(10, g.order()); -- --// Set vertices = new LinkedHashSet(); --// for (int i = 0; i < 5; ++i) --// vertices.add(i); -- --// DirectedMultigraph subgraph = g.subgraph(vertices); --// assertEquals(5, subgraph.order()); --// assertEquals( (5 * 4) / 2, subgraph.size()); --// for (int i = 0; i < 5; ++i) --// assertTrue(subgraph.contains(i)); --// for (int i = 5; i < 10; ++i) { --// assertTrue(g.contains(i)); --// assertFalse(subgraph.contains(i)); --// } --// } -- --// @Test public void testSubgraphContainsEdge() { --// DirectedMultigraph g = new DirectedMultigraph(); -- --// // fully connected --// for (int i = 0; i < 10; i++) { --// for (int j = i+1; j < 10; ++j) --// g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); --// } -- --// // (n * (n-1)) / 2 --// assertEquals( (10 * 9) / 2, g.size()); --// assertEquals(10, g.order()); -- --// Set vertices = new LinkedHashSet(); --// for (int i = 0; i < 5; ++i) --// vertices.add(i); -- --// DirectedMultigraph subgraph = g.subgraph(vertices); --// assertEquals(5, subgraph.order()); --// assertEquals( (5 * 4) / 2, 
subgraph.size()); --// for (int i = 0; i < 5; ++i) { --// for (int j = i+1; j < 5; ++j) { --// assertTrue(subgraph.contains(new SimpleDirectedTypedEdge(""type-1"",i, j))); --// } --// } -- --// for (int i = 5; i < 10; ++i) { --// for (int j = i+1; j < 10; ++j) { --// DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, j); --// assertTrue(g.contains(e)); --// assertFalse(subgraph.contains(e)); --// } --// } --// } -- --// @Test public void testSubgraphAddEdge() { --// DirectedMultigraph g = new DirectedMultigraph(); -- --// // fully connected --// for (int i = 0; i < 10; i++) { --// for (int j = i+1; j < i+2 && j < 10; ++j) --// assertTrue(g.add(new SimpleDirectedTypedEdge(""type-1"",i, j))); --// } -- --// assertEquals(9, g.size()); --// assertEquals(10, g.order()); -- --// Set vertices = new LinkedHashSet(); --// for (int i = 0; i < 5; ++i) --// vertices.add(i); -- --// DirectedMultigraph subgraph = g.subgraph(vertices); --// assertEquals(5, subgraph.order()); --// assertEquals(4, subgraph.size()); -- --// // Add an edge to a new vertex --// assertTrue(subgraph.add(new SimpleDirectedTypedEdge(""type-1"", 1, 0))); --// assertEquals(5, subgraph.size()); --// assertEquals(5, subgraph.order()); --// assertEquals(10, g.size()); -- --// } -- --// @Test(expected=UnsupportedOperationException.class) public void testSubgraphAddEdgeNewVertex() { --// DirectedMultigraph g = new DirectedMultigraph(); -- --// // fully connected --// for (int i = 0; i < 10; i++) { --// for (int j = i+1; j < 10; ++j) --// g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); --// } -- --// // (n * (n-1)) / 2 --// assertEquals( (10 * 9) / 2, g.size()); --// assertEquals(10, g.order()); -- --// Set vertices = new LinkedHashSet(); --// for (int i = 0; i < 5; ++i) --// vertices.add(i); -- --// DirectedMultigraph subgraph = g.subgraph(vertices); --// assertEquals(5, subgraph.order()); --// assertEquals( (5 * 4) / 2, subgraph.size()); -- --// // Add an edge to a new vertex --// assertTrue(subgraph.add(new SimpleDirectedTypedEdge(""type-1"",0, 5))); --// assertEquals( (5 * 4) / 2 + 1, subgraph.size()); --// assertEquals(6, subgraph.order()); --// assertEquals(11, g.order()); --// assertEquals( (9*10)/2 + 1, g.size()); --// } -- --// @Test public void testSubgraphRemoveEdge() { --// DirectedMultigraph g = new DirectedMultigraph(); -- --// // fully connected --// for (int i = 0; i < 10; i++) { --// for (int j = i+1; j < 10; ++j) --// g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); --// } -- --// // (n * (n-1)) / 2 --// assertEquals( (10 * 9) / 2, g.size()); --// assertEquals(10, g.order()); -- --// Set vertices = new LinkedHashSet(); --// for (int i = 0; i < 5; ++i) --// vertices.add(i); ++/** ++ * @version $Revision$ ++ */ ++public class HttpFilterCamelHeadersTest extends CamelTestSupport { ++ ++ @Test ++ public void testFilterCamelHeaders() throws Exception { ++ Exchange out = template.send(""http://localhost:9090/test/filter"", new Processor() { ++ public void process(Exchange exchange) throws Exception { ++ exchange.getIn().setBody(""Claus""); ++ exchange.getIn().setHeader(""bar"", 123); ++ } ++ }); ++ ++ assertNotNull(out); ++ assertEquals(""Hi Claus"", out.getOut().getBody(String.class)); ++ ++ // there should be no internal Camel headers ++ // except for the response code ++ Map headers = out.getOut().getHeaders(); ++ for (String key : headers.keySet()) { ++ if (!key.equalsIgnoreCase(Exchange.HTTP_RESPONSE_CODE)) { ++ assertTrue(""Should not contain any Camel internal headers"", 
!key.toLowerCase().startsWith(""camel"")); ++ } else { ++ assertEquals(200, headers.get(Exchange.HTTP_RESPONSE_CODE)); ++ } ++ } ++ } ++ ++ @Override ++ protected JndiRegistry createRegistry() throws Exception { ++ JndiRegistry jndi = super.createRegistry(); ++ jndi.bind(""foo"", new MyFooBean()); ++ return jndi; ++ } ++ ++ @Override ++ protected RouteBuilder createRouteBuilder() throws Exception { ++ return new RouteBuilder() { ++ @Override ++ public void configure() throws Exception { ++ from(""jetty:http://localhost:9090/test/filter"").beanRef(""foo""); ++ } ++ }; ++ } ++ ++ public static class MyFooBean { ++ ++ public String hello(String name) { ++ return ""Hi "" + name; ++ } ++ } ++}" +e931ef7e6e360b1a89e3f0d97fbc8332852b8dcd,intellij-community,move suppress/settings intention down- (IDEA-72320 )--,c,https://github.com/JetBrains/intellij-community,"diff --git a/java/java-tests/testData/codeInsight/daemonCodeAnalyzer/quickFix/emptyIntention/LowPriority.java b/java/java-tests/testData/codeInsight/daemonCodeAnalyzer/quickFix/emptyIntention/LowPriority.java +new file mode 100644 +index 0000000000000..51882b4824e92 +--- /dev/null ++++ b/java/java-tests/testData/codeInsight/daemonCodeAnalyzer/quickFix/emptyIntention/LowPriority.java +@@ -0,0 +1,6 @@ ++class Test { ++ void method() { ++ final String i = """"; ++ i = """"; ++ } ++} +\ No newline at end of file +diff --git a/java/java-tests/testSrc/com/intellij/codeInsight/daemon/quickFix/EmptyIntentionInspectionQuickFixTest.java b/java/java-tests/testSrc/com/intellij/codeInsight/daemon/quickFix/EmptyIntentionInspectionQuickFixTest.java +index 3381bec67ed7f..f1d7457625e91 100644 +--- a/java/java-tests/testSrc/com/intellij/codeInsight/daemon/quickFix/EmptyIntentionInspectionQuickFixTest.java ++++ b/java/java-tests/testSrc/com/intellij/codeInsight/daemon/quickFix/EmptyIntentionInspectionQuickFixTest.java +@@ -4,6 +4,7 @@ + import com.intellij.codeInsight.intention.IntentionAction; + import com.intellij.codeInspection.LocalInspectionTool; + import com.intellij.codeInspection.ProblemsHolder; ++import com.intellij.codeInspection.defUse.DefUseInspection; + import com.intellij.psi.JavaElementVisitor; + import com.intellij.psi.PsiElementVisitor; + import com.intellij.psi.PsiLiteralExpression; +@@ -26,7 +27,7 @@ protected String getBasePath() { + + @Override + protected LocalInspectionTool[] configureLocalInspectionTools() { +- return new LocalInspectionTool[]{new LocalInspectionTool() { ++ return new LocalInspectionTool[]{new DefUseInspection(), new LocalInspectionTool() { + @Override + @Nls + @NotNull +@@ -74,4 +75,26 @@ public void testX() throws Exception { + } + assertEquals(1, emptyActions.size()); + } ++ ++ public void testLowPriority() throws Exception { ++ configureByFile(getBasePath() + ""/LowPriority.java""); ++ List emptyActions = getAvailableActions(); ++ int i = 0; ++ for(;i < emptyActions.size(); i++) { ++ final IntentionAction intentionAction = emptyActions.get(i); ++ if (""Make 'i' not final"".equals(intentionAction.getText())) { ++ break; ++ } ++ if (intentionAction instanceof EmptyIntentionAction) { ++ fail(""Low priority action prior to quick fix""); ++ } ++ } ++ assertTrue(i < emptyActions.size()); ++ for (; i < emptyActions.size(); i++) { ++ if (emptyActions.get(i) instanceof EmptyIntentionAction) { ++ return; ++ } ++ } ++ fail(""Missed inspection setting action""); ++ } + } +diff --git a/platform/lang-impl/src/com/intellij/codeInsight/intention/EmptyIntentionAction.java 
b/platform/lang-impl/src/com/intellij/codeInsight/intention/EmptyIntentionAction.java +index 8808e65288f17..f74376b27b1d0 100644 +--- a/platform/lang-impl/src/com/intellij/codeInsight/intention/EmptyIntentionAction.java ++++ b/platform/lang-impl/src/com/intellij/codeInsight/intention/EmptyIntentionAction.java +@@ -29,7 +29,7 @@ + * User: anna + * Date: May 11, 2005 + */ +-public final class EmptyIntentionAction implements IntentionAction{ ++public final class EmptyIntentionAction implements IntentionAction, LowPriorityAction{ + private final String myName; + + public EmptyIntentionAction(@NotNull String name) {" +ac93bc54a2c1a7b17d3a0b57fc9a24ec9d334c78,Delta Spike,"DELTASPIKE-289 WindowContext cleanup +",c,https://github.com/apache/deltaspike,"diff --git a/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/spi/scope/window/WindowContext.java b/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/spi/scope/window/WindowContext.java +index fbd585cdb..14663c5af 100644 +--- a/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/spi/scope/window/WindowContext.java ++++ b/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/spi/scope/window/WindowContext.java +@@ -30,7 +30,8 @@ + * session as @SessionScoped bean. + *

+ *

Every WindowContext is uniquely identified via a +- * 'windowId'. Each Thread is associated with at most ++ * 'windowId' inside the current Session. ++ * Each Thread is associated with at most + * one single windowId at a time. The {@link WindowContext} + * is the interface which allows resolving the current windowId + * associated with this very Thread.

+@@ -47,21 +48,12 @@ public interface WindowContext + * If no WindowContext exists with the very windowId we will create a new one. + * @param windowId + */ +- void activateWindowContext(String windowId); ++ void activateWindow(String windowId); + + /** +- * close the WindowContext with the currently activated windowId for the very Thread. ++ * close the WindowContext with the given windowId. + * @return true if any did exist, false otherwise + */ +- boolean closeCurrentWindowContext(); - --// DirectedMultigraph subgraph = g.subgraph(vertices); --// assertEquals(5, subgraph.order()); --// assertEquals( (5 * 4) / 2, subgraph.size()); - --// // Remove an existing edge --// assertTrue(subgraph.remove(new SimpleDirectedTypedEdge(""type-1"",0, 1))); --// assertEquals( (5 * 4) / 2 - 1, subgraph.size()); --// assertEquals(5, subgraph.order()); --// assertEquals(10, g.order()); --// assertEquals( (9*10)/2 - 1, g.size()); +- /** +- * Close all WindowContexts which are managed by the WindowContextManager. +- * This is necessary when the session gets closed down or the application closes. +- * @return +- */ +- void destroy(); - --// // Remove a non-existent edge, which should have no effect even though --// // the edge is present in the backing graph --// assertFalse(subgraph.remove(new SimpleDirectedTypedEdge(""type-1"",0, 6))); --// assertEquals( (5 * 4) / 2 - 1, subgraph.size()); --// assertEquals(5, subgraph.order()); --// assertEquals(10, g.order()); --// assertEquals( (9*10)/2 - 1, g.size()); --// } ++ boolean closeWindow(String windowId); + + } +diff --git a/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/util/context/AbstractContext.java b/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/util/context/AbstractContext.java +index 583faf80c..e30931ca2 100644 +--- a/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/util/context/AbstractContext.java ++++ b/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/util/context/AbstractContext.java +@@ -154,9 +154,11 @@ public void destroyAllActive() + } + + /** +- * destroys all the Contextual Instances in the specified ContextualStorage. ++ * Destroys all the Contextual Instances in the specified ContextualStorage. ++ * This is a static method to allow various holder objects to cleanup ++ * properly in @PreDestroy. 
+ */ +- public void destroyAllActive(ContextualStorage storage) ++ public static void destroyAllActive(ContextualStorage storage) + { + Map> contextMap = storage.getStorage(); + for (Map.Entry> entry : contextMap.entrySet()) +diff --git a/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/scope/window/WindowBeanHolder.java b/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/scope/window/WindowBeanHolder.java +index d51342f41..8acd0e663 100644 +--- a/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/scope/window/WindowBeanHolder.java ++++ b/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/scope/window/WindowBeanHolder.java +@@ -18,12 +18,14 @@ + */ + package org.apache.deltaspike.core.impl.scope.window; + ++import javax.annotation.PreDestroy; + import javax.enterprise.context.SessionScoped; + import javax.enterprise.inject.spi.BeanManager; + import java.io.Serializable; + import java.util.Map; + import java.util.concurrent.ConcurrentHashMap; + ++import org.apache.deltaspike.core.util.context.AbstractContext; + import org.apache.deltaspike.core.util.context.ContextualStorage; + + /** +@@ -41,6 +43,7 @@ public class WindowBeanHolder implements Serializable + */ + private volatile Map storageMap = new ConcurrentHashMap(); + ++ //X TODO review usage + public Map getStorageMap() + { + return storageMap; +@@ -86,4 +89,18 @@ public Map forceNewStorage() + storageMap = new ConcurrentHashMap(); + return oldStorageMap; + } ++ ++ @PreDestroy ++ public void destroyBeans() ++ { ++ // we replace the old windowBeanHolder beans with a new storage Map ++ // an afterwards destroy the old Beans without having to care about any syncs. ++ Map oldWindowContextStorages = forceNewStorage(); ++ ++ for (ContextualStorage contextualStorage : oldWindowContextStorages.values()) ++ { ++ AbstractContext.destroyAllActive(contextualStorage); ++ } ++ ++ } + } +diff --git a/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/scope/window/WindowContextImpl.java b/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/scope/window/WindowContextImpl.java +index f20d02fe1..e9aa32661 100644 +--- a/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/scope/window/WindowContextImpl.java ++++ b/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/scope/window/WindowContextImpl.java +@@ -23,8 +23,6 @@ + import javax.enterprise.inject.spi.BeanManager; + + import java.lang.annotation.Annotation; +-import java.util.Map; +-import java.util.concurrent.ConcurrentHashMap; + + import org.apache.deltaspike.core.api.scope.WindowScoped; + import org.apache.deltaspike.core.spi.scope.window.WindowContext; +@@ -38,13 +36,18 @@ + public class WindowContextImpl extends AbstractContext implements WindowContext + { + /** +- * all the {@link WindowContext}s which are active in this very Session. ++ * Holds the currently active windowId of each Request + */ +- private Map windowContexts = new ConcurrentHashMap(); - + private WindowIdHolder windowIdHolder; ++ ++ /** ++ * Contains the stored WindowScoped contextual instances. 
++ */ + private WindowBeanHolder windowBeanHolder; + ++ /** ++ * needed for serialisation and passivationId ++ */ + private BeanManager beanManager; + + +@@ -55,8 +58,20 @@ public WindowContextImpl(BeanManager beanManager) + this.beanManager = beanManager; + } + ++ /** ++ * We need to pass the session scoped windowbean holder and the ++ * requestscoped windowIdHolder in a later phase because ++ * getBeans is only allowed from AfterDeploymentValidation onwards. ++ */ ++ void initWindowContext(WindowBeanHolder windowBeanHolder, WindowIdHolder windowIdHolder) ++ { ++ this.windowBeanHolder = windowBeanHolder; ++ this.windowIdHolder = windowIdHolder; ++ } ++ ++ + @Override +- public void activateWindowContext(String windowId) ++ public void activateWindow(String windowId) + { + windowIdHolder.setWindowId(windowId); + } +@@ -68,16 +83,15 @@ public String getCurrentWindowId() + } + + @Override +- public boolean closeCurrentWindowContext() ++ public boolean closeWindow(String windowId) + { +- String windowId = windowIdHolder.getWindowId(); + if (windowId == null) + { + return false; + } + +- WindowContext windowContext = windowContexts.get(windowId); +- if (windowContext == null) ++ ContextualStorage windowStorage = windowBeanHolder.getContextualStorage(beanManager, windowId); ++ if (windowStorage == null) + { + return false; + } +@@ -85,19 +99,6 @@ public boolean closeCurrentWindowContext() + return true; + } + +- @Override +- public synchronized void destroy() +- { +- // we replace the old windowBeanHolder beans with a new storage Map +- // an afterwards destroy the old Beans without having to care about any syncs. +- Map oldWindowContextStorages = windowBeanHolder.forceNewStorage(); - --// /****************************************************************** --// * --// * --// * SubgraphVertexView tests --// * --// * --// ******************************************************************/ +- for (ContextualStorage contextualStorage : oldWindowContextStorages.values()) +- { +- destroyAllActive(contextualStorage); +- } +- } - + @Override + protected ContextualStorage getContextualStorage(boolean createIfNotExist) + { +@@ -127,9 +128,4 @@ public boolean isActive() + return windowId != null; + } + +- void initWindowContext(WindowBeanHolder windowBeanHolder, WindowIdHolder windowIdHolder) +- { +- this.windowBeanHolder = windowBeanHolder; +- this.windowIdHolder = windowIdHolder; +- } + } +diff --git a/deltaspike/core/impl/src/test/java/org/apache/deltaspike/test/core/impl/scope/window/DefaultWindowContextTest.java b/deltaspike/core/impl/src/test/java/org/apache/deltaspike/test/core/impl/scope/window/DefaultWindowContextTest.java +index 68408fda5..39640b8d4 100644 +--- a/deltaspike/core/impl/src/test/java/org/apache/deltaspike/test/core/impl/scope/window/DefaultWindowContextTest.java ++++ b/deltaspike/core/impl/src/test/java/org/apache/deltaspike/test/core/impl/scope/window/DefaultWindowContextTest.java +@@ -68,14 +68,14 @@ public void testWindowScoedBean() + Assert.assertNotNull(someWindowScopedBean); + + { +- windowContext.activateWindowContext(""window1""); ++ windowContext.activateWindow(""window1""); + someWindowScopedBean.setValue(""Hans""); + Assert.assertEquals(""Hans"", someWindowScopedBean.getValue()); + } + + // now we switch it away to another 'window' + { +- windowContext.activateWindowContext(""window2""); ++ windowContext.activateWindow(""window2""); + Assert.assertNull(someWindowScopedBean.getValue()); + someWindowScopedBean.setValue(""Karl""); + Assert.assertEquals(""Karl"", 
someWindowScopedBean.getValue()); +@@ -83,7 +83,7 @@ public void testWindowScoedBean() + + // and now back to the first window + { +- windowContext.activateWindowContext(""window1""); ++ windowContext.activateWindow(""window1""); + + // which must still contain the old value + Assert.assertEquals(""Hans"", someWindowScopedBean.getValue()); +@@ -91,7 +91,7 @@ public void testWindowScoedBean() + + // and again back to the second window + { +- windowContext.activateWindowContext(""window2""); ++ windowContext.activateWindow(""window2""); + + // which must still contain the old value of the 2nd window + Assert.assertEquals(""Karl"", someWindowScopedBean.getValue());" +a035e9fd8a5bde10c26338d7b23f75dbf59f1352,drools,JBRULES-2121: JavaDialect isn't creating unique ids- - fixed name that is checked--git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@26929 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-,c,https://github.com/kiegroup/drools,"diff --git a/drools-compiler/src/main/java/org/drools/rule/builder/dialect/java/JavaDialect.java b/drools-compiler/src/main/java/org/drools/rule/builder/dialect/java/JavaDialect.java +index 8d9bf344fe1..e6a9f723ae8 100644 +--- a/drools-compiler/src/main/java/org/drools/rule/builder/dialect/java/JavaDialect.java ++++ b/drools-compiler/src/main/java/org/drools/rule/builder/dialect/java/JavaDialect.java +@@ -721,7 +721,7 @@ public static String getUniqueLegalName(final String packageName, + + counter++; + final String fileName = packageName.replaceAll( ""\\."", +- ""/"" ) + newName + ""_"" + counter + ext; ++ ""/"" ) + ""/"" + newName + ""_"" + counter + ""."" + ext; + + //MVEL:test null to Fix failing test on org.drools.rule.builder.dialect.mvel.MVELConsequenceBuilderTest.testImperativeCodeError() + exists = src != null && src.isAvailable( fileName );" +01e706466e561557d8591b3031cd85ae39b0559a,intellij-community,gradle: correctly set TestModuleProperties for- modules containing '-' in names (IDEA-151590)--,c,https://github.com/JetBrains/intellij-community,"diff --git a/platform/external-system-api/src/com/intellij/openapi/externalSystem/model/project/ModuleData.java b/platform/external-system-api/src/com/intellij/openapi/externalSystem/model/project/ModuleData.java +index 4179681ab59ba..c2c06d2180435 100644 +--- a/platform/external-system-api/src/com/intellij/openapi/externalSystem/model/project/ModuleData.java ++++ b/platform/external-system-api/src/com/intellij/openapi/externalSystem/model/project/ModuleData.java +@@ -3,7 +3,6 @@ + import com.intellij.ide.highlighter.ModuleFileType; + import com.intellij.openapi.externalSystem.model.ProjectSystemId; + import com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil; +-import com.intellij.openapi.util.io.FileUtil; + import com.intellij.util.containers.ContainerUtil; + import org.jetbrains.annotations.NotNull; + import org.jetbrains.annotations.Nullable; +@@ -58,7 +57,7 @@ protected ModuleData(@NotNull String id, + @NotNull String internalName, + @NotNull String moduleFileDirectoryPath, + @NotNull String externalConfigPath) { +- super(owner, externalName, FileUtil.sanitizeFileName(internalName)); ++ super(owner, externalName, internalName); + myId = id; + myModuleTypeId = typeId; + myExternalConfigPath = externalConfigPath; +diff --git a/plugins/gradle/src/org/jetbrains/plugins/gradle/service/project/BaseGradleProjectResolverExtension.java b/plugins/gradle/src/org/jetbrains/plugins/gradle/service/project/BaseGradleProjectResolverExtension.java +index 2a7f6b2eb6c25..9f67df07d6893 100644 +--- 
a/plugins/gradle/src/org/jetbrains/plugins/gradle/service/project/BaseGradleProjectResolverExtension.java ++++ b/plugins/gradle/src/org/jetbrains/plugins/gradle/service/project/BaseGradleProjectResolverExtension.java +@@ -234,8 +234,8 @@ public DataNode createModule(@NotNull IdeaModule gradleModule, @NotN + } + + @NotNull +- public String getInternalModuleName(@NotNull IdeaModule gradleModule, @NotNull String sourceSetName) { +- return gradleModule.getName() + ""_"" + sourceSetName; ++ private static String getInternalModuleName(@NotNull IdeaModule gradleModule, @NotNull String sourceSetName) { ++ return FileUtil.sanitizeFileName(gradleModule.getName() + ""_"" + sourceSetName); + } + + @Override +diff --git a/plugins/gradle/testSources/org/jetbrains/plugins/gradle/importing/GradleMiscImportingTest.java b/plugins/gradle/testSources/org/jetbrains/plugins/gradle/importing/GradleMiscImportingTest.java +index c28a1745846c8..88c0cb575d438 100644 +--- a/plugins/gradle/testSources/org/jetbrains/plugins/gradle/importing/GradleMiscImportingTest.java ++++ b/plugins/gradle/testSources/org/jetbrains/plugins/gradle/importing/GradleMiscImportingTest.java +@@ -59,6 +59,20 @@ public void testTestModuleProperties() throws Exception { + assertSame(productionModule, testModuleProperties.getProductionModule()); + } + ++ @Test ++ public void testTestModulePropertiesForModuleWithHyphenInName() throws Exception { ++ createSettingsFile(""rootProject.name='my-project'""); ++ importProject( ++ ""apply plugin: 'java'"" ++ ); ++ ++ assertModules(""my-project"", ""my_project_main"", ""my_project_test""); ++ ++ final Module testModule = getModule(""my_project_test""); ++ TestModuleProperties testModuleProperties = TestModuleProperties.getInstance(testModule); ++ assertEquals(""my_project_main"", testModuleProperties.getProductionModuleName()); ++ } ++ + @Test + public void testInheritProjectJdkForModules() throws Exception { + importProject(" +663575068c45608bfd45348ec98d1a26834cbb51,tapiji,"Cleans up build properties and plugin description files. + +Based on previous refactorings, build properties got out-of-sync with the plug-in content. This change also cleans warnings from `plugin.xml` files. +(cherry picked from commit 9ae85d634223cec5c208e29c63b81c773fccf0be) +",p,https://github.com/tapiji/tapiji,"diff --git a/org.eclipse.babel.tapiji.tools.core.ui/build.properties b/org.eclipse.babel.tapiji.tools.core.ui/build.properties +index 285b8bf4..f48d8dca 100644 +--- a/org.eclipse.babel.tapiji.tools.core.ui/build.properties ++++ b/org.eclipse.babel.tapiji.tools.core.ui/build.properties +@@ -3,4 +3,10 @@ output.. 
= bin/ + bin.includes = META-INF/,\ + .,\ + plugin.xml,\ +- icons/ ++ icons/,\ ++ about.html,\ ++ bin/,\ ++ epl-v10.html ++src.includes = icons/,\ ++ epl-v10.html,\ ++ about.html +diff --git a/org.eclipse.babel.tapiji.tools.core/build.properties b/org.eclipse.babel.tapiji.tools.core/build.properties +index 58be59e2..d9a05cb6 100644 +--- a/org.eclipse.babel.tapiji.tools.core/build.properties ++++ b/org.eclipse.babel.tapiji.tools.core/build.properties +@@ -6,10 +6,8 @@ bin.includes = plugin.xml,\ + about.html,\ + epl-v10.html,\ + bin/,\ +- src/,\ + resourcebundle.jar +-src.includes = src/,\ +- schema/,\ ++src.includes = schema/,\ + about.html,\ + epl-v10.html + jars.compile.order = .,\ +diff --git a/org.eclipse.babel.tapiji.tools.java.ui/build.properties b/org.eclipse.babel.tapiji.tools.java.ui/build.properties +index 5435750f..f28f57e2 100644 +--- a/org.eclipse.babel.tapiji.tools.java.ui/build.properties ++++ b/org.eclipse.babel.tapiji.tools.java.ui/build.properties +@@ -3,6 +3,8 @@ output.. = bin/ + bin.includes = META-INF/,\ + .,\ + plugin.xml,\ +- epl-v10.html +-src.includes = src/,\ +- epl-v10.html ++ epl-v10.html,\ ++ about.html,\ ++ bin/ ++src.includes = epl-v10.html,\ ++ about.html +diff --git a/org.eclipse.babel.tapiji.tools.java.ui/plugin.xml b/org.eclipse.babel.tapiji.tools.java.ui/plugin.xml +index ba577346..666fd7e0 100644 +--- a/org.eclipse.babel.tapiji.tools.java.ui/plugin.xml ++++ b/org.eclipse.babel.tapiji.tools.java.ui/plugin.xml +@@ -3,9 +3,9 @@ + + +- +- ++ + + +diff --git a/org.eclipse.babel.tapiji.tools.java/build.properties b/org.eclipse.babel.tapiji.tools.java/build.properties +index e9863e28..8971f49e 100644 +--- a/org.eclipse.babel.tapiji.tools.java/build.properties ++++ b/org.eclipse.babel.tapiji.tools.java/build.properties +@@ -2,4 +2,8 @@ source.. = src/ + output.. = bin/ + bin.includes = META-INF/,\ + .,\ +- plugin.xml ++ epl-v10.html,\ ++ about.html,\ ++ bin/ ++src.includes = epl-v10.html,\ ++ about.html +diff --git a/org.eclipse.babel.tapiji.tools.rbmanager/build.properties b/org.eclipse.babel.tapiji.tools.rbmanager/build.properties +index f176c838..030c8540 100644 +--- a/org.eclipse.babel.tapiji.tools.rbmanager/build.properties ++++ b/org.eclipse.babel.tapiji.tools.rbmanager/build.properties +@@ -2,9 +2,13 @@ source.. = src/ + output.. = bin/ + bin.includes = META-INF/,\ + .,\ +- plugin.xml,\ + bin/,\ +- icons/ ++ icons/,\ ++ epl-v10.html,\ ++ about.html,\ ++ plugin.xml + src.includes = icons/,\ +- src/,\ +- bin/ +\ No newline at end of file ++ bin/,\ ++ epl-v10.html,\ ++ about.html ++" +3990e8b478b1d958479c173c74946e38360cfd17,hadoop,Merge r1503933 from trunk to branch-2 for YARN-513.- Create common proxy client for communicating with RM (Xuan Gong & Jian He via- bikas)--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1503935 13f79535-47bb-0310-9956-ffa450edef68-,a,https://github.com/apache/hadoop,"diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt +index 65d19bff9d839..4d6cb00b23eca 100644 +--- a/hadoop-yarn-project/CHANGES.txt ++++ b/hadoop-yarn-project/CHANGES.txt +@@ -465,6 +465,9 @@ Release 2.1.0-beta - 2013-07-02 + YARN-521. Augment AM - RM client module to be able to request containers + only at specific locations (Sandy Ryza via bikas) + ++ YARN-513. Create common proxy client for communicating with RM. (Xuan Gong ++ & Jian He via bikas) ++ + OPTIMIZATIONS + + YARN-512. 
Log aggregation root directory check is more expensive than it +diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java +index 44c35c3d58b28..b14e65225200d 100644 +--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java ++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java +@@ -655,17 +655,17 @@ public class YarnConfiguration extends Configuration { + public static final long DEFAULT_NM_PROCESS_KILL_WAIT_MS = + 2000; + +- /** Max time to wait to establish a connection to RM when NM starts ++ /** Max time to wait to establish a connection to RM + */ +- public static final String RESOURCEMANAGER_CONNECT_WAIT_SECS = +- NM_PREFIX + ""resourcemanager.connect.wait.secs""; +- public static final int DEFAULT_RESOURCEMANAGER_CONNECT_WAIT_SECS = ++ public static final String RESOURCEMANAGER_CONNECT_MAX_WAIT_SECS = ++ RM_PREFIX + ""resourcemanager.connect.max.wait.secs""; ++ public static final int DEFAULT_RESOURCEMANAGER_CONNECT_MAX_WAIT_SECS = + 15*60; + +- /** Time interval between each NM attempt to connect to RM ++ /** Time interval between each attempt to connect to RM + */ + public static final String RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS = +- NM_PREFIX + ""resourcemanager.connect.retry_interval.secs""; ++ RM_PREFIX + ""resourcemanager.connect.retry_interval.secs""; + public static final long DEFAULT_RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS + = 30; + +diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/ClientRMProxy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/ClientRMProxy.java +new file mode 100644 +index 0000000000000..f70b44ce3a8db +--- /dev/null ++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/ClientRMProxy.java +@@ -0,0 +1,65 @@ ++/** ++* Licensed to the Apache Software Foundation (ASF) under one ++* or more contributor license agreements. See the NOTICE file ++* distributed with this work for additional information ++* regarding copyright ownership. The ASF licenses this file ++* to you under the Apache License, Version 2.0 (the ++* ""License""); you may not use this file except in compliance ++* with the License. You may obtain a copy of the License at ++* ++* http://www.apache.org/licenses/LICENSE-2.0 ++* ++* Unless required by applicable law or agreed to in writing, software ++* distributed under the License is distributed on an ""AS IS"" BASIS, ++* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ++* See the License for the specific language governing permissions and ++* limitations under the License. 
++*/ ++ ++package org.apache.hadoop.yarn.client; ++ ++import java.io.IOException; ++import java.net.InetSocketAddress; ++ ++import org.apache.commons.logging.Log; ++import org.apache.commons.logging.LogFactory; ++import org.apache.hadoop.conf.Configuration; ++import org.apache.hadoop.yarn.api.ApplicationClientProtocol; ++import org.apache.hadoop.yarn.api.ApplicationMasterProtocol; ++import org.apache.hadoop.yarn.conf.YarnConfiguration; ++import org.apache.hadoop.yarn.server.api.ResourceManagerAdministrationProtocol; ++ ++public class ClientRMProxy extends RMProxy{ ++ ++ private static final Log LOG = LogFactory.getLog(ClientRMProxy.class); ++ ++ public static T createRMProxy(final Configuration conf, ++ final Class protocol) throws IOException { ++ InetSocketAddress rmAddress = getRMAddress(conf, protocol); ++ return createRMProxy(conf, protocol, rmAddress); ++ } ++ ++ private static InetSocketAddress getRMAddress(Configuration conf, Class protocol) { ++ if (protocol == ApplicationClientProtocol.class) { ++ return conf.getSocketAddr(YarnConfiguration.RM_ADDRESS, ++ YarnConfiguration.DEFAULT_RM_ADDRESS, ++ YarnConfiguration.DEFAULT_RM_PORT); ++ } else if (protocol == ResourceManagerAdministrationProtocol.class) { ++ return conf.getSocketAddr( ++ YarnConfiguration.RM_ADMIN_ADDRESS, ++ YarnConfiguration.DEFAULT_RM_ADMIN_ADDRESS, ++ YarnConfiguration.DEFAULT_RM_ADMIN_PORT); ++ } else if (protocol == ApplicationMasterProtocol.class) { ++ return conf.getSocketAddr( ++ YarnConfiguration.RM_SCHEDULER_ADDRESS, ++ YarnConfiguration.DEFAULT_RM_SCHEDULER_ADDRESS, ++ YarnConfiguration.DEFAULT_RM_SCHEDULER_PORT); ++ } else { ++ String message = ""Unsupported protocol found when creating the proxy "" + ++ ""connection to ResourceManager: "" + ++ ((protocol != null) ? protocol.getClass().getName() : ""null""); ++ LOG.error(message); ++ throw new IllegalStateException(message); ++ } ++ } ++} +diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/YarnClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/YarnClient.java +index e8dca61d32a0b..22d80c6e8d90b 100644 +--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/YarnClient.java ++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/YarnClient.java +@@ -19,7 +19,6 @@ + package org.apache.hadoop.yarn.client.api; + + import java.io.IOException; +-import java.net.InetSocketAddress; + import java.util.List; + import java.util.Set; + +@@ -54,25 +53,6 @@ public static YarnClient createYarnClient() { + return client; + } + +- /** +- * Create a new instance of YarnClient. +- */ +- @Public +- public static YarnClient createYarnClient(InetSocketAddress rmAddress) { +- YarnClient client = new YarnClientImpl(rmAddress); +- return client; +- } - --// @Test(expected=UnsupportedOperationException.class) public void testSubgraphVerticesAdd() { --// DirectedMultigraph g = new DirectedMultigraph(); +- /** +- * Create a new instance of YarnClient. 
+- */ +- @Public +- public static YarnClient createYarnClient(String name, +- InetSocketAddress rmAddress) { +- YarnClient client = new YarnClientImpl(name, rmAddress); +- return client; +- } - --// // fully connected --// for (int i = 0; i < 10; i++) { --// for (int j = i+1; j < 10; ++j) --// g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); --// } + @Private + protected YarnClient(String name) { + super(name); +diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/AMRMClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/AMRMClientImpl.java +index 0f088a0604b6e..4119a0cb1de7e 100644 +--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/AMRMClientImpl.java ++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/AMRMClientImpl.java +@@ -19,8 +19,6 @@ + package org.apache.hadoop.yarn.client.api.impl; + + import java.io.IOException; +-import java.net.InetSocketAddress; +-import java.security.PrivilegedAction; + import java.util.ArrayList; + import java.util.Collection; + import java.util.Collections; +@@ -42,7 +40,6 @@ + import org.apache.hadoop.classification.InterfaceStability.Unstable; + import org.apache.hadoop.conf.Configuration; + import org.apache.hadoop.ipc.RPC; +-import org.apache.hadoop.security.UserGroupInformation; + import org.apache.hadoop.yarn.api.ApplicationMasterProtocol; + import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest; + import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse; +@@ -56,16 +53,16 @@ + import org.apache.hadoop.yarn.api.records.Priority; + import org.apache.hadoop.yarn.api.records.Resource; + import org.apache.hadoop.yarn.api.records.ResourceRequest; ++import org.apache.hadoop.yarn.client.ClientRMProxy; + import org.apache.hadoop.yarn.client.api.AMRMClient; ++import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest; + import org.apache.hadoop.yarn.client.api.InvalidContainerRequestException; + import org.apache.hadoop.yarn.client.api.NMTokenCache; +-import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest; + import org.apache.hadoop.yarn.conf.YarnConfiguration; + import org.apache.hadoop.yarn.exceptions.YarnException; + import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; + import org.apache.hadoop.yarn.factories.RecordFactory; + import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; +-import org.apache.hadoop.yarn.ipc.YarnRPC; + import org.apache.hadoop.yarn.util.RackResolver; + + import com.google.common.annotations.VisibleForTesting; +@@ -171,28 +168,11 @@ protected void serviceInit(Configuration conf) throws Exception { + @Override + protected void serviceStart() throws Exception { + final YarnConfiguration conf = new YarnConfiguration(getConfig()); +- final YarnRPC rpc = YarnRPC.create(conf); +- final InetSocketAddress rmAddress = conf.getSocketAddr( +- YarnConfiguration.RM_SCHEDULER_ADDRESS, +- YarnConfiguration.DEFAULT_RM_SCHEDULER_ADDRESS, +- YarnConfiguration.DEFAULT_RM_SCHEDULER_PORT); - --// // (n * (n-1)) / 2 --// assertEquals( (10 * 9) / 2, g.size()); --// assertEquals(10, g.order()); +- UserGroupInformation currentUser; + try { +- currentUser = UserGroupInformation.getCurrentUser(); ++ rmClient = ClientRMProxy.createRMProxy(conf, ApplicationMasterProtocol.class); + } catch (IOException e) { + throw new YarnRuntimeException(e); 
+ } - --// Set vertices = new LinkedHashSet(); --// for (int i = 0; i < 5; ++i) --// vertices.add(i); +- // CurrentUser should already have AMToken loaded. +- rmClient = currentUser.doAs(new PrivilegedAction() { +- @Override +- public ApplicationMasterProtocol run() { +- return (ApplicationMasterProtocol) rpc.getProxy(ApplicationMasterProtocol.class, rmAddress, +- conf); +- } +- }); +- LOG.debug(""Connecting to ResourceManager at "" + rmAddress); + super.serviceStart(); + } + +diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/YarnClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/YarnClientImpl.java +index b3b8bdf4316bb..4398359862b06 100644 +--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/YarnClientImpl.java ++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/YarnClientImpl.java +@@ -59,11 +59,12 @@ + import org.apache.hadoop.yarn.api.records.Token; + import org.apache.hadoop.yarn.api.records.YarnApplicationState; + import org.apache.hadoop.yarn.api.records.YarnClusterMetrics; ++import org.apache.hadoop.yarn.client.ClientRMProxy; + import org.apache.hadoop.yarn.client.api.YarnClient; + import org.apache.hadoop.yarn.client.api.YarnClientApplication; + import org.apache.hadoop.yarn.conf.YarnConfiguration; + import org.apache.hadoop.yarn.exceptions.YarnException; +-import org.apache.hadoop.yarn.ipc.YarnRPC; ++import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; + import org.apache.hadoop.yarn.util.Records; + + import com.google.common.annotations.VisibleForTesting; +@@ -81,16 +82,7 @@ public class YarnClientImpl extends YarnClient { + private static final String ROOT = ""root""; + + public YarnClientImpl() { +- this(null); +- } +- +- public YarnClientImpl(InetSocketAddress rmAddress) { +- this(YarnClientImpl.class.getName(), rmAddress); +- } - --// DirectedMultigraph subgraph = g.subgraph(vertices); --// assertEquals(5, subgraph.order()); --// assertEquals( (5 * 4) / 2, subgraph.size()); +- public YarnClientImpl(String name, InetSocketAddress rmAddress) { +- super(name); +- this.rmAddress = rmAddress; ++ super(YarnClientImpl.class.getName()); + } + + private static InetSocketAddress getRmAddress(Configuration conf) { +@@ -100,9 +92,7 @@ private static InetSocketAddress getRmAddress(Configuration conf) { + + @Override + protected void serviceInit(Configuration conf) throws Exception { +- if (this.rmAddress == null) { +- this.rmAddress = getRmAddress(conf); +- } ++ this.rmAddress = getRmAddress(conf); + statePollIntervalMillis = conf.getLong( + YarnConfiguration.YARN_CLIENT_APP_SUBMISSION_POLL_INTERVAL_MS, + YarnConfiguration.DEFAULT_YARN_CLIENT_APP_SUBMISSION_POLL_INTERVAL_MS); +@@ -111,12 +101,11 @@ protected void serviceInit(Configuration conf) throws Exception { + + @Override + protected void serviceStart() throws Exception { +- YarnRPC rpc = YarnRPC.create(getConfig()); - --// Set test = subgraph.vertices(); --// assertEquals(5, test.size()); +- this.rmClient = (ApplicationClientProtocol) rpc.getProxy( +- ApplicationClientProtocol.class, rmAddress, getConfig()); +- if (LOG.isDebugEnabled()) { +- LOG.debug(""Connecting to ResourceManager at "" + rmAddress); ++ try { ++ rmClient = ClientRMProxy.createRMProxy(getConfig(), ++ ApplicationClientProtocol.class); ++ } catch (IOException e) { ++ throw new YarnRuntimeException(e); + } 
+ super.serviceStart(); + } +diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java +index 6426fe9dbc77e..11335c0d8f68d 100644 +--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java ++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java +@@ -19,8 +19,6 @@ + package org.apache.hadoop.yarn.client.cli; + + import java.io.IOException; +-import java.net.InetSocketAddress; +-import java.security.PrivilegedAction; + import java.util.Arrays; + + import org.apache.hadoop.classification.InterfaceAudience.Private; +@@ -31,11 +29,11 @@ + import org.apache.hadoop.security.UserGroupInformation; + import org.apache.hadoop.util.Tool; + import org.apache.hadoop.util.ToolRunner; ++import org.apache.hadoop.yarn.client.ClientRMProxy; + import org.apache.hadoop.yarn.conf.YarnConfiguration; + import org.apache.hadoop.yarn.exceptions.YarnException; + import org.apache.hadoop.yarn.factories.RecordFactory; + import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; +-import org.apache.hadoop.yarn.ipc.YarnRPC; + import org.apache.hadoop.yarn.server.api.ResourceManagerAdministrationProtocol; + import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshAdminAclsRequest; + import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshNodesRequest; +@@ -164,32 +162,10 @@ private static void printUsage(String cmd) { + } + } + +- private static UserGroupInformation getUGI(Configuration conf +- ) throws IOException { +- return UserGroupInformation.getCurrentUser(); +- } - --// // Add a vertex --// assertTrue(test.add(5)); --// assertEquals(6, test.size()); --// assertEquals(6, subgraph.order()); --// assertEquals(11, g.order()); --// assertEquals( (5*4)/2, subgraph.size()); --// } + private ResourceManagerAdministrationProtocol createAdminProtocol() throws IOException { + // Get the current configuration + final YarnConfiguration conf = new YarnConfiguration(getConf()); - --// @Test(expected=UnsupportedOperationException.class) public void testSubgraphVerticesRemove() { --// DirectedMultigraph g = new DirectedMultigraph(); +- // Create the client +- final InetSocketAddress addr = conf.getSocketAddr( +- YarnConfiguration.RM_ADMIN_ADDRESS, +- YarnConfiguration.DEFAULT_RM_ADMIN_ADDRESS, +- YarnConfiguration.DEFAULT_RM_ADMIN_PORT); +- final YarnRPC rpc = YarnRPC.create(conf); +- +- ResourceManagerAdministrationProtocol adminProtocol = +- getUGI(conf).doAs(new PrivilegedAction() { +- @Override +- public ResourceManagerAdministrationProtocol run() { +- return (ResourceManagerAdministrationProtocol) rpc.getProxy(ResourceManagerAdministrationProtocol.class, +- addr, conf); +- } +- }); - --// // fully connected --// for (int i = 0; i < 10; i++) { --// for (int j = i+1; j < 10; ++j) --// g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); --// } +- return adminProtocol; ++ return ClientRMProxy.createRMProxy(conf, ResourceManagerAdministrationProtocol.class); + } + + private int refreshQueues() throws IOException, YarnException { +diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/RMProxy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/RMProxy.java +new file mode 100644 +index 
0000000000000..e4493b5a469b9 +--- /dev/null ++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/RMProxy.java +@@ -0,0 +1,125 @@ ++/** ++ * Licensed to the Apache Software Foundation (ASF) under one ++ * or more contributor license agreements. See the NOTICE file ++ * distributed with this work for additional information ++ * regarding copyright ownership. The ASF licenses this file ++ * to you under the Apache License, Version 2.0 (the ++ * ""License""); you may not use this file except in compliance ++ * with the License. You may obtain a copy of the License at ++ * ++ * http://www.apache.org/licenses/LICENSE-2.0 ++ * ++ * Unless required by applicable law or agreed to in writing, software ++ * distributed under the License is distributed on an ""AS IS"" BASIS, ++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ++ * See the License for the specific language governing permissions and ++ * limitations under the License. ++ */ ++ ++package org.apache.hadoop.yarn.client; ++ ++import java.io.IOException; ++import java.net.ConnectException; ++import java.net.InetSocketAddress; ++import java.security.PrivilegedAction; ++import java.util.HashMap; ++import java.util.Map; ++import java.util.concurrent.TimeUnit; ++ ++import org.apache.commons.logging.Log; ++import org.apache.commons.logging.LogFactory; ++import org.apache.hadoop.classification.InterfaceAudience; ++import org.apache.hadoop.classification.InterfaceStability; ++import org.apache.hadoop.conf.Configuration; ++import org.apache.hadoop.io.retry.RetryPolicies; ++import org.apache.hadoop.io.retry.RetryPolicy; ++import org.apache.hadoop.io.retry.RetryProxy; ++import org.apache.hadoop.security.UserGroupInformation; ++import org.apache.hadoop.yarn.conf.YarnConfiguration; ++import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; ++import org.apache.hadoop.yarn.ipc.YarnRPC; ++ ++@InterfaceAudience.Public ++@InterfaceStability.Evolving ++public class RMProxy { ++ ++ private static final Log LOG = LogFactory.getLog(RMProxy.class); ++ ++ @SuppressWarnings(""unchecked"") ++ public static T createRMProxy(final Configuration conf, ++ final Class protocol, InetSocketAddress rmAddress) throws IOException { ++ RetryPolicy retryPolicy = createRetryPolicy(conf); ++ T proxy = RMProxy.getProxy(conf, protocol, rmAddress); ++ LOG.info(""Connecting to ResourceManager at "" + rmAddress); ++ return (T) RetryProxy.create(protocol, proxy, retryPolicy); ++ } ++ ++ @SuppressWarnings(""unchecked"") ++ protected static T getProxy(final Configuration conf, ++ final Class protocol, final InetSocketAddress rmAddress) ++ throws IOException { ++ return (T) UserGroupInformation.getCurrentUser().doAs( ++ new PrivilegedAction() { ++ ++ @Override ++ public T run() { ++ return (T) YarnRPC.create(conf).getProxy(protocol, rmAddress, conf); ++ } ++ }); ++ } ++ ++ public static RetryPolicy createRetryPolicy(Configuration conf) { ++ long rmConnectWaitMS = ++ conf.getInt( ++ YarnConfiguration.RESOURCEMANAGER_CONNECT_MAX_WAIT_SECS, ++ YarnConfiguration.DEFAULT_RESOURCEMANAGER_CONNECT_MAX_WAIT_SECS) ++ * 1000; ++ long rmConnectionRetryIntervalMS = ++ conf.getLong( ++ YarnConfiguration.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS, ++ YarnConfiguration ++ .DEFAULT_RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS) ++ * 1000; ++ ++ if (rmConnectionRetryIntervalMS < 0) { ++ throw new YarnRuntimeException(""Invalid Configuration. 
"" + ++ YarnConfiguration.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS + ++ "" should not be negative.""); ++ } ++ ++ boolean waitForEver = (rmConnectWaitMS == -1000); ++ ++ if (waitForEver) { ++ return RetryPolicies.RETRY_FOREVER; ++ } else { ++ if (rmConnectWaitMS < 0) { ++ throw new YarnRuntimeException(""Invalid Configuration. "" ++ + YarnConfiguration.RESOURCEMANAGER_CONNECT_MAX_WAIT_SECS ++ + "" can be -1, but can not be other negative numbers""); ++ } ++ ++ // try connect once ++ if (rmConnectWaitMS < rmConnectionRetryIntervalMS) { ++ LOG.warn(YarnConfiguration.RESOURCEMANAGER_CONNECT_MAX_WAIT_SECS ++ + "" is smaller than "" ++ + YarnConfiguration.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS ++ + "". Only try connect once.""); ++ rmConnectWaitMS = 0; ++ } ++ } ++ ++ RetryPolicy retryPolicy = ++ RetryPolicies.retryUpToMaximumTimeWithFixedSleep(rmConnectWaitMS, ++ rmConnectionRetryIntervalMS, ++ TimeUnit.MILLISECONDS); ++ ++ Map, RetryPolicy> exceptionToPolicyMap = ++ new HashMap, RetryPolicy>(); ++ exceptionToPolicyMap.put(ConnectException.class, retryPolicy); ++ //TO DO: after HADOOP-9576, IOException can be changed to EOFException ++ exceptionToPolicyMap.put(IOException.class, retryPolicy); ++ ++ return RetryPolicies.retryByException(RetryPolicies.TRY_ONCE_THEN_FAIL, ++ exceptionToPolicyMap); ++ } ++} +diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/ServerRMProxy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/ServerRMProxy.java +new file mode 100644 +index 0000000000000..ef9154fde1b5f +--- /dev/null ++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/ServerRMProxy.java +@@ -0,0 +1,55 @@ ++/** ++* Licensed to the Apache Software Foundation (ASF) under one ++* or more contributor license agreements. See the NOTICE file ++* distributed with this work for additional information ++* regarding copyright ownership. The ASF licenses this file ++* to you under the Apache License, Version 2.0 (the ++* ""License""); you may not use this file except in compliance ++* with the License. You may obtain a copy of the License at ++* ++* http://www.apache.org/licenses/LICENSE-2.0 ++* ++* Unless required by applicable law or agreed to in writing, software ++* distributed under the License is distributed on an ""AS IS"" BASIS, ++* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ++* See the License for the specific language governing permissions and ++* limitations under the License. 
++*/ ++ ++package org.apache.hadoop.yarn.server.api; ++ ++import java.io.IOException; ++import java.net.InetSocketAddress; ++ ++import org.apache.commons.logging.Log; ++import org.apache.commons.logging.LogFactory; ++import org.apache.hadoop.conf.Configuration; ++import org.apache.hadoop.yarn.client.RMProxy; ++import org.apache.hadoop.yarn.conf.YarnConfiguration; ++ ++public class ServerRMProxy extends RMProxy{ ++ ++ private static final Log LOG = LogFactory.getLog(ServerRMProxy.class); ++ ++ public static T createRMProxy(final Configuration conf, ++ final Class protocol) throws IOException { ++ InetSocketAddress rmAddress = getRMAddress(conf, protocol); ++ return createRMProxy(conf, protocol, rmAddress); ++ } ++ ++ private static InetSocketAddress getRMAddress(Configuration conf, Class protocol) { ++ if (protocol == ResourceTracker.class) { ++ return conf.getSocketAddr( ++ YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS, ++ YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_ADDRESS, ++ YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_PORT); ++ } ++ else { ++ String message = ""Unsupported protocol found when creating the proxy "" + ++ ""connection to ResourceManager: "" + ++ ((protocol != null) ? protocol.getClass().getName() : ""null""); ++ LOG.error(message); ++ throw new IllegalStateException(message); ++ } ++ } ++} +diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceTrackerPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceTrackerPBClientImpl.java +index 396204cf2dbdb..40f6874623fdf 100644 +--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceTrackerPBClientImpl.java ++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceTrackerPBClientImpl.java +@@ -18,6 +18,7 @@ + + package org.apache.hadoop.yarn.server.api.impl.pb.client; + ++import java.io.Closeable; + import java.io.IOException; + import java.net.InetSocketAddress; + +@@ -41,7 +42,7 @@ + + import com.google.protobuf.ServiceException; + +-public class ResourceTrackerPBClientImpl implements ResourceTracker { ++public class ResourceTrackerPBClientImpl implements ResourceTracker, Closeable { + + private ResourceTrackerPB proxy; + +@@ -50,7 +51,14 @@ public ResourceTrackerPBClientImpl(long clientVersion, InetSocketAddress addr, C + proxy = (ResourceTrackerPB)RPC.getProxy( + ResourceTrackerPB.class, clientVersion, addr, conf); + } +- ++ ++ @Override ++ public void close() { ++ if(this.proxy != null) { ++ RPC.stopProxy(this.proxy); ++ } ++ } ++ + @Override + public RegisterNodeManagerResponse registerNodeManager( + RegisterNodeManagerRequest request) throws YarnException, +diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java +index 550cdc5a98f4f..b0e71e915633e 100644 +--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java ++++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java +@@ -19,7 +19,7 @@ + package org.apache.hadoop.yarn.server.nodemanager; + + import java.io.IOException; +-import java.net.InetSocketAddress; ++import java.net.ConnectException; + import java.util.ArrayList; + import java.util.Collections; + import java.util.HashMap; +@@ -33,6 +33,7 @@ + import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.classification.InterfaceAudience.Private; + import org.apache.hadoop.conf.Configuration; ++import org.apache.hadoop.ipc.RPC; + import org.apache.hadoop.security.UserGroupInformation; + import org.apache.hadoop.service.AbstractService; + import org.apache.hadoop.yarn.api.records.ApplicationId; +@@ -47,9 +48,9 @@ + import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; + import org.apache.hadoop.yarn.factories.RecordFactory; + import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; +-import org.apache.hadoop.yarn.ipc.YarnRPC; + import org.apache.hadoop.yarn.server.api.ResourceManagerConstants; + import org.apache.hadoop.yarn.server.api.ResourceTracker; ++import org.apache.hadoop.yarn.server.api.ServerRMProxy; + import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatRequest; + import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatResponse; + import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerRequest; +@@ -77,7 +78,6 @@ public class NodeStatusUpdaterImpl extends AbstractService implements + private NodeId nodeId; + private long nextHeartBeatInterval; + private ResourceTracker resourceTracker; +- private InetSocketAddress rmAddress; + private Resource totalResource; + private int httpPort; + private volatile boolean isStopped; +@@ -91,9 +91,6 @@ public class NodeStatusUpdaterImpl extends AbstractService implements + + private final NodeHealthCheckerService healthChecker; + private final NodeManagerMetrics metrics; +- private long rmConnectWaitMS; +- private long rmConnectionRetryIntervalMS; +- private boolean waitForEver; + + private Runnable statusUpdaterRunnable; + private Thread statusUpdater; +@@ -110,11 +107,6 @@ public NodeStatusUpdaterImpl(Context context, Dispatcher dispatcher, + + @Override + protected void serviceInit(Configuration conf) throws Exception { +- this.rmAddress = conf.getSocketAddr( +- YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS, +- YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_ADDRESS, +- YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_PORT); - --// // (n * (n-1)) / 2 --// assertEquals( (10 * 9) / 2, g.size()); --// assertEquals(10, g.order()); + int memoryMb = + conf.getInt( + YarnConfiguration.NM_PMEM_MB, YarnConfiguration.DEFAULT_NM_PMEM_MB); +@@ -153,6 +145,7 @@ protected void serviceStart() throws Exception { + try { + // Registration has to be in start so that ContainerManager can get the + // perNM tokens needed to authenticate ContainerTokens. ++ this.resourceTracker = getRMClient(); + registerWithRM(); + super.serviceStart(); + startStatusUpdater(); +@@ -167,6 +160,7 @@ protected void serviceStart() throws Exception { + protected void serviceStop() throws Exception { + // Interrupt the updater. 
+ this.isStopped = true; ++ stopRMProxy(); + super.serviceStop(); + } + +@@ -188,6 +182,13 @@ protected void rebootNodeStatusUpdater() { + } + } + ++ @VisibleForTesting ++ protected void stopRMProxy() { ++ if(this.resourceTracker != null) { ++ RPC.stopProxy(this.resourceTracker); ++ } ++ } ++ + @Private + protected boolean isTokenKeepAliveEnabled(Configuration conf) { + return conf.getBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, +@@ -195,93 +196,22 @@ protected boolean isTokenKeepAliveEnabled(Configuration conf) { + && UserGroupInformation.isSecurityEnabled(); + } + +- protected ResourceTracker getRMClient() { ++ @VisibleForTesting ++ protected ResourceTracker getRMClient() throws IOException { + Configuration conf = getConfig(); +- YarnRPC rpc = YarnRPC.create(conf); +- return (ResourceTracker) rpc.getProxy(ResourceTracker.class, rmAddress, +- conf); ++ return ServerRMProxy.createRMProxy(conf, ResourceTracker.class); + } + + @VisibleForTesting + protected void registerWithRM() throws YarnException, IOException { +- Configuration conf = getConfig(); +- rmConnectWaitMS = +- conf.getInt( +- YarnConfiguration.RESOURCEMANAGER_CONNECT_WAIT_SECS, +- YarnConfiguration.DEFAULT_RESOURCEMANAGER_CONNECT_WAIT_SECS) +- * 1000; +- rmConnectionRetryIntervalMS = +- conf.getLong( +- YarnConfiguration.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS, +- YarnConfiguration +- .DEFAULT_RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS) +- * 1000; - --// Set vertices = new LinkedHashSet(); --// for (int i = 0; i < 5; ++i) --// vertices.add(i); +- if(rmConnectionRetryIntervalMS < 0) { +- throw new YarnRuntimeException(""Invalid Configuration. "" + +- YarnConfiguration.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS + +- "" should not be negative.""); +- } - --// DirectedMultigraph subgraph = g.subgraph(vertices); --// assertEquals(5, subgraph.order()); --// assertEquals( (5 * 4) / 2, subgraph.size()); +- waitForEver = (rmConnectWaitMS == -1000); - --// Set test = subgraph.vertices(); --// assertEquals(5, test.size()); +- if(! waitForEver) { +- if(rmConnectWaitMS < 0) { +- throw new YarnRuntimeException(""Invalid Configuration. "" + +- YarnConfiguration.RESOURCEMANAGER_CONNECT_WAIT_SECS + +- "" can be -1, but can not be other negative numbers""); +- } - --// // Add a vertex --// assertTrue(test.remove(0)); --// assertEquals(4, test.size()); --// assertEquals(4, subgraph.order()); --// assertEquals(9, g.order()); --// assertEquals( (4*3)/2, subgraph.size()); --// } +- //try connect once +- if(rmConnectWaitMS < rmConnectionRetryIntervalMS) { +- LOG.warn(YarnConfiguration.RESOURCEMANAGER_CONNECT_WAIT_SECS +- + "" is smaller than "" +- + YarnConfiguration.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS +- + "". 
Only try connect once.""); +- rmConnectWaitMS = 0; +- } +- } - --// @Test(expected=UnsupportedOperationException.class) public void testSubgraphVerticesIteratorRemove() { --// DirectedMultigraph g = new DirectedMultigraph(); +- int rmRetryCount = 0; +- long waitStartTime = System.currentTimeMillis(); - --// // fully connected --// for (int i = 0; i < 10; i++) { --// for (int j = i+1; j < 10; ++j) --// g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); --// } + RegisterNodeManagerRequest request = + recordFactory.newRecordInstance(RegisterNodeManagerRequest.class); + request.setHttpPort(this.httpPort); + request.setResource(this.totalResource); + request.setNodeId(this.nodeId); +- RegisterNodeManagerResponse regNMResponse; - --// // (n * (n-1)) / 2 --// assertEquals( (10 * 9) / 2, g.size()); --// assertEquals(10, g.order()); +- while(true) { +- try { +- rmRetryCount++; +- LOG.info(""Connecting to ResourceManager at "" + this.rmAddress +- + "". current no. of attempts is "" + rmRetryCount); +- this.resourceTracker = getRMClient(); +- regNMResponse = +- this.resourceTracker.registerNodeManager(request); +- this.rmIdentifier = regNMResponse.getRMIdentifier(); +- break; +- } catch(Throwable e) { +- LOG.warn(""Trying to connect to ResourceManager, "" + +- ""current no. of failed attempts is ""+rmRetryCount); +- if(System.currentTimeMillis() - waitStartTime < rmConnectWaitMS +- || waitForEver) { +- try { +- LOG.info(""Sleeping for "" + rmConnectionRetryIntervalMS/1000 +- + "" seconds before next connection retry to RM""); +- Thread.sleep(rmConnectionRetryIntervalMS); +- } catch(InterruptedException ex) { +- //done nothing +- } +- } else { +- String errorMessage = ""Failed to Connect to RM, "" + +- ""no. of failed attempts is ""+rmRetryCount; +- LOG.error(errorMessage,e); +- throw new YarnRuntimeException(errorMessage,e); +- } +- } +- } ++ RegisterNodeManagerResponse regNMResponse = ++ resourceTracker.registerNodeManager(request); ++ this.rmIdentifier = regNMResponse.getRMIdentifier(); + // if the Resourcemanager instructs NM to shutdown. + if (NodeAction.SHUTDOWN.equals(regNMResponse.getNodeAction())) { + String message = +@@ -426,8 +356,6 @@ public void run() { + // Send heartbeat + try { + NodeHeartbeatResponse response = null; +- int rmRetryCount = 0; +- long waitStartTime = System.currentTimeMillis(); + NodeStatus nodeStatus = getNodeStatusAndUpdateContainersInContext(); + nodeStatus.setResponseId(lastHeartBeatID); + +@@ -440,31 +368,7 @@ public void run() { + request + .setLastKnownNMTokenMasterKey(NodeStatusUpdaterImpl.this.context + .getNMTokenSecretManager().getCurrentKey()); +- while (!isStopped) { +- try { +- rmRetryCount++; +- response = resourceTracker.nodeHeartbeat(request); +- break; +- } catch (Throwable e) { +- LOG.warn(""Trying to heartbeat to ResourceManager, "" +- + ""current no. of failed attempts is "" + rmRetryCount); +- if(System.currentTimeMillis() - waitStartTime < rmConnectWaitMS +- || waitForEver) { +- try { +- LOG.info(""Sleeping for "" + rmConnectionRetryIntervalMS/1000 +- + "" seconds before next heartbeat to RM""); +- Thread.sleep(rmConnectionRetryIntervalMS); +- } catch(InterruptedException ex) { +- //done nothing +- } +- } else { +- String errorMessage = ""Failed to heartbeat to RM, "" + +- ""no. 
of failed attempts is ""+rmRetryCount; +- LOG.error(errorMessage,e); +- throw new YarnRuntimeException(errorMessage,e); +- } +- } +- } ++ response = resourceTracker.nodeHeartbeat(request); + //get next heartbeat interval from response + nextHeartBeatInterval = response.getNextHeartBeatInterval(); + updateMasterKeys(response); +@@ -508,11 +412,11 @@ public void run() { + dispatcher.getEventHandler().handle( + new CMgrCompletedAppsEvent(appsToCleanup)); + } +- } catch (YarnRuntimeException e) { ++ } catch (ConnectException e) { + //catch and throw the exception if tried MAX wait time to connect RM + dispatcher.getEventHandler().handle( + new NodeManagerEvent(NodeManagerEventType.SHUTDOWN)); +- throw e; ++ throw new YarnRuntimeException(e); + } catch (Throwable e) { + // TODO Better error handling. Thread can die with the rest of the + // NM still running. +diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/MockNodeStatusUpdater.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/MockNodeStatusUpdater.java +index e93778e2987ef..a3e1faf310e54 100644 +--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/MockNodeStatusUpdater.java ++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/MockNodeStatusUpdater.java +@@ -61,6 +61,10 @@ public MockNodeStatusUpdater(Context context, Dispatcher dispatcher, + protected ResourceTracker getRMClient() { + return resourceTracker; + } ++ @Override ++ protected void stopRMProxy() { ++ return; ++ } + + private static class MockResourceTracker implements ResourceTracker { + private int heartBeatID; +diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java +index 668b85b6511bd..294c93ed3b84a 100644 +--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java ++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java +@@ -107,6 +107,11 @@ protected ResourceTracker getRMClient() { + return new LocalRMInterface(); + }; + ++ @Override ++ protected void stopRMProxy() { ++ return; ++ } ++ + @Override + protected void startStatusUpdater() { + return; // Don't start any updating thread. 
+diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java +index e17131fd3a1dc..2a3e3d579ca03 100644 +--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java ++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java +@@ -41,6 +41,8 @@ + import org.apache.hadoop.conf.Configuration; + import org.apache.hadoop.fs.FileContext; + import org.apache.hadoop.fs.Path; ++import org.apache.hadoop.io.retry.RetryPolicy; ++import org.apache.hadoop.io.retry.RetryProxy; + import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; + import org.apache.hadoop.net.NetUtils; + import org.apache.hadoop.service.ServiceOperations; +@@ -53,6 +55,7 @@ + import org.apache.hadoop.yarn.api.records.ContainerStatus; + import org.apache.hadoop.yarn.api.records.NodeId; + import org.apache.hadoop.yarn.api.records.Resource; ++import org.apache.hadoop.yarn.client.RMProxy; + import org.apache.hadoop.yarn.conf.YarnConfiguration; + import org.apache.hadoop.yarn.event.Dispatcher; + import org.apache.hadoop.yarn.event.EventHandler; +@@ -60,9 +63,9 @@ + import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; + import org.apache.hadoop.yarn.factories.RecordFactory; + import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; +-import org.apache.hadoop.yarn.ipc.RPCUtil; + import org.apache.hadoop.yarn.security.ContainerTokenIdentifier; + import org.apache.hadoop.yarn.server.api.ResourceTracker; ++import org.apache.hadoop.yarn.server.api.ServerRMProxy; + import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatRequest; + import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatResponse; + import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerRequest; +@@ -103,11 +106,17 @@ public class TestNodeStatusUpdater { + volatile int heartBeatID = 0; + volatile Throwable nmStartError = null; + private final List registeredNodes = new ArrayList(); +- private final Configuration conf = createNMConfig(); ++ private boolean triggered = false; ++ private Configuration conf; + private NodeManager nm; + private boolean containerStatusBackupSuccessfully = true; + private List completedContainerStatusList = new ArrayList(); + ++ @Before ++ public void setUp() { ++ conf = createNMConfig(); ++ } ++ + @After + public void tearDown() { + this.registeredNodes.clear(); +@@ -274,6 +283,11 @@ public MyNodeStatusUpdater(Context context, Dispatcher dispatcher, + protected ResourceTracker getRMClient() { + return resourceTracker; + } ++ ++ @Override ++ protected void stopRMProxy() { ++ return; ++ } + } + + private class MyNodeStatusUpdater2 extends NodeStatusUpdaterImpl { +@@ -290,6 +304,10 @@ protected ResourceTracker getRMClient() { + return resourceTracker; + } + ++ @Override ++ protected void stopRMProxy() { ++ return; ++ } + } + + private class MyNodeStatusUpdater3 extends NodeStatusUpdaterImpl { +@@ -307,7 +325,12 @@ public MyNodeStatusUpdater3(Context context, Dispatcher dispatcher, + protected ResourceTracker getRMClient() { + return resourceTracker; + } +- ++ ++ @Override ++ 
protected void stopRMProxy() { ++ return; ++ } ++ + @Override + protected boolean isTokenKeepAliveEnabled(Configuration conf) { + return true; +@@ -315,21 +338,16 @@ protected boolean isTokenKeepAliveEnabled(Configuration conf) { + } + + private class MyNodeStatusUpdater4 extends NodeStatusUpdaterImpl { +- public ResourceTracker resourceTracker = +- new MyResourceTracker(this.context); ++ + private Context context; +- private long waitStartTime; + private final long rmStartIntervalMS; + private final boolean rmNeverStart; +- private volatile boolean triggered = false; +- private long durationWhenTriggered = -1; - --// Set vertices = new LinkedHashSet(); --// for (int i = 0; i < 5; ++i) --// vertices.add(i); ++ public ResourceTracker resourceTracker; + public MyNodeStatusUpdater4(Context context, Dispatcher dispatcher, + NodeHealthCheckerService healthChecker, NodeManagerMetrics metrics, + long rmStartIntervalMS, boolean rmNeverStart) { + super(context, dispatcher, healthChecker, metrics); + this.context = context; +- this.waitStartTime = System.currentTimeMillis(); + this.rmStartIntervalMS = rmStartIntervalMS; + this.rmNeverStart = rmNeverStart; + } +@@ -337,25 +355,16 @@ public MyNodeStatusUpdater4(Context context, Dispatcher dispatcher, + @Override + protected void serviceStart() throws Exception { + //record the startup time +- this.waitStartTime = System.currentTimeMillis(); + super.serviceStart(); + } + + @Override +- protected ResourceTracker getRMClient() { +- if (!triggered) { +- long t = System.currentTimeMillis(); +- long duration = t - waitStartTime; +- if (duration <= rmStartIntervalMS +- || rmNeverStart) { +- throw new YarnRuntimeException(""Faking RM start failure as start "" + +- ""delay timer has not expired.""); +- } else { +- //triggering +- triggered = true; +- durationWhenTriggered = duration; +- } +- } ++ protected ResourceTracker getRMClient() throws IOException { ++ RetryPolicy retryPolicy = RMProxy.createRetryPolicy(conf); ++ resourceTracker = ++ (ResourceTracker) RetryProxy.create(ResourceTracker.class, ++ new MyResourceTracker6(this.context, rmStartIntervalMS, ++ rmNeverStart), retryPolicy); + return resourceTracker; + } + +@@ -363,37 +372,35 @@ private boolean isTriggered() { + return triggered; + } + +- private long getWaitStartTime() { +- return waitStartTime; +- } - --// DirectedMultigraph subgraph = g.subgraph(vertices); --// assertEquals(5, subgraph.order()); --// assertEquals( (5 * 4) / 2, subgraph.size()); +- private long getDurationWhenTriggered() { +- return durationWhenTriggered; +- } - --// Set test = subgraph.vertices(); --// assertEquals(5, test.size()); --// Iterator it = test.iterator(); --// assertTrue(it.hasNext()); --// // Remove the first vertex returned --// it.next(); --// it.remove(); -- --// assertEquals(4, test.size()); --// assertEquals(4, subgraph.order()); --// assertEquals(9, g.order()); --// assertEquals( (4*3)/2, subgraph.size()); --// } -+ for (int j = 0; j < 10; ++j) { -+ DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, j); -+ if (i >= j) -+ assertFalse(adjacencyList.contains(e)); -+ else -+ assertTrue(adjacencyList.contains(e)); + @Override +- public String toString() { +- return ""MyNodeStatusUpdater4{"" + +- ""rmNeverStart="" + rmNeverStart + +- "", triggered="" + triggered + +- "", duration="" + durationWhenTriggered + +- "", rmStartIntervalMS="" + rmStartIntervalMS + +- '}'; ++ protected void stopRMProxy() { ++ return; + } + } + ++ ++ + private class MyNodeStatusUpdater5 extends NodeStatusUpdaterImpl { + private 
ResourceTracker resourceTracker; ++ private Configuration conf; + + public MyNodeStatusUpdater5(Context context, Dispatcher dispatcher, +- NodeHealthCheckerService healthChecker, NodeManagerMetrics metrics) { ++ NodeHealthCheckerService healthChecker, NodeManagerMetrics metrics, Configuration conf) { + super(context, dispatcher, healthChecker, metrics); + resourceTracker = new MyResourceTracker5(); ++ this.conf = conf; + } + + @Override + protected ResourceTracker getRMClient() { +- return resourceTracker; ++ RetryPolicy retryPolicy = RMProxy.createRetryPolicy(conf); ++ return (ResourceTracker) RetryProxy.create(ResourceTracker.class, ++ resourceTracker, retryPolicy); ++ } ++ ++ @Override ++ protected void stopRMProxy() { ++ return; + } + } + +@@ -417,15 +424,18 @@ private class MyNodeManager2 extends NodeManager { + public boolean isStopped = false; + private NodeStatusUpdater nodeStatusUpdater; + private CyclicBarrier syncBarrier; +- public MyNodeManager2 (CyclicBarrier syncBarrier) { ++ private Configuration conf; ++ ++ public MyNodeManager2 (CyclicBarrier syncBarrier, Configuration conf) { + this.syncBarrier = syncBarrier; ++ this.conf = conf; + } + @Override + protected NodeStatusUpdater createNodeStatusUpdater(Context context, + Dispatcher dispatcher, NodeHealthCheckerService healthChecker) { + nodeStatusUpdater = + new MyNodeStatusUpdater5(context, dispatcher, healthChecker, +- metrics); ++ metrics, conf); + return nodeStatusUpdater; + } + +@@ -577,7 +587,7 @@ public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) + .get(4).getState() == ContainerState.RUNNING + && request.getNodeStatus().getContainersStatuses().get(4) + .getContainerId().getId() == 5); +- throw new YarnRuntimeException(""Lost the heartbeat response""); ++ throw new java.net.ConnectException(""Lost the heartbeat response""); + } else if (heartBeatID == 2) { + Assert.assertEquals(request.getNodeStatus().getContainersStatuses() + .size(), 7); +@@ -646,7 +656,63 @@ public RegisterNodeManagerResponse registerNodeManager( + public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) + throws YarnException, IOException { + heartBeatID++; +- throw RPCUtil.getRemoteException(""NodeHeartbeat exception""); ++ throw new java.net.ConnectException( ++ ""NodeHeartbeat exception""); ++ } ++ } ++ ++ private class MyResourceTracker6 implements ResourceTracker { ++ ++ private final Context context; ++ private long rmStartIntervalMS; ++ private boolean rmNeverStart; ++ private final long waitStartTime; ++ ++ public MyResourceTracker6(Context context, long rmStartIntervalMS, ++ boolean rmNeverStart) { ++ this.context = context; ++ this.rmStartIntervalMS = rmStartIntervalMS; ++ this.rmNeverStart = rmNeverStart; ++ this.waitStartTime = System.currentTimeMillis(); ++ } ++ ++ @Override ++ public RegisterNodeManagerResponse registerNodeManager( ++ RegisterNodeManagerRequest request) throws YarnException, IOException, ++ IOException { ++ if (System.currentTimeMillis() - waitStartTime <= rmStartIntervalMS ++ || rmNeverStart) { ++ throw new java.net.ConnectException(""Faking RM start failure as start "" ++ + ""delay timer has not expired.""); ++ } else { ++ NodeId nodeId = request.getNodeId(); ++ Resource resource = request.getResource(); ++ LOG.info(""Registering "" + nodeId.toString()); ++ // NOTE: this really should be checking against the config value ++ InetSocketAddress expected = NetUtils.getConnectAddress( ++ conf.getSocketAddr(YarnConfiguration.NM_ADDRESS, null, -1)); ++ 
Assert.assertEquals(NetUtils.getHostPortString(expected), ++ nodeId.toString()); ++ Assert.assertEquals(5 * 1024, resource.getMemory()); ++ registeredNodes.add(nodeId); ++ ++ RegisterNodeManagerResponse response = recordFactory ++ .newRecordInstance(RegisterNodeManagerResponse.class); ++ triggered = true; ++ return response; ++ } ++ } ++ ++ @Override ++ public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) ++ throws YarnException, IOException { ++ NodeStatus nodeStatus = request.getNodeStatus(); ++ nodeStatus.setResponseId(heartBeatID++); ++ ++ NodeHeartbeatResponse nhResponse = YarnServerBuilderUtils. ++ newNodeHeartbeatResponse(heartBeatID, NodeAction.NORMAL, null, ++ null, null, null, 1000L); ++ return nhResponse; + } + } + +@@ -843,8 +909,7 @@ public void testNMConnectionToRM() throws Exception { + final long connectionRetryIntervalSecs = 1; + //Waiting for rmStartIntervalMS, RM will be started + final long rmStartIntervalMS = 2*1000; +- YarnConfiguration conf = createNMConfig(); +- conf.setLong(YarnConfiguration.RESOURCEMANAGER_CONNECT_WAIT_SECS, ++ conf.setLong(YarnConfiguration.RESOURCEMANAGER_CONNECT_MAX_WAIT_SECS, + connectionWaitSecs); + conf.setLong(YarnConfiguration + .RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS, +@@ -907,8 +972,6 @@ protected NodeStatusUpdater createUpdater(Context context, + } + long duration = System.currentTimeMillis() - waitStartTime; + MyNodeStatusUpdater4 myUpdater = (MyNodeStatusUpdater4) updater; +- Assert.assertTrue(""Updater was never started"", +- myUpdater.getWaitStartTime()>0); + Assert.assertTrue(""NM started before updater triggered"", + myUpdater.isTriggered()); + Assert.assertTrue(""NM should have connected to RM after "" +@@ -1037,13 +1100,13 @@ public void testNodeStatusUpdaterRetryAndNMShutdown() + final long connectionWaitSecs = 1; + final long connectionRetryIntervalSecs = 1; + YarnConfiguration conf = createNMConfig(); +- conf.setLong(YarnConfiguration.RESOURCEMANAGER_CONNECT_WAIT_SECS, ++ conf.setLong(YarnConfiguration.RESOURCEMANAGER_CONNECT_MAX_WAIT_SECS, + connectionWaitSecs); + conf.setLong(YarnConfiguration + .RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS, + connectionRetryIntervalSecs); + CyclicBarrier syncBarrier = new CyclicBarrier(2); +- nm = new MyNodeManager2(syncBarrier); ++ nm = new MyNodeManager2(syncBarrier, conf); + nm.init(conf); + nm.start(); + try { +diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/BaseContainerManagerTest.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/BaseContainerManagerTest.java +index 83d21e1640721..cfcf7f6445e63 100644 +--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/BaseContainerManagerTest.java ++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/BaseContainerManagerTest.java +@@ -117,6 +117,11 @@ protected ResourceTracker getRMClient() { + return new LocalRMInterface(); + }; + ++ @Override ++ protected void stopRMProxy() { ++ return; ++ } ++ + @Override + protected void startStatusUpdater() { + return; // Don't start any updating thread. 
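A second illustrative sketch (not part of the commit): the test changes above wrap mock ResourceTrackers in a RetryProxy built from RMProxy.createRetryPolicy(), so registration and heartbeat failures are retried exactly as in production; the standalone pattern, assuming only those two APIs and a caller-supplied stub named delegate, looks like this.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.retry.RetryPolicy;
import org.apache.hadoop.io.retry.RetryProxy;
import org.apache.hadoop.yarn.client.RMProxy;
import org.apache.hadoop.yarn.server.api.ResourceTracker;

public class RetryWrappedTrackerSketch {
    // Wrap any ResourceTracker (for example a test stub) so that calls failing with
    // ConnectException are retried under the same policy the NodeManager itself uses.
    static ResourceTracker withRetries(Configuration conf, ResourceTracker delegate) {
        RetryPolicy retryPolicy = RMProxy.createRetryPolicy(conf);
        return (ResourceTracker) RetryProxy.create(ResourceTracker.class, delegate, retryPolicy);
    }
}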
+diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java +index cc529739dea79..144b111f83072 100644 +--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java ++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java +@@ -390,6 +390,11 @@ public RegisterNodeManagerResponse registerNodeManager( + } + }; + }; ++ ++ @Override ++ protected void stopRMProxy() { ++ return; ++ } + }; + }; + }" +83d5b1e6a0280cc78625bacc2d3f7d1676c7385e,kotlin,Supported propagation for subclass of- j.u.Collection and similar classes.--,a,https://github.com/JetBrains/kotlin,"diff --git a/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/JavaToKotlinMethodMap.java b/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/JavaToKotlinMethodMap.java +new file mode 100644 +index 0000000000000..cea6587792692 +--- /dev/null ++++ b/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/JavaToKotlinMethodMap.java +@@ -0,0 +1,149 @@ ++/* ++ * Copyright 2010-2012 JetBrains s.r.o. ++ * ++ * Licensed under the Apache License, Version 2.0 (the ""License""); ++ * you may not use this file except in compliance with the License. ++ * You may obtain a copy of the License at ++ * ++ * http://www.apache.org/licenses/LICENSE-2.0 ++ * ++ * Unless required by applicable law or agreed to in writing, software ++ * distributed under the License is distributed on an ""AS IS"" BASIS, ++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ++ * See the License for the specific language governing permissions and ++ * limitations under the License. 
++ */ ++ ++package org.jetbrains.jet.lang.resolve.java; ++ ++import com.google.common.collect.*; ++import com.intellij.openapi.util.Pair; ++import com.intellij.psi.PsiMethod; ++import com.intellij.psi.util.PsiFormatUtil; ++import org.jetbrains.annotations.NotNull; ++import org.jetbrains.jet.lang.descriptors.ClassDescriptor; ++import org.jetbrains.jet.lang.descriptors.FunctionDescriptor; ++import org.jetbrains.jet.lang.resolve.DescriptorUtils; ++import org.jetbrains.jet.lang.resolve.name.Name; ++import org.jetbrains.jet.lang.types.JetType; ++import org.jetbrains.jet.lang.types.TypeUtils; ++import org.jetbrains.jet.lang.types.lang.KotlinBuiltIns; ++import org.jetbrains.jet.resolve.DescriptorRenderer; ++ ++import java.util.Collection; ++import java.util.List; ++import java.util.Map; ++import java.util.Set; ++ ++public class JavaToKotlinMethodMap { ++ public static final JavaToKotlinMethodMap INSTANCE = new JavaToKotlinMethodMap(); ++ ++ private final JavaToKotlinMethodMapGenerated mapContainer = new JavaToKotlinMethodMapGenerated(); ++ ++ private JavaToKotlinMethodMap() { ++ } ++ ++ @NotNull ++ private static Set getAllSuperClasses(@NotNull ClassDescriptor klass) { ++ Set allSupertypes = TypeUtils.getAllSupertypes(klass.getDefaultType()); ++ Set allSuperclasses = Sets.newHashSet(); ++ for (JetType supertype : allSupertypes) { ++ ClassDescriptor superclass = TypeUtils.getClassDescriptor(supertype); ++ assert superclass != null; ++ allSuperclasses.add(superclass); ++ } ++ return allSuperclasses; ++ } ++ ++ @NotNull ++ public List getFunctions(@NotNull PsiMethod psiMethod, @NotNull ClassDescriptor containingClass) { ++ ImmutableCollection classDatas = mapContainer.map.get(psiMethod.getContainingClass().getQualifiedName()); ++ ++ List result = Lists.newArrayList(); ++ ++ Set allSuperClasses = getAllSuperClasses(containingClass); ++ ++ String serializedPsiMethod = serializePsiMethod(psiMethod); ++ for (ClassData classData : classDatas) { ++ String expectedSerializedFunction = classData.method2Function.get(serializedPsiMethod); ++ if (expectedSerializedFunction == null) continue; ++ ++ ClassDescriptor kotlinClass = classData.kotlinClass; ++ if (!allSuperClasses.contains(kotlinClass)) continue; ++ ++ ++ Collection functions = ++ kotlinClass.getDefaultType().getMemberScope().getFunctions(Name.identifier(psiMethod.getName())); ++ ++ for (FunctionDescriptor function : functions) { ++ if (expectedSerializedFunction.equals(serializeFunction(function))) { ++ result.add(function); ++ } + } + } ++ ++ return result; + } + -+ @Test public void testAdjacencyListRemoveEdge() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ for (int i = 0; i < 10; ++i) -+ for (int j = i + 1; j < 10; ++j) -+ g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); ++ @NotNull ++ public static String serializePsiMethod(@NotNull PsiMethod psiMethod) { ++ String externalName = PsiFormatUtil.getExternalName(psiMethod); ++ assert externalName != null : ""couldn't find external name for "" + psiMethod.getText(); ++ return externalName; ++ } ++ ++ @NotNull ++ public static String serializeFunction(@NotNull FunctionDescriptor fun) { ++ return DescriptorRenderer.TEXT.render(fun); ++ } ++ ++ // used in generated code ++ static Pair pair(String a, String b) { ++ return Pair.create(a, b); ++ } ++ ++ // used in generated code ++ static void put( ++ ImmutableMultimap.Builder builder, ++ String javaFqName, ++ String kotlinQualifiedName, ++ Pair... 
methods2Functions ++ ) { ++ ImmutableMap methods2FunctionsMap = pairs2Map(methods2Functions); ++ ++ ClassDescriptor kotlinClass; ++ if (kotlinQualifiedName.contains(""."")) { // Map.Entry and MutableMap.MutableEntry ++ String[] kotlinNames = kotlinQualifiedName.split(""\\.""); ++ assert kotlinNames.length == 2 : ""unexpected qualified name "" + kotlinQualifiedName; ++ ++ ClassDescriptor outerClass = KotlinBuiltIns.getInstance().getBuiltInClassByName(Name.identifier(kotlinNames[0])); ++ kotlinClass = DescriptorUtils.getInnerClassByName(outerClass, kotlinNames[1]); ++ assert kotlinClass != null : ""Class not found: "" + kotlinQualifiedName; ++ } ++ else { ++ kotlinClass = KotlinBuiltIns.getInstance().getBuiltInClassByName(Name.identifier(kotlinQualifiedName)); ++ } ++ ++ builder.put(javaFqName, new ClassData(kotlinClass, methods2FunctionsMap)); ++ } ++ ++ private static ImmutableMap pairs2Map(Pair[] pairs) { ++ ImmutableMap.Builder builder = ImmutableMap.builder(); ++ for (Pair pair : pairs) { ++ builder.put(pair.first, pair.second); ++ } ++ return builder.build(); ++ } ++ ++ static class ClassData { ++ @NotNull ++ public final ClassDescriptor kotlinClass; ++ @NotNull ++ public Map method2Function; ++ ++ public ClassData(@NotNull ClassDescriptor kotlinClass, @NotNull Map method2Function) { ++ this.kotlinClass = kotlinClass; ++ this.method2Function = method2Function; ++ } ++ } ++} +diff --git a/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/JavaToKotlinMethodMapGenerated.java b/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/JavaToKotlinMethodMapGenerated.java +new file mode 100644 +index 0000000000000..70de02d0f8a10 +--- /dev/null ++++ b/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/JavaToKotlinMethodMapGenerated.java +@@ -0,0 +1,245 @@ ++/* ++ * Copyright 2010-2012 JetBrains s.r.o. ++ * ++ * Licensed under the Apache License, Version 2.0 (the ""License""); ++ * you may not use this file except in compliance with the License. ++ * You may obtain a copy of the License at ++ * ++ * http://www.apache.org/licenses/LICENSE-2.0 ++ * ++ * Unless required by applicable law or agreed to in writing, software ++ * distributed under the License is distributed on an ""AS IS"" BASIS, ++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ++ * See the License for the specific language governing permissions and ++ * limitations under the License. ++ */ ++ ++package org.jetbrains.jet.lang.resolve.java; ++ ++import com.google.common.collect.ImmutableMultimap; ++ ++import static org.jetbrains.jet.lang.resolve.java.JavaToKotlinMethodMap.*; ++ ++/* This file is generated by org.jetbrains.jet.generators.GenerateJavaToKotlinMethodMap. DO NOT EDIT! */ ++@SuppressWarnings(""unchecked"") ++class JavaToKotlinMethodMapGenerated { ++ final ImmutableMultimap map; ++ ++ JavaToKotlinMethodMapGenerated() { ++ ImmutableMultimap.Builder b = ImmutableMultimap.builder(); ++ ++ put(b, ""java.lang.String"", ""String"", ++ pair(""java.lang.String int compareTo(java.lang.String)"", ""public open fun compareTo(that : jet.String) : jet.Int defined in jet.String""), ++ pair(""java.lang.String boolean equals(java.lang.Object)"", ""public final fun equals(other : jet.Any?) 
: jet.Boolean defined in jet.String""), ++ pair(""java.lang.String java.lang.String toString()"", ""public open fun toString() : jet.String defined in jet.String"") ++ ); ++ ++ put(b, ""java.lang.CharSequence"", ""CharSequence"", ++ pair(""java.lang.CharSequence java.lang.String toString()"", ""public abstract fun toString() : jet.String defined in jet.CharSequence"") ++ ); ++ ++ put(b, ""java.lang.Throwable"", ""Throwable"", ++ pair(""java.lang.Throwable java.lang.Throwable getCause()"", ""public final fun getCause() : jet.Throwable? defined in jet.Throwable""), ++ pair(""java.lang.Throwable java.lang.String getMessage()"", ""public final fun getMessage() : jet.String? defined in jet.Throwable""), ++ pair(""java.lang.Throwable void printStackTrace()"", ""public final fun printStackTrace() : Unit defined in jet.Throwable"") ++ ); ++ ++ put(b, ""java.lang.Comparable"", ""Comparable"", ++ pair(""java.lang.Comparable int compareTo(T)"", ""public abstract fun compareTo(other : T) : jet.Int defined in jet.Comparable"") ++ ); ++ ++ put(b, ""java.lang.Enum"", ""Enum"", ++ pair(""java.lang.Enum java.lang.String name()"", ""public final fun name() : jet.String defined in jet.Enum""), ++ pair(""java.lang.Enum int ordinal()"", ""public final fun ordinal() : jet.Int defined in jet.Enum"") ++ ); ++ ++ put(b, ""java.lang.Iterable"", ""Iterable"", ++ pair(""java.lang.Iterable java.util.Iterator iterator()"", ""public abstract fun iterator() : jet.Iterator defined in jet.Iterable"") ++ ); ++ ++ put(b, ""java.lang.Iterable"", ""MutableIterable"", ++ pair(""java.lang.Iterable java.util.Iterator iterator()"", ""public abstract fun iterator() : jet.MutableIterator defined in jet.MutableIterable"") ++ ); ++ ++ put(b, ""java.util.Iterator"", ""Iterator"", ++ pair(""java.util.Iterator boolean hasNext()"", ""public abstract fun hasNext() : jet.Boolean defined in jet.Iterator""), ++ pair(""java.util.Iterator E next()"", ""public abstract fun next() : T defined in jet.Iterator"") ++ ); ++ ++ put(b, ""java.util.Iterator"", ""MutableIterator"", ++ pair(""java.util.Iterator boolean hasNext()"", ""public abstract fun hasNext() : jet.Boolean defined in jet.MutableIterator""), ++ pair(""java.util.Iterator E next()"", ""public abstract fun next() : T defined in jet.MutableIterator""), ++ pair(""java.util.Iterator void remove()"", ""public abstract fun remove() : Unit defined in jet.MutableIterator"") ++ ); ++ ++ put(b, ""java.util.Collection"", ""Collection"", ++ pair(""java.util.Collection boolean contains(java.lang.Object)"", ""public abstract fun contains(o : jet.Any?) : jet.Boolean defined in jet.Collection""), ++ pair(""java.util.Collection boolean containsAll(java.util.Collection)"", ""public abstract fun containsAll(c : jet.Collection) : jet.Boolean defined in jet.Collection""), ++ pair(""java.util.Collection boolean equals(java.lang.Object)"", ""public abstract fun equals(other : jet.Any?) 
: jet.Boolean defined in jet.Collection""), ++ pair(""java.util.Collection int hashCode()"", ""public abstract fun hashCode() : jet.Int defined in jet.Collection""), ++ pair(""java.util.Collection boolean isEmpty()"", ""public abstract fun isEmpty() : jet.Boolean defined in jet.Collection""), ++ pair(""java.util.Collection java.util.Iterator iterator()"", ""public abstract fun iterator() : jet.Iterator defined in jet.Collection""), ++ pair(""java.util.Collection int size()"", ""public abstract fun size() : jet.Int defined in jet.Collection""), ++ pair(""java.util.Collection T[] toArray(T[])"", ""public abstract fun toArray(a : jet.Array) : jet.Array defined in jet.Collection""), ++ pair(""java.util.Collection java.lang.Object[] toArray()"", ""public abstract fun toArray() : jet.Array defined in jet.Collection"") ++ ); ++ ++ put(b, ""java.util.Collection"", ""MutableCollection"", ++ pair(""java.util.Collection boolean add(E)"", ""public abstract fun add(e : E) : jet.Boolean defined in jet.MutableCollection""), ++ pair(""java.util.Collection boolean addAll(java.util.Collection)"", ""public abstract fun addAll(c : jet.Collection) : jet.Boolean defined in jet.MutableCollection""), ++ pair(""java.util.Collection void clear()"", ""public abstract fun clear() : Unit defined in jet.MutableCollection""), ++ pair(""java.util.Collection boolean contains(java.lang.Object)"", ""public abstract fun contains(o : jet.Any?) : jet.Boolean defined in jet.MutableCollection""), ++ pair(""java.util.Collection boolean containsAll(java.util.Collection)"", ""public abstract fun containsAll(c : jet.Collection) : jet.Boolean defined in jet.MutableCollection""), ++ pair(""java.util.Collection boolean equals(java.lang.Object)"", ""public abstract fun equals(other : jet.Any?) : jet.Boolean defined in jet.MutableCollection""), ++ pair(""java.util.Collection int hashCode()"", ""public abstract fun hashCode() : jet.Int defined in jet.MutableCollection""), ++ pair(""java.util.Collection boolean isEmpty()"", ""public abstract fun isEmpty() : jet.Boolean defined in jet.MutableCollection""), ++ pair(""java.util.Collection java.util.Iterator iterator()"", ""public abstract fun iterator() : jet.MutableIterator defined in jet.MutableCollection""), ++ pair(""java.util.Collection boolean remove(java.lang.Object)"", ""public abstract fun remove(o : jet.Any?) : jet.Boolean defined in jet.MutableCollection""), ++ pair(""java.util.Collection boolean removeAll(java.util.Collection)"", ""public abstract fun removeAll(c : jet.Collection) : jet.Boolean defined in jet.MutableCollection""), ++ pair(""java.util.Collection boolean retainAll(java.util.Collection)"", ""public abstract fun retainAll(c : jet.Collection) : jet.Boolean defined in jet.MutableCollection""), ++ pair(""java.util.Collection int size()"", ""public abstract fun size() : jet.Int defined in jet.MutableCollection""), ++ pair(""java.util.Collection T[] toArray(T[])"", ""public abstract fun toArray(a : jet.Array) : jet.Array defined in jet.MutableCollection""), ++ pair(""java.util.Collection java.lang.Object[] toArray()"", ""public abstract fun toArray() : jet.Array defined in jet.MutableCollection"") ++ ); ++ ++ put(b, ""java.util.List"", ""List"", ++ pair(""java.util.List boolean contains(java.lang.Object)"", ""public abstract fun contains(o : jet.Any?) 
: jet.Boolean defined in jet.List""), ++ pair(""java.util.List boolean containsAll(java.util.Collection)"", ""public abstract fun containsAll(c : jet.Collection) : jet.Boolean defined in jet.List""), ++ pair(""java.util.List boolean equals(java.lang.Object)"", ""public abstract fun equals(other : jet.Any?) : jet.Boolean defined in jet.List""), ++ pair(""java.util.List E get(int)"", ""public abstract fun get(index : jet.Int) : E defined in jet.List""), ++ pair(""java.util.List int hashCode()"", ""public abstract fun hashCode() : jet.Int defined in jet.List""), ++ pair(""java.util.List int indexOf(java.lang.Object)"", ""public abstract fun indexOf(o : jet.Any?) : jet.Int defined in jet.List""), ++ pair(""java.util.List boolean isEmpty()"", ""public abstract fun isEmpty() : jet.Boolean defined in jet.List""), ++ pair(""java.util.List java.util.Iterator iterator()"", ""public abstract fun iterator() : jet.Iterator defined in jet.List""), ++ pair(""java.util.List int lastIndexOf(java.lang.Object)"", ""public abstract fun lastIndexOf(o : jet.Any?) : jet.Int defined in jet.List""), ++ pair(""java.util.List java.util.ListIterator listIterator()"", ""public abstract fun listIterator() : jet.ListIterator defined in jet.List""), ++ pair(""java.util.List java.util.ListIterator listIterator(int)"", ""public abstract fun listIterator(index : jet.Int) : jet.ListIterator defined in jet.List""), ++ pair(""java.util.List int size()"", ""public abstract fun size() : jet.Int defined in jet.List""), ++ pair(""java.util.List java.util.List subList(int, int)"", ""public abstract fun subList(fromIndex : jet.Int, toIndex : jet.Int) : jet.List defined in jet.List""), ++ pair(""java.util.List T[] toArray(T[])"", ""public abstract fun toArray(a : jet.Array) : jet.Array defined in jet.List""), ++ pair(""java.util.List java.lang.Object[] toArray()"", ""public abstract fun toArray() : jet.Array defined in jet.List"") ++ ); ++ ++ put(b, ""java.util.List"", ""MutableList"", ++ pair(""java.util.List boolean add(E)"", ""public abstract fun add(e : E) : jet.Boolean defined in jet.MutableList""), ++ pair(""java.util.List void add(int, E)"", ""public abstract fun add(index : jet.Int, element : E) : Unit defined in jet.MutableList""), ++ pair(""java.util.List boolean addAll(int, java.util.Collection)"", ""public abstract fun addAll(index : jet.Int, c : jet.Collection) : jet.Boolean defined in jet.MutableList""), ++ pair(""java.util.List boolean addAll(java.util.Collection)"", ""public abstract fun addAll(c : jet.Collection) : jet.Boolean defined in jet.MutableList""), ++ pair(""java.util.List void clear()"", ""public abstract fun clear() : Unit defined in jet.MutableList""), ++ pair(""java.util.List boolean contains(java.lang.Object)"", ""public abstract fun contains(o : jet.Any?) : jet.Boolean defined in jet.MutableList""), ++ pair(""java.util.List boolean containsAll(java.util.Collection)"", ""public abstract fun containsAll(c : jet.Collection) : jet.Boolean defined in jet.MutableList""), ++ pair(""java.util.List boolean equals(java.lang.Object)"", ""public abstract fun equals(other : jet.Any?) : jet.Boolean defined in jet.MutableList""), ++ pair(""java.util.List E get(int)"", ""public abstract fun get(index : jet.Int) : E defined in jet.MutableList""), ++ pair(""java.util.List int hashCode()"", ""public abstract fun hashCode() : jet.Int defined in jet.MutableList""), ++ pair(""java.util.List int indexOf(java.lang.Object)"", ""public abstract fun indexOf(o : jet.Any?) 
: jet.Int defined in jet.MutableList""), ++ pair(""java.util.List boolean isEmpty()"", ""public abstract fun isEmpty() : jet.Boolean defined in jet.MutableList""), ++ pair(""java.util.List java.util.Iterator iterator()"", ""public abstract fun iterator() : jet.Iterator defined in jet.MutableList""), ++ pair(""java.util.List int lastIndexOf(java.lang.Object)"", ""public abstract fun lastIndexOf(o : jet.Any?) : jet.Int defined in jet.MutableList""), ++ pair(""java.util.List java.util.ListIterator listIterator()"", ""public abstract fun listIterator() : jet.MutableListIterator defined in jet.MutableList""), ++ pair(""java.util.List java.util.ListIterator listIterator(int)"", ""public abstract fun listIterator(index : jet.Int) : jet.MutableListIterator defined in jet.MutableList""), ++ pair(""java.util.List E remove(int)"", ""public abstract fun remove(index : jet.Int) : E defined in jet.MutableList""), ++ pair(""java.util.List boolean remove(java.lang.Object)"", ""public abstract fun remove(o : jet.Any?) : jet.Boolean defined in jet.MutableList""), ++ pair(""java.util.List boolean removeAll(java.util.Collection)"", ""public abstract fun removeAll(c : jet.Collection) : jet.Boolean defined in jet.MutableList""), ++ pair(""java.util.List boolean retainAll(java.util.Collection)"", ""public abstract fun retainAll(c : jet.Collection) : jet.Boolean defined in jet.MutableList""), ++ pair(""java.util.List E set(int, E)"", ""public abstract fun set(index : jet.Int, element : E) : E defined in jet.MutableList""), ++ pair(""java.util.List int size()"", ""public abstract fun size() : jet.Int defined in jet.MutableList""), ++ pair(""java.util.List java.util.List subList(int, int)"", ""public abstract fun subList(fromIndex : jet.Int, toIndex : jet.Int) : jet.MutableList defined in jet.MutableList""), ++ pair(""java.util.List T[] toArray(T[])"", ""public abstract fun toArray(a : jet.Array) : jet.Array defined in jet.MutableList""), ++ pair(""java.util.List java.lang.Object[] toArray()"", ""public abstract fun toArray() : jet.Array defined in jet.MutableList"") ++ ); ++ ++ put(b, ""java.util.Set"", ""Set"", ++ pair(""java.util.Set boolean contains(java.lang.Object)"", ""public abstract fun contains(o : jet.Any?) : jet.Boolean defined in jet.Set""), ++ pair(""java.util.Set boolean containsAll(java.util.Collection)"", ""public abstract fun containsAll(c : jet.Collection) : jet.Boolean defined in jet.Set""), ++ pair(""java.util.Set boolean equals(java.lang.Object)"", ""public abstract fun equals(other : jet.Any?) 
: jet.Boolean defined in jet.Set""), ++ pair(""java.util.Set int hashCode()"", ""public abstract fun hashCode() : jet.Int defined in jet.Set""), ++ pair(""java.util.Set boolean isEmpty()"", ""public abstract fun isEmpty() : jet.Boolean defined in jet.Set""), ++ pair(""java.util.Set java.util.Iterator iterator()"", ""public abstract fun iterator() : jet.Iterator defined in jet.Set""), ++ pair(""java.util.Set int size()"", ""public abstract fun size() : jet.Int defined in jet.Set""), ++ pair(""java.util.Set T[] toArray(T[])"", ""public abstract fun toArray(a : jet.Array) : jet.Array defined in jet.Set""), ++ pair(""java.util.Set java.lang.Object[] toArray()"", ""public abstract fun toArray() : jet.Array defined in jet.Set"") ++ ); ++ ++ put(b, ""java.util.Set"", ""MutableSet"", ++ pair(""java.util.Set boolean add(E)"", ""public abstract fun add(e : E) : jet.Boolean defined in jet.MutableSet""), ++ pair(""java.util.Set boolean addAll(java.util.Collection)"", ""public abstract fun addAll(c : jet.Collection) : jet.Boolean defined in jet.MutableSet""), ++ pair(""java.util.Set void clear()"", ""public abstract fun clear() : Unit defined in jet.MutableSet""), ++ pair(""java.util.Set boolean contains(java.lang.Object)"", ""public abstract fun contains(o : jet.Any?) : jet.Boolean defined in jet.MutableSet""), ++ pair(""java.util.Set boolean containsAll(java.util.Collection)"", ""public abstract fun containsAll(c : jet.Collection) : jet.Boolean defined in jet.MutableSet""), ++ pair(""java.util.Set boolean equals(java.lang.Object)"", ""public abstract fun equals(other : jet.Any?) : jet.Boolean defined in jet.MutableSet""), ++ pair(""java.util.Set int hashCode()"", ""public abstract fun hashCode() : jet.Int defined in jet.MutableSet""), ++ pair(""java.util.Set boolean isEmpty()"", ""public abstract fun isEmpty() : jet.Boolean defined in jet.MutableSet""), ++ pair(""java.util.Set java.util.Iterator iterator()"", ""public abstract fun iterator() : jet.MutableIterator defined in jet.MutableSet""), ++ pair(""java.util.Set boolean remove(java.lang.Object)"", ""public abstract fun remove(o : jet.Any?) : jet.Boolean defined in jet.MutableSet""), ++ pair(""java.util.Set boolean removeAll(java.util.Collection)"", ""public abstract fun removeAll(c : jet.Collection) : jet.Boolean defined in jet.MutableSet""), ++ pair(""java.util.Set boolean retainAll(java.util.Collection)"", ""public abstract fun retainAll(c : jet.Collection) : jet.Boolean defined in jet.MutableSet""), ++ pair(""java.util.Set int size()"", ""public abstract fun size() : jet.Int defined in jet.MutableSet""), ++ pair(""java.util.Set T[] toArray(T[])"", ""public abstract fun toArray(a : jet.Array) : jet.Array defined in jet.MutableSet""), ++ pair(""java.util.Set java.lang.Object[] toArray()"", ""public abstract fun toArray() : jet.Array defined in jet.MutableSet"") ++ ); ++ ++ put(b, ""java.util.Map"", ""Map"", ++ pair(""java.util.Map boolean containsKey(java.lang.Object)"", ""public abstract fun containsKey(key : jet.Any?) : jet.Boolean defined in jet.Map""), ++ pair(""java.util.Map boolean containsValue(java.lang.Object)"", ""public abstract fun containsValue(value : jet.Any?) : jet.Boolean defined in jet.Map""), ++ pair(""java.util.Map java.util.Set> entrySet()"", ""public abstract fun entrySet() : jet.Set> defined in jet.Map""), ++ pair(""java.util.Map V get(java.lang.Object)"", ""public abstract fun get(key : jet.Any?) : V? 
defined in jet.Map""), ++ pair(""java.util.Map boolean isEmpty()"", ""public abstract fun isEmpty() : jet.Boolean defined in jet.Map""), ++ pair(""java.util.Map java.util.Set keySet()"", ""public abstract fun keySet() : jet.Set defined in jet.Map""), ++ pair(""java.util.Map int size()"", ""public abstract fun size() : jet.Int defined in jet.Map""), ++ pair(""java.util.Map java.util.Collection values()"", ""public abstract fun values() : jet.Collection defined in jet.Map"") ++ ); ++ ++ put(b, ""java.util.Map"", ""MutableMap"", ++ pair(""java.util.Map void clear()"", ""public abstract fun clear() : Unit defined in jet.MutableMap""), ++ pair(""java.util.Map boolean containsKey(java.lang.Object)"", ""public abstract fun containsKey(key : jet.Any?) : jet.Boolean defined in jet.MutableMap""), ++ pair(""java.util.Map boolean containsValue(java.lang.Object)"", ""public abstract fun containsValue(value : jet.Any?) : jet.Boolean defined in jet.MutableMap""), ++ pair(""java.util.Map java.util.Set> entrySet()"", ""public abstract fun entrySet() : jet.MutableSet> defined in jet.MutableMap""), ++ pair(""java.util.Map V get(java.lang.Object)"", ""public abstract fun get(key : jet.Any?) : V? defined in jet.MutableMap""), ++ pair(""java.util.Map boolean isEmpty()"", ""public abstract fun isEmpty() : jet.Boolean defined in jet.MutableMap""), ++ pair(""java.util.Map java.util.Set keySet()"", ""public abstract fun keySet() : jet.MutableSet defined in jet.MutableMap""), ++ pair(""java.util.Map V put(K, V)"", ""public abstract fun put(key : K, value : V) : V? defined in jet.MutableMap""), ++ pair(""java.util.Map void putAll(java.util.Map)"", ""public abstract fun putAll(m : jet.Map) : Unit defined in jet.MutableMap""), ++ pair(""java.util.Map V remove(java.lang.Object)"", ""public abstract fun remove(key : jet.Any?) : V? defined in jet.MutableMap""), ++ pair(""java.util.Map int size()"", ""public abstract fun size() : jet.Int defined in jet.MutableMap""), ++ pair(""java.util.Map java.util.Collection values()"", ""public abstract fun values() : jet.MutableCollection defined in jet.MutableMap"") ++ ); ++ ++ put(b, ""java.util.Map.Entry"", ""Map.Entry"", ++ pair(""java.util.Map.Entry boolean equals(java.lang.Object)"", ""public abstract fun equals(other : jet.Any?) : jet.Boolean defined in jet.Map.Entry""), ++ pair(""java.util.Map.Entry K getKey()"", ""public abstract fun getKey() : K defined in jet.Map.Entry""), ++ pair(""java.util.Map.Entry V getValue()"", ""public abstract fun getValue() : V defined in jet.Map.Entry""), ++ pair(""java.util.Map.Entry int hashCode()"", ""public abstract fun hashCode() : jet.Int defined in jet.Map.Entry"") ++ ); ++ ++ put(b, ""java.util.Map.Entry"", ""MutableMap.MutableEntry"", ++ pair(""java.util.Map.Entry boolean equals(java.lang.Object)"", ""public abstract fun equals(other : jet.Any?) 
: jet.Boolean defined in jet.MutableMap.MutableEntry""), ++ pair(""java.util.Map.Entry K getKey()"", ""public abstract fun getKey() : K defined in jet.MutableMap.MutableEntry""), ++ pair(""java.util.Map.Entry V getValue()"", ""public abstract fun getValue() : V defined in jet.MutableMap.MutableEntry""), ++ pair(""java.util.Map.Entry int hashCode()"", ""public abstract fun hashCode() : jet.Int defined in jet.MutableMap.MutableEntry""), ++ pair(""java.util.Map.Entry V setValue(V)"", ""public abstract fun setValue(value : V) : V defined in jet.MutableMap.MutableEntry"") ++ ); ++ ++ put(b, ""java.util.ListIterator"", ""ListIterator"", ++ pair(""java.util.ListIterator boolean hasNext()"", ""public abstract fun hasNext() : jet.Boolean defined in jet.ListIterator""), ++ pair(""java.util.ListIterator boolean hasPrevious()"", ""public abstract fun hasPrevious() : jet.Boolean defined in jet.ListIterator""), ++ pair(""java.util.ListIterator E next()"", ""public abstract fun next() : T defined in jet.ListIterator""), ++ pair(""java.util.ListIterator int nextIndex()"", ""public abstract fun nextIndex() : jet.Int defined in jet.ListIterator""), ++ pair(""java.util.ListIterator E previous()"", ""public abstract fun previous() : T defined in jet.ListIterator""), ++ pair(""java.util.ListIterator int previousIndex()"", ""public abstract fun previousIndex() : jet.Int defined in jet.ListIterator"") ++ ); ++ ++ put(b, ""java.util.ListIterator"", ""MutableListIterator"", ++ pair(""java.util.ListIterator void add(E)"", ""public abstract fun add(e : T) : Unit defined in jet.MutableListIterator""), ++ pair(""java.util.ListIterator boolean hasNext()"", ""public abstract fun hasNext() : jet.Boolean defined in jet.MutableListIterator""), ++ pair(""java.util.ListIterator boolean hasPrevious()"", ""public abstract fun hasPrevious() : jet.Boolean defined in jet.MutableListIterator""), ++ pair(""java.util.ListIterator E next()"", ""public abstract fun next() : T defined in jet.MutableListIterator""), ++ pair(""java.util.ListIterator int nextIndex()"", ""public abstract fun nextIndex() : jet.Int defined in jet.MutableListIterator""), ++ pair(""java.util.ListIterator E previous()"", ""public abstract fun previous() : T defined in jet.MutableListIterator""), ++ pair(""java.util.ListIterator int previousIndex()"", ""public abstract fun previousIndex() : jet.Int defined in jet.MutableListIterator""), ++ pair(""java.util.ListIterator void remove()"", ""public abstract fun remove() : Unit defined in jet.MutableListIterator""), ++ pair(""java.util.ListIterator void set(E)"", ""public abstract fun set(e : T) : Unit defined in jet.MutableListIterator"") ++ ); ++ ++ map = b.build(); ++ } ++} +diff --git a/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/kotlinSignature/SignaturesPropagationData.java b/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/kotlinSignature/SignaturesPropagationData.java +index 4eecef9f415d4..238e5343ed568 100644 +--- a/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/kotlinSignature/SignaturesPropagationData.java ++++ b/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/kotlinSignature/SignaturesPropagationData.java +@@ -28,7 +28,10 @@ + import org.jetbrains.jet.lang.resolve.BindingTrace; + import org.jetbrains.jet.lang.resolve.java.CollectionClassMapping; + import org.jetbrains.jet.lang.resolve.java.JavaDescriptorResolver; ++import org.jetbrains.jet.lang.resolve.java.JavaToKotlinClassMap; ++import 
org.jetbrains.jet.lang.resolve.java.JavaToKotlinMethodMap; + import org.jetbrains.jet.lang.resolve.java.wrapper.PsiMethodWrapper; ++import org.jetbrains.jet.lang.resolve.name.FqName; + import org.jetbrains.jet.lang.resolve.name.FqNameUnsafe; + import org.jetbrains.jet.lang.resolve.scopes.JetScope; + import org.jetbrains.jet.lang.types.*; +@@ -50,13 +53,14 @@ public class SignaturesPropagationData { + private final Map autoTypeParameterToModified; + + public SignaturesPropagationData( ++ @NotNull ClassDescriptor containingClass, + @NotNull JetType autoReturnType, // type built by JavaTypeTransformer from Java signature and @NotNull annotations + @NotNull JavaDescriptorResolver.ValueParameterDescriptors autoValueParameters, // descriptors built by parameters resolver + @NotNull List autoTypeParameters, // descriptors built by signature resolver + @NotNull PsiMethodWrapper method, + @NotNull BindingTrace trace + ) { +- superFunctions = getSuperFunctionsForMethod(method, trace); ++ superFunctions = getSuperFunctionsForMethod(method, trace, containingClass); + + autoTypeParameterToModified = SignaturesUtil.recreateTypeParametersAndReturnMapping(autoTypeParameters); + +@@ -187,7 +191,8 @@ public JetType fun(FunctionDescriptor superFunction) { + + private static List getSuperFunctionsForMethod( + @NotNull PsiMethodWrapper method, +- @NotNull BindingTrace trace ++ @NotNull BindingTrace trace, ++ @NotNull ClassDescriptor containingClass + ) { + List superFunctions = Lists.newArrayList(); + for (HierarchicalMethodSignature superSignature : method.getPsiMethod().getHierarchicalMethodSignature().getSuperSignatures()) { +@@ -196,15 +201,22 @@ private static List getSuperFunctionsForMethod( + superFunctions.add(((FunctionDescriptor) superFun)); + } + else { +- // TODO assert is temporarily disabled +- // It fails because of bug in IDEA on Mac: it adds invalid roots to JDK classpath and it leads to the problem that +- // getHierarchicalMethodSignature() returns elements from invalid virtual files +- +- // Function descriptor can't be find iff superclass is java.lang.Collection or similar (translated to jet.* collections) +- //assert !JavaToKotlinClassMap.getInstance().mapPlatformClass( +- // new FqName(superSignature.getMethod().getContainingClass().getQualifiedName())).isEmpty(): +- // ""Can't find super function for "" + method.getPsiMethod() + "" defined in "" +- // + method.getPsiMethod().getContainingClass(); ++ String fqName = superSignature.getMethod().getContainingClass().getQualifiedName(); ++ assert fqName != null; ++ Collection platformClasses = JavaToKotlinClassMap.getInstance().mapPlatformClass(new FqName(fqName)); ++ if (platformClasses.isEmpty()) { ++ // TODO assert is temporarily disabled ++ // It fails because of bug in IDEA on Mac: it adds invalid roots to JDK classpath and it leads to the problem that ++ // getHierarchicalMethodSignature() returns elements from invalid virtual files ++ ++ //assert false : ""Can't find super function for "" + method.getPsiMethod() + ++ // "" defined in "" + method.getPsiMethod().getContainingClass() ++ } ++ else { ++ List funsFromMap = ++ JavaToKotlinMethodMap.INSTANCE.getFunctions(superSignature.getMethod(), containingClass); ++ superFunctions.addAll(funsFromMap); ++ } + } + } + +diff --git a/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/resolver/JavaFunctionResolver.java b/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/resolver/JavaFunctionResolver.java +index fb1c73aca457c..a528a0577ec0c 100644 +--- 
a/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/resolver/JavaFunctionResolver.java ++++ b/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/resolver/JavaFunctionResolver.java +@@ -144,8 +144,8 @@ private SimpleFunctionDescriptor resolveMethodToFunctionDescriptor( + + List superFunctions; + if (ownerDescriptor instanceof ClassDescriptor) { +- SignaturesPropagationData signaturesPropagationData = +- new SignaturesPropagationData(returnType, valueParameterDescriptors, methodTypeParameters, method, trace); ++ SignaturesPropagationData signaturesPropagationData = new SignaturesPropagationData( ++ (ClassDescriptor) ownerDescriptor, returnType, valueParameterDescriptors, methodTypeParameters, method, trace); + superFunctions = signaturesPropagationData.getSuperFunctions(); + + returnType = signaturesPropagationData.getModifiedReturnType(); +@@ -214,6 +214,9 @@ private static void checkFunctionsOverrideCorrectly( + ((ClassDescriptor) functionDescriptor.getContainingDeclaration()).getDefaultType()); + FunctionDescriptor superFunctionSubstituted = superFunction.substitute(substitutor); + ++ assert superFunctionSubstituted != null : ++ ""Couldn't substitute super function: "" + superFunction + "", substitutor = "" + substitutor; ++ + OverrideCompatibilityInfo.Result overridableResult = + isOverridableBy(superFunctionSubstituted, functionDescriptor).getResult(); + boolean paramsOk = overridableResult == OverrideCompatibilityInfo.Result.OVERRIDABLE; +diff --git a/compiler/frontend/src/org/jetbrains/jet/lang/types/lang/KotlinBuiltIns.java b/compiler/frontend/src/org/jetbrains/jet/lang/types/lang/KotlinBuiltIns.java +index 75e97187252d0..c8d2153ce2534 100644 +--- a/compiler/frontend/src/org/jetbrains/jet/lang/types/lang/KotlinBuiltIns.java ++++ b/compiler/frontend/src/org/jetbrains/jet/lang/types/lang/KotlinBuiltIns.java +@@ -334,12 +334,17 @@ public JetScope getBuiltInsScope() { + //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + + @NotNull +- private ClassDescriptor getBuiltInClassByName(@NotNull String simpleName) { +- ClassifierDescriptor classifier = getBuiltInsScope().getClassifier(Name.identifier(simpleName)); ++ public ClassDescriptor getBuiltInClassByName(@NotNull Name simpleName) { ++ ClassifierDescriptor classifier = getBuiltInsScope().getClassifier(simpleName); + assert classifier instanceof ClassDescriptor : ""Must be a class descriptor "" + simpleName + "", but was "" + classifier; + return (ClassDescriptor) classifier; + } + ++ @NotNull ++ private ClassDescriptor getBuiltInClassByName(@NotNull String simpleName) { ++ return getBuiltInClassByName(Name.identifier(simpleName)); ++ } ++ + // Special + + @NotNull +diff --git a/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfCollection.java b/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfCollection.java +new file mode 100644 +index 0000000000000..d5e36c9d1635b +--- /dev/null ++++ b/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfCollection.java +@@ -0,0 +1,8 @@ ++package test; ++ ++import java.util.*; ++ ++public interface SubclassOfCollection extends Collection { ++ Iterator iterator(); ++ ++} +diff --git a/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfCollection.kt b/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfCollection.kt +new file mode 100644 +index 0000000000000..d4159b0dd5dc3 +--- /dev/null ++++ 
b/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfCollection.kt +@@ -0,0 +1,5 @@ ++package test ++ ++public trait SubclassOfCollection: MutableCollection { ++ override fun iterator() : MutableIterator ++} +diff --git a/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfCollection.txt b/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfCollection.txt +new file mode 100644 +index 0000000000000..c5005631e02fd +--- /dev/null ++++ b/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfCollection.txt +@@ -0,0 +1,17 @@ ++namespace test ++ ++public abstract trait test.SubclassOfCollection : jet.MutableCollection { ++ public abstract override /*1*/ /*fake_override*/ fun add(/*0*/ e: E): jet.Boolean ++ public abstract override /*1*/ /*fake_override*/ fun addAll(/*0*/ c: jet.Collection): jet.Boolean ++ public abstract override /*1*/ /*fake_override*/ fun clear(): jet.Tuple0 ++ public abstract override /*1*/ /*fake_override*/ fun contains(/*0*/ o: jet.Any?): jet.Boolean ++ public abstract override /*1*/ /*fake_override*/ fun containsAll(/*0*/ c: jet.Collection): jet.Boolean ++ public abstract override /*1*/ /*fake_override*/ fun isEmpty(): jet.Boolean ++ public abstract override /*1*/ fun iterator(): jet.MutableIterator ++ public abstract override /*1*/ /*fake_override*/ fun remove(/*0*/ o: jet.Any?): jet.Boolean ++ public abstract override /*1*/ /*fake_override*/ fun removeAll(/*0*/ c: jet.Collection): jet.Boolean ++ public abstract override /*1*/ /*fake_override*/ fun retainAll(/*0*/ c: jet.Collection): jet.Boolean ++ public abstract override /*1*/ /*fake_override*/ fun size(): jet.Int ++ public abstract override /*1*/ /*fake_override*/ fun toArray(): jet.Array ++ public abstract override /*1*/ /*fake_override*/ fun toArray(/*0*/ a: jet.Array): jet.Array ++} +diff --git a/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfMapEntry.java b/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfMapEntry.java +new file mode 100644 +index 0000000000000..0924783402ae3 +--- /dev/null ++++ b/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfMapEntry.java +@@ -0,0 +1,7 @@ ++package test; ++ ++import java.util.*; ++ ++public interface SubclassOfMapEntry extends Map.Entry { ++ V setValue(V v); ++} +diff --git a/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfMapEntry.kt b/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfMapEntry.kt +new file mode 100644 +index 0000000000000..73127c5cab896 +--- /dev/null ++++ b/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfMapEntry.kt +@@ -0,0 +1,5 @@ ++package test ++ ++public trait SubclassOfMapEntry: MutableMap.MutableEntry { ++ override fun setValue(p0: V) : V ++} +diff --git a/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfMapEntry.txt b/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfMapEntry.txt +new file mode 100644 +index 0000000000000..0c728cfaf3dc9 +--- /dev/null ++++ b/compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfMapEntry.txt +@@ -0,0 +1,7 @@ ++namespace test ++ ++public abstract trait test.SubclassOfMapEntry : jet.MutableMap.MutableEntry { ++ public abstract override /*1*/ /*fake_override*/ fun getKey(): K ++ public abstract override /*1*/ /*fake_override*/ fun getValue(): V ++ public abstract override /*1*/ fun setValue(/*0*/ p0: V): V ++} +diff --git 
a/compiler/testData/loadJava/modality/ModalityOfFakeOverrides.txt b/compiler/testData/loadJava/modality/ModalityOfFakeOverrides.txt +index 0f79be88c2f99..aaf74fc13a53b 100644 +--- a/compiler/testData/loadJava/modality/ModalityOfFakeOverrides.txt ++++ b/compiler/testData/loadJava/modality/ModalityOfFakeOverrides.txt +@@ -18,7 +18,7 @@ public open class test.ModalityOfFakeOverrides : java.util.AbstractList + protected final override /*1*/ /*fake_override*/ var modCount: jet.Int + public open override /*1*/ /*fake_override*/ fun remove(/*0*/ p0: jet.Any?): jet.Boolean +- public open override /*1*/ /*fake_override*/ fun remove(/*0*/ p0: jet.Int): jet.String? ++ public open override /*1*/ /*fake_override*/ fun remove(/*0*/ p0: jet.Int): jet.String + public open override /*1*/ /*fake_override*/ fun removeAll(/*0*/ p0: jet.Collection): jet.Boolean + protected open override /*1*/ /*fake_override*/ fun removeRange(/*0*/ p0: jet.Int, /*1*/ p1: jet.Int): jet.Tuple0 + public open override /*1*/ /*fake_override*/ fun retainAll(/*0*/ p0: jet.Collection): jet.Boolean +diff --git a/compiler/tests/org/jetbrains/jet/jvm/compiler/LoadJavaTestGenerated.java b/compiler/tests/org/jetbrains/jet/jvm/compiler/LoadJavaTestGenerated.java +index 09f489924fed1..cd3dd08bbb775 100644 +--- a/compiler/tests/org/jetbrains/jet/jvm/compiler/LoadJavaTestGenerated.java ++++ b/compiler/tests/org/jetbrains/jet/jvm/compiler/LoadJavaTestGenerated.java +@@ -649,6 +649,16 @@ public void testSameProjectionKind() throws Exception { + doTest(""compiler/testData/loadJava/kotlinSignature/propagation/return/SameProjectionKind.java""); + } + ++ @TestMetadata(""SubclassOfCollection.java"") ++ public void testSubclassOfCollection() throws Exception { ++ doTest(""compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfCollection.java""); ++ } ++ ++ @TestMetadata(""SubclassOfMapEntry.java"") ++ public void testSubclassOfMapEntry() throws Exception { ++ doTest(""compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfMapEntry.java""); ++ } ++ + @TestMetadata(""TwoSuperclassesConflictingProjectionKinds.java"") + public void testTwoSuperclassesConflictingProjectionKinds() throws Exception { + doTest(""compiler/testData/loadJava/kotlinSignature/propagation/return/TwoSuperclassesConflictingProjectionKinds.java""); +diff --git a/compiler/tests/org/jetbrains/jet/lang/resolve/lazy/LazyResolveNamespaceComparingTestGenerated.java b/compiler/tests/org/jetbrains/jet/lang/resolve/lazy/LazyResolveNamespaceComparingTestGenerated.java +index c22111f4f4093..736054fa0d33d 100644 +--- a/compiler/tests/org/jetbrains/jet/lang/resolve/lazy/LazyResolveNamespaceComparingTestGenerated.java ++++ b/compiler/tests/org/jetbrains/jet/lang/resolve/lazy/LazyResolveNamespaceComparingTestGenerated.java +@@ -1539,6 +1539,16 @@ public void testSameProjectionKind() throws Exception { + doTestSinglePackage(""compiler/testData/loadJava/kotlinSignature/propagation/return/SameProjectionKind.kt""); + } + ++ @TestMetadata(""SubclassOfCollection.kt"") ++ public void testSubclassOfCollection() throws Exception { ++ doTestSinglePackage(""compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfCollection.kt""); ++ } ++ ++ @TestMetadata(""SubclassOfMapEntry.kt"") ++ public void testSubclassOfMapEntry() throws Exception { ++ doTestSinglePackage(""compiler/testData/loadJava/kotlinSignature/propagation/return/SubclassOfMapEntry.kt""); ++ } ++ + @TestMetadata(""TwoSuperclassesConflictingProjectionKinds.kt"") + public void 
testTwoSuperclassesConflictingProjectionKinds() throws Exception { + doTestSinglePackage(""compiler/testData/loadJava/kotlinSignature/propagation/return/TwoSuperclassesConflictingProjectionKinds.kt""); +diff --git a/generators/generators.iml b/generators/generators.iml +index a02cc242438e9..06ad1f8bf7f10 100644 +--- a/generators/generators.iml ++++ b/generators/generators.iml +@@ -14,6 +14,7 @@ + + + ++ + + + +diff --git a/generators/org/jetbrains/jet/generators/GenerateJavaToKotlinMethodMap.java b/generators/org/jetbrains/jet/generators/GenerateJavaToKotlinMethodMap.java +new file mode 100644 +index 0000000000000..7cfbc1d08c962 +--- /dev/null ++++ b/generators/org/jetbrains/jet/generators/GenerateJavaToKotlinMethodMap.java +@@ -0,0 +1,248 @@ ++/* ++ * Copyright 2010-2012 JetBrains s.r.o. ++ * ++ * Licensed under the Apache License, Version 2.0 (the ""License""); ++ * you may not use this file except in compliance with the License. ++ * You may obtain a copy of the License at ++ * ++ * http://www.apache.org/licenses/LICENSE-2.0 ++ * ++ * Unless required by applicable law or agreed to in writing, software ++ * distributed under the License is distributed on an ""AS IS"" BASIS, ++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ++ * See the License for the specific language governing permissions and ++ * limitations under the License. ++ */ ++ ++package org.jetbrains.jet.generators; ++ ++import com.google.common.collect.Lists; ++import com.intellij.openapi.components.ServiceManager; ++import com.intellij.openapi.project.Project; ++import com.intellij.openapi.util.Pair; ++import com.intellij.openapi.util.io.FileUtil; ++import com.intellij.psi.PsiClass; ++import com.intellij.psi.PsiMethod; ++import com.intellij.psi.impl.file.impl.JavaFileManager; ++import com.intellij.psi.search.GlobalSearchScope; ++import org.jetbrains.annotations.NotNull; ++import org.jetbrains.annotations.Nullable; ++import org.jetbrains.jet.CompileCompilerDependenciesTest; ++import org.jetbrains.jet.ConfigurationKind; ++import org.jetbrains.jet.TestJdkKind; ++import org.jetbrains.jet.cli.jvm.compiler.CompileEnvironmentUtil; ++import org.jetbrains.jet.cli.jvm.compiler.JetCoreEnvironment; ++import org.jetbrains.jet.lang.descriptors.ClassDescriptor; ++import org.jetbrains.jet.lang.descriptors.DeclarationDescriptor; ++import org.jetbrains.jet.lang.descriptors.FunctionDescriptor; ++import org.jetbrains.jet.lang.resolve.DescriptorUtils; ++import org.jetbrains.jet.lang.resolve.java.JavaToKotlinClassMapBuilder; ++import org.jetbrains.jet.lang.types.lang.KotlinBuiltIns; ++import org.jetbrains.jet.resolve.DescriptorRenderer; ++import org.jetbrains.jet.utils.Printer; ++ ++import java.io.File; ++import java.io.FileWriter; ++import java.io.IOException; ++import java.util.Collections; ++import java.util.Comparator; ++import java.util.List; ++ ++import static org.jetbrains.jet.lang.resolve.java.JavaToKotlinMethodMap.serializeFunction; ++import static org.jetbrains.jet.lang.resolve.java.JavaToKotlinMethodMap.serializePsiMethod; ++ ++public class GenerateJavaToKotlinMethodMap { ++ ++ public static final String BUILTINS_FQNAME_PREFIX = KotlinBuiltIns.BUILT_INS_PACKAGE_FQ_NAME.getFqName() + "".""; ++ ++ public static void main(String[] args) throws IOException { ++ JetCoreEnvironment coreEnvironment = new JetCoreEnvironment( ++ CompileEnvironmentUtil.createMockDisposable(), ++ CompileCompilerDependenciesTest.compilerConfigurationForTests(ConfigurationKind.JDK_ONLY, TestJdkKind.FULL_JDK)); ++ ++ StringBuilder buf = 
new StringBuilder(); ++ Printer printer = new Printer(buf); ++ ++ printer.print(FileUtil.loadFile(new File(""injector-generator/copyright.txt""))) ++ .println() ++ .println(""package org.jetbrains.jet.lang.resolve.java;"") ++ .println() ++ .println(""import com.google.common.collect.ImmutableMultimap;"") ++ .println() ++ .println(""import static org.jetbrains.jet.lang.resolve.java.JavaToKotlinMethodMap.*;"") ++ .println() ++ .println(""/* This file is generated by "", GenerateJavaToKotlinMethodMap.class.getName(), "". DO NOT EDIT! */"") ++ .println(""@SuppressWarnings(\""unchecked\"")"") ++ .println(""class JavaToKotlinMethodMapGenerated {"").pushIndent() ++ .println(""final ImmutableMultimap map;"") ++ .println() ++ .println(""JavaToKotlinMethodMapGenerated() {"").pushIndent() ++ .println(""ImmutableMultimap.Builder b = ImmutableMultimap.builder();"") ++ .println(); ++ ++ MyMapBuilder builder = new MyMapBuilder(coreEnvironment.getProject()); ++ printer.printWithNoIndent(builder.toString()); ++ ++ printer.println(""map = b.build();""); ++ printer.popIndent().println(""}""); ++ printer.popIndent().println(""}""); ++ ++ //noinspection IOResourceOpenedButNotSafelyClosed ++ FileWriter out = ++ new FileWriter(""compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/JavaToKotlinMethodMapGenerated.java""); ++ ++ out.write(buf.toString()); ++ out.close(); ++ } ++ ++ private static class MyMapBuilder extends JavaToKotlinClassMapBuilder { ++ private final Project project; ++ private final StringBuilder buf = new StringBuilder(); ++ private final Printer printer = new Printer(buf).pushIndent().pushIndent(); ++ ++ public MyMapBuilder(@NotNull Project project) { ++ this.project = project; ++ init(); ++ } ++ ++ @Override ++ protected void register(@NotNull Class javaClass, @NotNull ClassDescriptor kotlinDescriptor, @NotNull Direction direction) { ++ processClass(javaClass, kotlinDescriptor); ++ } ++ ++ @Override ++ protected void register(@NotNull Class javaClass, ++ @NotNull ClassDescriptor kotlinDescriptor, ++ @NotNull ClassDescriptor kotlinMutableDescriptor, ++ @NotNull Direction direction) { ++ processClass(javaClass, kotlinDescriptor); ++ processClass(javaClass, kotlinMutableDescriptor); ++ } ++ ++ private void processClass(@NotNull Class javaClass, @NotNull ClassDescriptor kotlinClass) { ++ JavaFileManager javaFileManager = ServiceManager.getService(project, JavaFileManager.class); ++ PsiClass psiClass = javaFileManager.findClass(javaClass.getCanonicalName(), GlobalSearchScope.allScope(project)); ++ assert psiClass != null; ++ ++ List> methods2Functions = getClassMethods2Functions(kotlinClass, psiClass); ++ if (!methods2Functions.isEmpty()) { ++ appendBeforeClass(kotlinClass, psiClass); ++ appendClass(methods2Functions); ++ appendAfterClass(); ++ } ++ } ++ ++ private static List> getClassMethods2Functions( ++ @NotNull ClassDescriptor kotlinClass, ++ @NotNull PsiClass psiClass ++ ) { ++ PsiMethod[] methods = psiClass.getMethods(); ++ ++ List> result = Lists.newArrayList(); ++ ++ for (DeclarationDescriptor member : kotlinClass.getDefaultType().getMemberScope().getAllDescriptors()) { ++ if (!(member instanceof FunctionDescriptor) || member.getContainingDeclaration() != kotlinClass) { ++ continue; ++ } ++ ++ FunctionDescriptor fun = (FunctionDescriptor) member; ++ PsiMethod foundMethod = findMethod(methods, fun); ++ if (foundMethod != null) { ++ result.add(Pair.create(foundMethod, fun)); ++ } ++ } ++ ++ Collections.sort(result, new Comparator>() { ++ @Override ++ public int compare(Pair pair1, 
Pair pair2) { ++ PsiMethod method1 = pair1.first; ++ PsiMethod method2 = pair2.first; ++ ++ String name1 = method1.getName(); ++ String name2 = method2.getName(); ++ if (!name1.equals(name2)) { ++ return name1.compareTo(name2); ++ } ++ ++ String serialized1 = serializePsiMethod(method1); ++ String serialized2 = serializePsiMethod(method2); ++ return serialized1.compareTo(serialized2); ++ } ++ }); ++ return result; ++ } ++ ++ private static boolean match(@NotNull PsiMethod method, @NotNull FunctionDescriptor fun) { ++ // Compare method an function by name and parameters count. For all methods except one (List.remove) it is enough. ++ // If this changes, there will be assertion error in findMethod() ++ if (method.getName().equals(fun.getName().getIdentifier()) ++ && method.getParameterList().getParametersCount() == fun.getValueParameters().size()) { ++ ++ // ""special case"": remove(Int) and remove(Any?) in MutableList ++ if (method.getName().equals(""remove"") && method.getContainingClass().getName().equals(""List"")) { ++ String psiType = method.getParameterList().getParameters()[0].getType().getPresentableText(); ++ String jetType = DescriptorRenderer.TEXT.renderTypeWithShortNames(fun.getValueParameters().get(0).getType()); ++ String string = psiType + ""|"" + jetType; ++ ++ return ""int|Int"".equals(string) || ""Object|Any?"".equals(string); ++ } ++ ++ return true; ++ } ++ return false; ++ } ++ ++ @Nullable ++ private static PsiMethod findMethod(@NotNull PsiMethod[] methods, @NotNull FunctionDescriptor fun) { ++ PsiMethod found = null; ++ for (PsiMethod method : methods) { ++ if (match(method, fun)) { ++ if (found != null) { ++ throw new AssertionError(""Duplicate for "" + fun); ++ } ++ ++ found = method; ++ } ++ } + -+ Set> adjacencyList = g.getAdjacencyList(0); -+ Edge e = new SimpleDirectedTypedEdge(""type-1"",0, 1); -+ assertTrue(adjacencyList.contains(e)); -+ assertTrue(adjacencyList.remove(e)); -+ assertEquals(8, adjacencyList.size()); -+ assertEquals( (10 * 9) / 2 - 1, g.size()); -+ } ++ return found; ++ } + -+ public void testAdjacencyListAddEdge() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ for (int i = 0; i < 10; ++i) -+ for (int j = i + 2; j < 10; ++j) -+ g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); ++ private void appendBeforeClass(@NotNull ClassDescriptor kotlinClass, @NotNull PsiClass psiClass) { ++ String psiFqName = psiClass.getQualifiedName(); ++ String kotlinFqName = DescriptorUtils.getFQName(kotlinClass).toSafe().getFqName(); + -+ assertEquals( (10 * 9) / 2 - 9, g.size()); ++ assert kotlinFqName.startsWith(BUILTINS_FQNAME_PREFIX); ++ String kotlinSubQualifiedName = kotlinFqName.substring(BUILTINS_FQNAME_PREFIX.length()); ++ printer.println(""put(b, \"""", psiFqName, ""\"", \"""", kotlinSubQualifiedName, ""\"","").pushIndent(); ++ } + -+ Set> adjacencyList = g.getAdjacencyList(0); -+ DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",0, 1); -+ assertFalse(adjacencyList.contains(e)); -+ assertFalse(g.contains(e)); -+ -+ assertTrue(adjacencyList.add(e)); -+ assertTrue(g.contains(e)); -+ -+ assertEquals(9, adjacencyList.size()); -+ assertEquals( (10 * 9) / 2 - 8, g.size()); -+ } ++ private void appendClass(@NotNull List> methods2Functions) { ++ int index = 0; ++ for (Pair method2Function : methods2Functions) { ++ printer.print(""pair(\"""", serializePsiMethod(method2Function.first), ""\"", \"""", serializeFunction(method2Function.second), ++ ""\"")""); + -+ @Test public void testAdjacencyListIterator() { -+ DirectedMultigraph g = new 
DirectedMultigraph(); -+ for (int i = 0; i < 10; ++i) { -+ for (int j = i + 1; j < 10; ++j) { -+ DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, j); -+ g.add(e); ++ if (index != methods2Functions.size() - 1) { ++ printer.printWithNoIndent("",""); ++ } ++ ++ printer.println(); ++ ++ index++; + } + } + -+ Set> test = new HashSet>(); -+ Set> adjacencyList = g.getAdjacencyList(0); -+ assertEquals(9, adjacencyList.size()); -+ -+ Iterator> it = adjacencyList.iterator(); -+ int i = 0; -+ while (it.hasNext()) -+ assertTrue(test.add(it.next())); -+ assertEquals(9, test.size()); ++ private void appendAfterClass() { ++ printer.popIndent().println("");"").println(); ++ } ++ ++ ++ public String toString() { ++ return buf.toString(); ++ } + } + -+ @Test public void testAdjacencyListNoVertex() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ Set> adjacencyList = g.getAdjacencyList(0); -+ assertEquals(0, adjacencyList.size()); ++ private GenerateJavaToKotlinMethodMap() { + } ++} +diff --git a/jdk-annotations/java/util/annotations.xml b/jdk-annotations/java/util/annotations.xml +index 3251649264e78..c1f62eced16e3 100644 +--- a/jdk-annotations/java/util/annotations.xml ++++ b/jdk-annotations/java/util/annotations.xml +@@ -1,4 +1,10 @@ + ++ ++ ++ ++ ++ ++ + + + +@@ -679,12 +685,12 @@ + + + +- ++ + + + + +- ++ + + + +@@ -699,7 +705,7 @@ + + + +- ++ + + + " +614faccf1d353c3b4835e6df0e6902839d54b5f6,hadoop,YARN-1910. Fixed a race condition in TestAMRMTokens- that causes the test to fail more often on Windows. Contributed by Xuan Gong.- svn merge --ignore-ancestry -c 1586192 ../../trunk/--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1586193 13f79535-47bb-0310-9956-ffa450edef68-,c,https://github.com/apache/hadoop,"diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt +index 2abb35dfa9b02..188a80035ac85 100644 +--- a/hadoop-yarn-project/CHANGES.txt ++++ b/hadoop-yarn-project/CHANGES.txt +@@ -79,6 +79,9 @@ Release 2.4.1 - UNRELEASED + YARN-1908. Fixed DistributedShell to not fail in secure clusters. (Vinod + Kumar Vavilapalli and Jian He via vinodkv) + ++ YARN-1910. Fixed a race condition in TestAMRMTokens that causes the test to ++ fail more often on Windows. 
(Xuan Gong via vinodkv) + -+ @Test(expected=NoSuchElementException.class) -+ public void testAdjacencyListIteratorNextOffEnd() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ for (int i = 0; i < 10; ++i) { -+ for (int j = i + 1; j < 10; ++j) { -+ DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, j); -+ g.add(e); -+ } -+ } + Release 2.4.0 - 2014-04-07 + + INCOMPATIBLE CHANGES +diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestAMRMTokens.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestAMRMTokens.java +index aa894c5f6a920..64602bd888e27 100644 +--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestAMRMTokens.java ++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestAMRMTokens.java +@@ -48,6 +48,7 @@ + import org.apache.hadoop.yarn.server.resourcemanager.TestAMAuthorization.MyContainerManager; + import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; + import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt; ++import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState; + import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptContainerFinishedEvent; + import org.apache.hadoop.yarn.server.utils.BuilderUtils; + import org.apache.hadoop.yarn.util.Records; +@@ -63,6 +64,7 @@ public class TestAMRMTokens { + private static final Log LOG = LogFactory.getLog(TestAMRMTokens.class); + + private final Configuration conf; ++ private static final int maxWaitAttempts = 50; + + @Parameters + public static Collection configs() { +@@ -153,6 +155,16 @@ public void testTokenExpiry() throws Exception { + new RMAppAttemptContainerFinishedEvent(applicationAttemptId, + containerStatus)); + ++ // Make sure the RMAppAttempt is at Finished State. ++ // Both AMRMToken and ClientToAMToken have been removed. ++ int count = 0; ++ while (attempt.getState() != RMAppAttemptState.FINISHED ++ && count < maxWaitAttempts) { ++ Thread.sleep(100); ++ count++; ++ } ++ Assert.assertTrue(attempt.getState() == RMAppAttemptState.FINISHED); + -+ Set> test = new HashSet>(); -+ Set> adjacencyList = g.getAdjacencyList(0); -+ assertEquals(9, adjacencyList.size()); -+ -+ Iterator> it = adjacencyList.iterator(); -+ int i = 0; -+ while (it.hasNext()) -+ assertTrue(test.add(it.next())); -+ assertEquals(9, test.size()); -+ it.next(); + // Now simulate trying to allocate. RPC call itself should throw auth + // exception. 
+ rpc.stopProxy(rmClient, conf); // To avoid using cached client" +649a65fae047377bf52978ef47f3d50020f1c048,Delta Spike,"DELTASPIKE-277 create a proper unit test and remove unused imports +",p,https://github.com/apache/deltaspike,"diff --git a/deltaspike/modules/jsf/impl/src/main/java/org/apache/deltaspike/jsf/impl/message/JsfMessageProducer.java b/deltaspike/modules/jsf/impl/src/main/java/org/apache/deltaspike/jsf/impl/message/JsfMessageProducer.java +index 2a26c82d4..ca5fca5d6 100644 +--- a/deltaspike/modules/jsf/impl/src/main/java/org/apache/deltaspike/jsf/impl/message/JsfMessageProducer.java ++++ b/deltaspike/modules/jsf/impl/src/main/java/org/apache/deltaspike/jsf/impl/message/JsfMessageProducer.java +@@ -26,7 +26,6 @@ + import java.lang.reflect.ParameterizedType; + import java.lang.reflect.Type; + +-import org.apache.deltaspike.core.util.ReflectionUtils; + import org.apache.deltaspike.jsf.message.JsfMessage; + + /** +diff --git a/deltaspike/modules/jsf/impl/src/test/java/org/apache/deltaspike/test/jsf/impl/message/JsfMessageTest.java b/deltaspike/modules/jsf/impl/src/test/java/org/apache/deltaspike/test/jsf/impl/message/JsfMessageTest.java +index c01ff4d7d..2e1767ad1 100644 +--- a/deltaspike/modules/jsf/impl/src/test/java/org/apache/deltaspike/test/jsf/impl/message/JsfMessageTest.java ++++ b/deltaspike/modules/jsf/impl/src/test/java/org/apache/deltaspike/test/jsf/impl/message/JsfMessageTest.java +@@ -34,9 +34,13 @@ + import org.jboss.shrinkwrap.api.asset.EmptyAsset; + import org.jboss.shrinkwrap.api.spec.WebArchive; + import org.junit.Test; ++import org.junit.Assert; + import org.junit.experimental.categories.Category; + import org.junit.runner.RunWith; ++import org.openqa.selenium.By; + import org.openqa.selenium.WebDriver; ++import org.openqa.selenium.WebElement; ++import org.openqa.selenium.support.ui.ExpectedConditions; + + + /** +@@ -76,15 +80,27 @@ public void testViewScopedContext() throws Exception + //X TODO remove, it's just for debugging the server side: + //X Thread.sleep(600000L); + +-/*X +- WebElement inputField = driver.findElement(By.id(""test:valueInput"")); +- inputField.sendKeys(""23""); + +- WebElement button = driver.findElement(By.id(""test:saveButton"")); +- button.click(); ++ // check the JSF FacesMessages ++ ++ Assert.assertNotNull(ExpectedConditions.presenceOfElementLocated(By.xpath(""id('messages')"")).apply(driver)); ++ ++ Assert.assertTrue(ExpectedConditions.textToBePresentInElement( ++ By.xpath(""id('messages')/ul/li[1]""), ""message with details warnInfo!"").apply(driver)); ++ ++ Assert.assertTrue(ExpectedConditions.textToBePresentInElement( ++ By.xpath(""id('messages')/ul/li[2]""), ""message without detail but parameter errorInfo."").apply(driver)); ++ ++ Assert.assertTrue(ExpectedConditions.textToBePresentInElement( ++ By.xpath(""id('messages')/ul/li[3]""), ""a simple message without a param."").apply(driver)); ++ ++ Assert.assertTrue(ExpectedConditions.textToBePresentInElement( ++ By.xpath(""id('messages')/ul/li[4]""), ""simple message with a string param fatalInfo."").apply(driver)); ++ ++ // check the free message usage ++ Assert.assertTrue(ExpectedConditions.textToBePresentInElement( ++ By.id(""test:valueOutput""), ""a simple message without a param."").apply(driver)); + +- Assert.assertTrue(ExpectedConditions.textToBePresentInElement(By.id(""test:valueOutput""), ""23"").apply(driver)); +-*/ + } + + }" +7b22bc146b318790552aa8ec1ece25a3a06d1316,Valadoc,"Add support for cgraphs + +Based on patch by Richard Schwarting + +Fixes bug 703688. 
+",a,https://github.com/GNOME/vala/,"diff --git a/configure.ac b/configure.ac +index 79aa766f38..d4d639055b 100644 +--- a/configure.ac ++++ b/configure.ac +@@ -53,6 +53,35 @@ AC_SUBST(LIBGEE_LIBS) + + + ++AC_MSG_CHECKING([for CGRAPH]) ++valadoc_tmp_LIBADD=""$LIBADD"" ++valadoc_tmp_CFLAGS=""$CFLAGGS"" ++LIBADD=""$LIBADD $LIBGVC_LIBS"" ++CFLAGS=""$CFLAGS $LIBGVC_CFLAGS"" ++AC_RUN_IFELSE( ++ [AC_LANG_SOURCE([ ++ #include ++ ++ int main(void) { ++ #ifdef WITH_CGRAPH ++ return 0; ++ #else ++ return -1; ++ #endif ++ } ++ ])], [ ++ AC_MSG_RESULT([yes]) ++ VALAFLAGS=""$VALAFLAGS -D WITH_CGRAPH"" ++ have_cgraph=yes ++ ], [ ++ AC_MSG_RESULT([no]) ++ have_cgraph=no ++ ] ++) ++LIBADD=""$valadoc_tmp_LIBADD"" ++CFLAGS=""$valadoc_tmp_CFLAGS"" ++AM_CONDITIONAL(HAVE_CGRAPH, test ""$have_cgraph"" = ""yes"") ++ + + ## + ## Drivers: +diff --git a/src/doclets/devhelp/Makefile.am b/src/doclets/devhelp/Makefile.am +index e7c2446290..de82ec1ed2 100644 +--- a/src/doclets/devhelp/Makefile.am ++++ b/src/doclets/devhelp/Makefile.am +@@ -4,6 +4,7 @@ NULL = + AM_CFLAGS = -g \ + -DPACKAGE_ICONDIR=\""$(datadir)/valadoc/icons/\"" \ + -I ../../libvaladoc/ \ ++ $(LIBGVC_CFLAGS) \ + $(GLIB_CFLAGS) \ + $(LIBGEE_CFLAGS) \ + $(NULL) +diff --git a/src/doclets/gtkdoc/Makefile.am b/src/doclets/gtkdoc/Makefile.am +index 66dcc5576f..81e3607533 100644 +--- a/src/doclets/gtkdoc/Makefile.am ++++ b/src/doclets/gtkdoc/Makefile.am +@@ -5,6 +5,7 @@ AM_CFLAGS = -g \ + -DPACKAGE_ICONDIR=\""$(datadir)/valadoc/icons/\"" \ + -I ../../libvaladoc/ \ + $(GLIB_CFLAGS) \ ++ $(LIBGVC_CFLAGS) \ + $(LIBGEE_CFLAGS) \ + $(NULL) + +diff --git a/src/doclets/htm/Makefile.am b/src/doclets/htm/Makefile.am +index 5f3be836e2..7127e1d350 100644 +--- a/src/doclets/htm/Makefile.am ++++ b/src/doclets/htm/Makefile.am +@@ -4,10 +4,21 @@ NULL = + AM_CFLAGS = -g \ + -DPACKAGE_ICONDIR=\""$(datadir)/valadoc/icons/\"" \ + -I ../../libvaladoc/ \ ++ $(LIBGVC_CFLAGS) \ + $(GLIB_CFLAGS) \ + $(LIBGEE_CFLAGS) \ + $(NULL) + ++# Without the LIBGVC_CFLAGS, we get ++# make[5]: Entering directory `/home/richard/.local/src/valadoc/src/doclets/devhelp' ++# CC doclet.lo ++# In file included from /usr/include/graphviz/gvc.h:17:0, ++# from ../../libvaladoc/valadoc-1.0.h:15, ++# from doclet.c:28: ++# /usr/include/graphviz/types.h:49:20: fatal error: cgraph.h: No such file or directory ++# #include ++# ^ ++# compilation terminated. 
+ + + BUILT_SOURCES = libdoclet.vala.stamp +diff --git a/src/libvaladoc/charts/chart.vala b/src/libvaladoc/charts/chart.vala +index f4b99088c3..03dab30f75 100644 +--- a/src/libvaladoc/charts/chart.vala ++++ b/src/libvaladoc/charts/chart.vala +@@ -27,7 +27,9 @@ public class Valadoc.Charts.Chart : Api.Visitor { + protected Factory factory; + + static construct { ++ #if !WITH_CGRAPH + Gvc.init (); ++ #endif + } + + public Chart (Factory factory, Api.Node node) { +diff --git a/src/libvaladoc/charts/chartfactory.vala b/src/libvaladoc/charts/chartfactory.vala +index 6a3351a7c0..c2582ab9f3 100644 +--- a/src/libvaladoc/charts/chartfactory.vala ++++ b/src/libvaladoc/charts/chartfactory.vala +@@ -23,7 +23,11 @@ + + public abstract class Valadoc.Charts.Factory : Object { + protected Gvc.Node create_type (Gvc.Graph graph, Api.Node item) { ++ #if WITH_CGRAPH ++ return graph.create_node (item.get_full_name (), 1); ++ #else + return graph.create_node (item.get_full_name ()); ++ #endif + } + + public abstract Gvc.Graph create_graph (Api.Node item); +diff --git a/src/libvaladoc/charts/simplechartfactory.vala b/src/libvaladoc/charts/simplechartfactory.vala +index a1f09fbaae..06da97e133 100644 +--- a/src/libvaladoc/charts/simplechartfactory.vala ++++ b/src/libvaladoc/charts/simplechartfactory.vala +@@ -31,7 +31,11 @@ public class Valadoc.Charts.SimpleFactory : Charts.Factory { + } + + public override Gvc.Graph create_graph (Api.Node item) { ++ #if WITH_CGRAPH ++ var graph = new Gvc.Graph (item.get_full_name (), Gvc.Agdirected, 0); ++ #else + var graph = new Gvc.Graph (item.get_full_name (), Gvc.GraphKind.AGDIGRAPH); ++ #endif + return graph; + }" +c9a360e36059aa46d7090e879762d44d54fa2782,hbase,HBASE-7197. Add multi get to RemoteHTable- (Elliott Clark)--git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1422143 13f79535-47bb-0310-9956-ffa450edef68-,a,https://github.com/apache/hbase,"diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java +index beebe960b05b..92fe09202f61 100644 +--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java ++++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java +@@ -148,6 +148,29 @@ protected String buildRowSpec(final byte[] row, final Map familyMap, + return sb.toString(); + } + ++ protected String buildMultiRowSpec(final byte[][] rows, int maxVersions) { ++ StringBuilder sb = new StringBuilder(); ++ sb.append('/'); ++ sb.append(Bytes.toStringBinary(name)); ++ sb.append(""/multiget/""); ++ if (rows == null || rows.length == 0) { ++ return sb.toString(); ++ } ++ sb.append(""?""); ++ for(int i=0; i g = new DirectedMultigraph(); -+ for (int i = 0; i < 10; ++i) { -+ for (int j = i + 1; j < 10; ++j) { -+ DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, j); -+ g.add(e); -+ } -+ } ++ return sb.toString(); ++ } + -+ Set> test = new HashSet>(); -+ Set> adjacencyList = g.getAdjacencyList(0); -+ assertEquals(9, adjacencyList.size()); -+ -+ Iterator> it = adjacencyList.iterator(); -+ assertTrue(it.hasNext()); -+ Edge e = it.next(); -+ it.remove(); -+ assertFalse(adjacencyList.contains(e)); -+ assertEquals(8, adjacencyList.size()); -+ assertFalse(g.contains(e)); -+ assertEquals( (10 * 9) / 2 - 1, g.size()); + protected Result[] buildResultFromModel(final CellSetModel model) { + List results = new ArrayList(); + for (RowModel row: model.getRows()) { +@@ -273,31 +296,66 @@ public Result get(Get 
get) throws IOException { + if (get.getFilter() != null) { + LOG.warn(""filters not supported on gets""); + } ++ Result[] results = getResults(spec); ++ if (results.length > 0) { ++ if (results.length > 1) { ++ LOG.warn(""too many results for get ("" + results.length + "")""); ++ } ++ return results[0]; ++ } else { ++ return new Result(); + } ++ } + -+ @Test(expected=UnsupportedOperationException.class) -+ public void testAdjacencyListIteratorRemoveFirst() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ for (int i = 0; i < 10; ++i) { -+ for (int j = i + 1; j < 10; ++j) { -+ DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, j); -+ g.add(e); -+ } -+ } ++ public Result[] get(List gets) throws IOException { ++ byte[][] rows = new byte[gets.size()][]; ++ int maxVersions = 1; ++ int count = 0; + -+ Set> test = new HashSet>(); -+ Set> adjacencyList = g.getAdjacencyList(0); -+ assertEquals(9, adjacencyList.size()); -+ -+ Iterator> it = adjacencyList.iterator(); -+ it.remove(); ++ for(Get g:gets) { ++ ++ if ( count == 0 ) { ++ maxVersions = g.getMaxVersions(); ++ } else if (g.getMaxVersions() != maxVersions) { ++ LOG.warn(""MaxVersions on Gets do not match, using the first in the list (""+maxVersions+"")""); ++ } ++ ++ if (g.getFilter() != null) { ++ LOG.warn(""filters not supported on gets""); ++ } ++ ++ rows[count] = g.getRow(); ++ count ++; + } + -+ @Test(expected=UnsupportedOperationException.class) -+ public void testAdjacencyListIteratorRemoveTwice() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ for (int i = 0; i < 10; ++i) { -+ for (int j = i + 1; j < 10; ++j) { -+ DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, j); -+ g.add(e); -+ } -+ } ++ String spec = buildMultiRowSpec(rows, maxVersions); ++ ++ return getResults(spec); ++ } ++ ++ private Result[] getResults(String spec) throws IOException { + for (int i = 0; i < maxRetries; i++) { + Response response = client.get(spec, Constants.MIMETYPE_PROTOBUF); + int code = response.getCode(); + switch (code) { +- case 200: +- CellSetModel model = new CellSetModel(); +- model.getObjectFromMessage(response.getBody()); +- Result[] results = buildResultFromModel(model); +- if (results.length > 0) { +- if (results.length > 1) { +- LOG.warn(""too many results for get ("" + results.length + "")""); ++ case 200: ++ CellSetModel model = new CellSetModel(); ++ model.getObjectFromMessage(response.getBody()); ++ Result[] results = buildResultFromModel(model); ++ if ( results.length > 0) { ++ return results; + } +- return results[0]; +- } +- // fall through +- case 404: +- return new Result(); ++ // fall through ++ case 404: ++ return new Result[0]; + +- case 509: +- try { +- Thread.sleep(sleepTime); +- } catch (InterruptedException e) { } +- break; +- default: +- throw new IOException(""get request returned "" + code); ++ case 509: ++ try { ++ Thread.sleep(sleepTime); ++ } catch (InterruptedException e) { } ++ break; ++ default: ++ throw new IOException(""get request returned "" + code); + } + } + throw new IOException(""get request timed out""); +@@ -708,11 +766,6 @@ public Object[] batchCallback(List actions, Batch.Callback + throw new IOException(""batchCallback not supported""); + } + +- @Override +- public Result[] get(List gets) throws IOException { +- throw new IOException(""get(List) not supported""); +- } +- + @Override + public T coprocessorProxy(Class protocol, + byte[] row) { +diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java +index 01e23d99a0ed..b52a167fbbc9 100644 +--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java ++++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java +@@ -216,6 +216,45 @@ public void testGet() throws IOException { + assertEquals(2, count); + } + ++ @Test ++ public void testMultiGet() throws Exception { ++ ArrayList gets = new ArrayList(); ++ gets.add(new Get(ROW_1)); ++ gets.add(new Get(ROW_2)); ++ Result[] results = remoteTable.get(gets); ++ assertNotNull(results); ++ assertEquals(2, results.length); ++ assertEquals(1, results[0].size()); ++ assertEquals(2, results[1].size()); ++ ++ //Test Versions ++ gets = new ArrayList(); ++ Get g = new Get(ROW_1); ++ g.setMaxVersions(3); ++ gets.add(g); ++ gets.add(new Get(ROW_2)); ++ results = remoteTable.get(gets); ++ assertNotNull(results); ++ assertEquals(2, results.length); ++ assertEquals(1, results[0].size()); ++ assertEquals(3, results[1].size()); ++ ++ //404 ++ gets = new ArrayList(); ++ gets.add(new Get(Bytes.toBytes(""RESALLYREALLYNOTTHERE""))); ++ results = remoteTable.get(gets); ++ assertNotNull(results); ++ assertEquals(0, results.length); ++ ++ gets = new ArrayList(); ++ gets.add(new Get(Bytes.toBytes(""RESALLYREALLYNOTTHERE""))); ++ gets.add(new Get(ROW_1)); ++ gets.add(new Get(ROW_2)); ++ results = remoteTable.get(gets); ++ assertNotNull(results); ++ assertEquals(0, results.length); ++ } ++ + @Test + public void testPut() throws IOException { + Put put = new Put(ROW_3);" +5bc30fcc6c587c5b4581fbcc772cb4625edf2d4c,Mylyn Reviews,"Merge branch 'master' of git://git.eclipse.org/gitroot/mylyn/org.eclipse.mylyn.reviews +",p,https://github.com/eclipse-mylyn/org.eclipse.mylyn.reviews,"diff --git a/gerrit/org.eclipse.mylyn.gerrit.ui/src/org/eclipse/mylyn/internal/gerrit/ui/wizards/GerritCustomQueryPage.java b/gerrit/org.eclipse.mylyn.gerrit.ui/src/org/eclipse/mylyn/internal/gerrit/ui/wizards/GerritCustomQueryPage.java +index 32074577..133e1cd0 100644 +--- a/gerrit/org.eclipse.mylyn.gerrit.ui/src/org/eclipse/mylyn/internal/gerrit/ui/wizards/GerritCustomQueryPage.java ++++ b/gerrit/org.eclipse.mylyn.gerrit.ui/src/org/eclipse/mylyn/internal/gerrit/ui/wizards/GerritCustomQueryPage.java +@@ -10,6 +10,7 @@ + *********************************************************************/ + package org.eclipse.mylyn.internal.gerrit.ui.wizards; + ++import org.eclipse.jface.wizard.IWizardContainer; + import org.eclipse.mylyn.internal.gerrit.core.GerritQuery; + import org.eclipse.mylyn.tasks.core.IRepositoryQuery; + import org.eclipse.mylyn.tasks.core.TaskRepository; +@@ -122,7 +123,10 @@ public void keyReleased(KeyEvent e) { + SelectionListener buttonSelectionListener = new SelectionListener() { + public void widgetSelected(SelectionEvent e) { + projectText.setEnabled(byProjectButton.getSelection()); +- getContainer().updateButtons(); ++ IWizardContainer c = getContainer(); ++ if (c.getCurrentPage() != null) { ++ c.updateButtons(); ++ } + } + + public void widgetDefaultSelected(SelectionEvent e) {" +fcfbdf64406ac44b771a3c1b91b95d9d9a465391,hadoop,YARN-3181. FairScheduler: Fix up outdated findbugs- issues. 
(kasha)--(cherry picked from commit c2b185def846f5577a130003a533b9c377b58fab)-,p,https://github.com/apache/hadoop,"diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt +index 6d27c85bf5be8..87524586bf320 100644 +--- a/hadoop-yarn-project/CHANGES.txt ++++ b/hadoop-yarn-project/CHANGES.txt +@@ -243,6 +243,8 @@ Release 2.7.0 - UNRELEASED + YARN-2079. Recover NonAggregatingLogHandler state upon nodemanager + restart. (Jason Lowe via junping_du) + ++ YARN-3181. FairScheduler: Fix up outdated findbugs issues. (kasha) ++ + YARN-3124. Fixed CS LeafQueue/ParentQueue to use QueueCapacities to track + capacities-by-label. (Wangda Tan via jianhe) + +diff --git a/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml b/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml +index c45634e1be07b..70f1a71fbcb74 100644 +--- a/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml ++++ b/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml +@@ -142,22 +142,12 @@ + + + +- +- +- +- +- + + + + + + +- +- +- +- +- + + + +@@ -215,18 +205,6 @@ + + + +- +- +- +- +- +- +- +- +- +- +- +- + + + +@@ -426,11 +404,6 @@ + + + +- +- +- +- +- + + + +diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationConfiguration.java +index 0ea731403029e..9cb767d38a5d5 100644 +--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationConfiguration.java ++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationConfiguration.java +@@ -33,6 +33,9 @@ + + import com.google.common.annotations.VisibleForTesting; + ++import javax.annotation.concurrent.ThreadSafe; + -+ Set> test = new HashSet>(); -+ Set> adjacencyList = g.getAdjacencyList(0); -+ assertEquals(9, adjacencyList.size()); -+ -+ Iterator> it = adjacencyList.iterator(); -+ assertTrue(it.hasNext()); -+ it.next(); -+ it.remove(); -+ it.remove(); ++@ThreadSafe + public class AllocationConfiguration extends ReservationSchedulerConfiguration { + private static final AccessControlList EVERYBODY_ACL = new AccessControlList(""*""); + private static final AccessControlList NOBODY_ACL = new AccessControlList("" ""); +@@ -204,12 +207,16 @@ public float getFairSharePreemptionThreshold(String queueName) { + } + + public ResourceWeights getQueueWeight(String queue) { +- ResourceWeights weight = queueWeights.get(queue); +- return (weight == null) ? ResourceWeights.NEUTRAL : weight; ++ synchronized (queueWeights) { ++ ResourceWeights weight = queueWeights.get(queue); ++ return (weight == null) ? 
ResourceWeights.NEUTRAL : weight; + } + } + + public void setQueueWeight(String queue, ResourceWeights weight) { +- queueWeights.put(queue, weight); ++ synchronized (queueWeights) { ++ queueWeights.put(queue, weight); ++ } + } + + public int getUserMaxApps(String user) { +diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationFileLoaderService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationFileLoaderService.java +index 76fa588fc767f..c19aa513e1c1d 100644 +--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationFileLoaderService.java ++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationFileLoaderService.java +@@ -201,7 +201,7 @@ public synchronized void setReloadListener(Listener reloadListener) { + * @throws ParserConfigurationException if XML parser is misconfigured. + * @throws SAXException if config file is malformed. + */ +- public synchronized void reloadAllocations() throws IOException, ++ public void reloadAllocations() throws IOException, + ParserConfigurationException, SAXException, AllocationConfigurationException { + if (allocFile == null) { + return; +diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSOpDurations.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSOpDurations.java +index c2282fdb736ca..c50f281cb6645 100644 +--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSOpDurations.java ++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSOpDurations.java +@@ -31,6 +31,8 @@ + import static org.apache.hadoop.metrics2.lib.Interns.info; + import org.apache.hadoop.metrics2.lib.MutableRate; + ++import javax.annotation.concurrent.ThreadSafe; + -+ /****************************************************************** -+ * -+ * -+ * AdjacentVerticesView tests -+ * -+ * -+ ******************************************************************/ -+ + /** + * Class to capture the performance metrics of FairScheduler. + * This should be a singleton. 
+@@ -38,6 +40,7 @@ + @InterfaceAudience.Private + @InterfaceStability.Unstable + @Metrics(context=""fairscheduler-op-durations"") ++@ThreadSafe + public class FSOpDurations implements MetricsSource { + + @Metric(""Duration for a continuous scheduling run"")" +c8dec345b6e2ad04ffcede24779dd75efd25d599,Vala,"Add used DBus attribute and fix UnixMountEntry lower_case_cprefix + +Fixes bug 741089 +",a,https://github.com/GNOME/vala/,"diff --git a/vala/valausedattr.vala b/vala/valausedattr.vala +index 793c5bc23b..4a17562848 100644 +--- a/vala/valausedattr.vala ++++ b/vala/valausedattr.vala +@@ -67,6 +67,8 @@ public class Vala.UsedAttr : CodeVisitor { + ""GtkChild"", ""name"", ""internal"", """", + ""GtkTemplate"", ""ui"", """", + ""GtkCallback"", ""name"", """", ++ ++ ""DBus"", ""name"", ""no_reply"", ""result"", ""use_string_marshalling"", ""value"", ""signature"", """", + + ""GIR"", ""name"", """" + +diff --git a/vapi/gio-unix-2.0.vapi b/vapi/gio-unix-2.0.vapi +index c426491346..c662002dfb 100644 +--- a/vapi/gio-unix-2.0.vapi ++++ b/vapi/gio-unix-2.0.vapi +@@ -73,34 +73,23 @@ namespace GLib { + public bool close_fd { get; set; } + public int fd { get; construct; } + } +- [CCode (cheader_filename = ""gio/gunixmounts.h"", cname = ""GUnixMountEntry"", free_function = ""g_unix_mount_free"", lower_case_prefix = ""g_unix_mount_"")] ++ [CCode (cheader_filename = ""gio/gunixmounts.h"", cname = ""GUnixMountEntry"", free_function = ""g_unix_mount_free"", lower_case_cprefix = ""g_unix_mount_"")] + [Compact] + public class UnixMountEntry { + [CCode (cname = ""g_unix_mount_at"")] + public UnixMountEntry (string mount_path, uint64 time_read); +- [CCode (cname = ""g_unix_mount_compare"")] + public int compare (GLib.UnixMountEntry mount); + [CCode (cheader_filename = ""gio/gunixmounts.h"", cname = ""g_unix_mounts_get"")] + public static GLib.List @get (out uint64 time_read = null); +- [CCode (cname = ""g_unix_mount_get_device_path"")] + public unowned string get_device_path (); +- [CCode (cname = ""g_unix_mount_get_fs_type"")] + public unowned string get_fs_type (); +- [CCode (cname = ""g_unix_mount_get_mount_path"")] + public unowned string get_mount_path (); +- [CCode (cname = ""g_unix_mount_guess_can_eject"")] + public bool guess_can_eject (); +- [CCode (cname = ""g_unix_mount_guess_icon"")] + public GLib.Icon guess_icon (); +- [CCode (cname = ""g_unix_mount_guess_name"")] + public string guess_name (); +- [CCode (cname = ""g_unix_mount_guess_should_display"")] + public bool guess_should_display (); +- [CCode (cname = ""g_unix_mount_guess_symbolic_icon"")] + public GLib.Icon guess_symbolic_icon (); +- [CCode (cname = ""g_unix_mount_is_readonly"")] + public bool is_readonly (); +- [CCode (cname = ""g_unix_mount_is_system_internal"")] + public bool is_system_internal (); + } + [CCode (cheader_filename = ""gio/gunixmounts.h"")] +diff --git a/vapi/packages/gio-unix-2.0/gio-unix-2.0-custom.vala b/vapi/packages/gio-unix-2.0/gio-unix-2.0-custom.vala +index 4b2b8741f6..424e2ec29c 100644 +--- a/vapi/packages/gio-unix-2.0/gio-unix-2.0-custom.vala ++++ b/vapi/packages/gio-unix-2.0/gio-unix-2.0-custom.vala +@@ -22,31 +22,20 @@ + + namespace GLib { + [Compact] +- [CCode (cname = ""GUnixMountEntry"", cheader_filename = ""gio/gunixmounts.h"", lower_case_prefix = ""g_unix_mount_"", free_function = ""g_unix_mount_free"")] ++ [CCode (cname = ""GUnixMountEntry"", cheader_filename = ""gio/gunixmounts.h"", lower_case_cprefix = ""g_unix_mount_"", free_function = ""g_unix_mount_free"")] + public class UnixMountEntry { + [CCode 
(cname = ""g_unix_mount_at"")] + public UnixMountEntry (string mount_path, uint64 time_read); +- [CCode (cname = ""g_unix_mount_compare"")] + public int compare (GLib.UnixMountEntry mount); +- [CCode (cname = ""g_unix_mount_get_device_path"")] + public unowned string get_device_path (); +- [CCode (cname = ""g_unix_mount_get_fs_type"")] + public unowned string get_fs_type (); +- [CCode (cname = ""g_unix_mount_get_mount_path"")] + public unowned string get_mount_path (); +- [CCode (cname = ""g_unix_mount_guess_can_eject"")] + public bool guess_can_eject (); +- [CCode (cname = ""g_unix_mount_guess_icon"")] + public GLib.Icon guess_icon (); +- [CCode (cname = ""g_unix_mount_guess_name"")] + public string guess_name (); +- [CCode (cname = ""g_unix_mount_guess_should_display"")] + public bool guess_should_display (); +- [CCode (cname = ""g_unix_mount_guess_symbolic_icon"")] + public GLib.Icon guess_symbolic_icon (); +- [CCode (cname = ""g_unix_mount_is_readonly"")] + public bool is_readonly (); +- [CCode (cname = ""g_unix_mount_is_system_internal"")] + public bool is_system_internal (); + + [CCode (cname = ""g_unix_mounts_get"", cheader_filename = ""gio/gunixmounts.h"")]" +9c301fc289a15a84552950737df03f9d0487150f,Delta Spike,"DELTASPIKE-382 add ConfigFilter mechanism + +This allows to filter DeltaSpike configuration values, +e.g. for applying decryption of secure configuration values +on the fly. +",a,https://github.com/apache/deltaspike,"diff --git a/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/api/config/ConfigResolver.java b/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/api/config/ConfigResolver.java +index b7802a38b..4cd956e33 100644 +--- a/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/api/config/ConfigResolver.java ++++ b/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/api/config/ConfigResolver.java +@@ -32,6 +32,7 @@ + import javax.enterprise.inject.Typed; + + import org.apache.deltaspike.core.api.projectstage.ProjectStage; ++import org.apache.deltaspike.core.spi.config.ConfigFilter; + import org.apache.deltaspike.core.spi.config.ConfigSource; + import org.apache.deltaspike.core.spi.config.ConfigSourceProvider; + import org.apache.deltaspike.core.util.ClassUtils; +@@ -58,6 +59,13 @@ public final class ConfigResolver + private static Map configSources + = new ConcurrentHashMap(); + ++ /** ++ * The content of this map will hold the List of ConfigFilters ++ * for each WebApp/EAR, etc (thus the ClassLoader). ++ */ ++ private static Map> configFilters ++ = new ConcurrentHashMap>(); + -+ @Test public void testAdjacentVertices() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ for (int i = 0; i < 10; ++i) { -+ for (int j = i + 1; j < 10; ++j) { -+ DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, j); -+ g.add(e); -+ } -+ } + private static volatile ProjectStage projectStage = null; + + private ConfigResolver() +@@ -96,6 +104,35 @@ public static synchronized void freeConfigSources() + configSources.remove(ClassUtils.getClassLoader(null)); + } + ++ /** ++ * Add a {@link ConfigFilter} to the ConfigResolver. ++ * This will only affect the current WebApp ++ * (or more precisely the current ClassLoader and it's children). 
++ * @param configFilter ++ */ ++ public static void addConfigFilter(ConfigFilter configFilter) ++ { + -+ Set test = new HashSet(); -+ Set adjacent = g.getNeighbors(0); -+ assertEquals(9, adjacent.size()); -+ for (int i = 1; i < 10; ++i) -+ assertTrue(adjacent.contains(i)); -+ assertFalse(adjacent.contains(0)); -+ assertFalse(adjacent.contains(10)); ++ List currentConfigFilters = getConfigFilters(); ++ currentConfigFilters.add(configFilter); + } + -+ @Test(expected=UnsupportedOperationException.class) public void testAdjacentVerticesAdd() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ for (int i = 0; i < 10; ++i) { -+ for (int j = i + 1; j < 10; ++j) { -+ DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, j); -+ g.add(e); -+ } ++ /** ++ * @return the {@link ConfigFilter}s for the current application. ++ */ ++ public static List getConfigFilters() ++ { ++ ClassLoader cl = ClassUtils.getClassLoader(null); ++ List currentConfigFilters = configFilters.get(cl); ++ if (currentConfigFilters == null) ++ { ++ currentConfigFilters = new ArrayList(); ++ configFilters.put(cl, currentConfigFilters); + } + -+ Set test = new HashSet(); -+ Set adjacent = g.getNeighbors(0); -+ adjacent.add(1); ++ return currentConfigFilters; + } + -+ @Test(expected=UnsupportedOperationException.class) public void testAdjacentVerticesRemove() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ for (int i = 0; i < 10; ++i) { -+ for (int j = i + 1; j < 10; ++j) { -+ DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, j); -+ g.add(e); -+ } -+ } + /** + * Resolve the property value by going through the list of configured {@link ConfigSource}s + * and use the one with the highest priority. If no configured value has been found that +@@ -133,8 +170,8 @@ public static String getPropertyValue(String key) + if (value != null) + { + LOG.log(Level.FINE, ""found value {0} for key {1} in ConfigSource {2}."", +- new Object[]{value, key, configSource.getConfigName()}); +- return value; ++ new Object[]{filterConfigValueForLog(key, value), key, configSource.getConfigName()}); ++ return filterConfigValue(key, value); + } + + LOG.log(Level.FINER, ""NO value found for key {0} in ConfigSource {1}."", +@@ -275,9 +312,13 @@ public static List getAllPropertyValues(String key) + { + value = configSource.getPropertyValue(key); + +- if (value != null && !result.contains(value)) ++ if (value != null) + { +- result.add(value); ++ value = filterConfigValue(key, value); ++ if (!result.contains(value)) ++ { ++ result.add(value); ++ } + } + } + +@@ -399,5 +440,31 @@ private static String fallbackToDefaultIfEmpty(String key, String value, String + return value; + } + ++ private static String filterConfigValue(String key, String value) ++ { ++ List currentConfigFilters = getConfigFilters(); + -+ Set test = new HashSet(); -+ Set adjacent = g.getNeighbors(0); -+ adjacent.remove(1); -+ } ++ String filteredValue = value; + -+ @Test public void testAdjacentVerticesIterator() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ for (int i = 0; i < 10; ++i) { -+ for (int j = i + 1; j < 10; ++j) { -+ DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, j); -+ g.add(e); -+ } ++ for (ConfigFilter filter : currentConfigFilters) ++ { ++ filteredValue = filter.filterValue(key, filteredValue); + } -+ -+ Set test = new HashSet(); -+ Set adjacent = g.getNeighbors(0); -+ Iterator it = adjacent.iterator(); -+ while (it.hasNext()) -+ assertTrue(test.add(it.next())); -+ assertEquals(9, test.size()); ++ return filteredValue; 
+ } + ++ private static String filterConfigValueForLog(String key, String value) ++ { ++ List currentConfigFilters = getConfigFilters(); + -+ @Test(expected=UnsupportedOperationException.class) public void testAdjacentVerticesIteratorRemove() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ for (int i = 0; i < 10; ++i) { -+ for (int j = i + 1; j < 10; ++j) { -+ DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, j); -+ g.add(e); -+ } -+ } ++ String logValue = value; + -+ Set test = new HashSet(); -+ Set adjacent = g.getNeighbors(0); -+ Iterator it = adjacent.iterator(); -+ assertTrue(it.hasNext()); -+ it.next(); -+ it.remove(); ++ for (ConfigFilter filter : currentConfigFilters) ++ { ++ logValue = filter.filterValueForLog(key, logValue); ++ } ++ return logValue; + } + -+ /****************************************************************** -+ * -+ * -+ * Subgraph tests -+ * -+ * -+ ******************************************************************/ -+ -+ @Test public void testSubgraph() { -+ DirectedMultigraph g = new DirectedMultigraph(); + + } +diff --git a/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/spi/config/ConfigFilter.java b/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/spi/config/ConfigFilter.java +new file mode 100644 +index 000000000..5f851e2c6 +--- /dev/null ++++ b/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/spi/config/ConfigFilter.java +@@ -0,0 +1,45 @@ ++/* ++ * Licensed to the Apache Software Foundation (ASF) under one ++ * or more contributor license agreements. See the NOTICE file ++ * distributed with this work for additional information ++ * regarding copyright ownership. The ASF licenses this file ++ * to you under the Apache License, Version 2.0 (the ++ * ""License""); you may not use this file except in compliance ++ * with the License. You may obtain a copy of the License at ++ * ++ * http://www.apache.org/licenses/LICENSE-2.0 ++ * ++ * Unless required by applicable law or agreed to in writing, ++ * software distributed under the License is distributed on an ++ * ""AS IS"" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY ++ * KIND, either express or implied. See the License for the ++ * specific language governing permissions and limitations ++ * under the License. ++ */ ++package org.apache.deltaspike.core.spi.config; + -+ // fully connected -+ for (int i = 0; i < 10; i++) { -+ for (int j = i+1; j < 10; ++j) -+ g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); -+ } ++/** ++ * A filter which can be added to the ++ * {@link org.apache.deltaspike.core.api.config.ConfigResolver}. ++ * The filter can be used to decrypt config values or prepare ++ * values for logging. ++ */ ++public interface ConfigFilter ++{ ++ /** ++ * Filter the given configuration value ++ * @param key ++ * @param value ++ * @return the filtered value or the original input String if no filter shall be applied ++ */ ++ String filterValue(String key, String value); + -+ // (n * (n-1)) / 2 -+ assertEquals( (10 * 9) / 2, g.size()); -+ assertEquals(10, g.order()); ++ /** ++ * Filter the given configuration value for usage in logs. ++ * This might be used to mask out passwords, etc. 
++ * @param key ++ * @param value ++ * @return the filtered value or the original input String if no filter shall be applied ++ */ ++ String filterValueForLog(String key, String value); ++} +diff --git a/deltaspike/core/api/src/test/java/org/apache/deltaspike/test/api/config/ConfigResolverTest.java b/deltaspike/core/api/src/test/java/org/apache/deltaspike/test/api/config/ConfigResolverTest.java +index d30e7de52..6aa81dae6 100644 +--- a/deltaspike/core/api/src/test/java/org/apache/deltaspike/test/api/config/ConfigResolverTest.java ++++ b/deltaspike/core/api/src/test/java/org/apache/deltaspike/test/api/config/ConfigResolverTest.java +@@ -20,6 +20,7 @@ + + import org.apache.deltaspike.core.api.config.ConfigResolver; + import org.apache.deltaspike.core.api.projectstage.ProjectStage; ++import org.apache.deltaspike.core.spi.config.ConfigFilter; + import org.apache.deltaspike.core.util.ProjectStageProducer; + import org.junit.Assert; + import org.junit.Test; +@@ -74,7 +75,8 @@ public void testGetProjectStageAwarePropertyValue() + } + + @Test +- public void testGetPropertyAwarePropertyValue() { ++ public void testGetPropertyAwarePropertyValue() ++ { + ProjectStageProducer.setProjectStage(ProjectStage.UnitTest); + + Assert.assertNull(ConfigResolver.getPropertyAwarePropertyValue(""notexisting"", null)); +@@ -100,4 +102,51 @@ public void testGetPropertyAwarePropertyValue() { + Assert.assertEquals(""DefaultDataSource"", ConfigResolver.getPropertyAwarePropertyValue(""dataSource"", ""dbvendorX"", null)); + Assert.assertEquals(DEFAULT_VALUE, ConfigResolver.getPropertyAwarePropertyValue(""dataSourceX"", ""dbvendorX"", DEFAULT_VALUE)); + } + -+ Set vertices = new LinkedHashSet(); -+ for (int i = 0; i < 5; ++i) -+ vertices.add(i); ++ @Test ++ public void testConfigFilter() ++ { + -+ DirectedMultigraph subgraph = g.subgraph(vertices); -+ assertEquals(5, subgraph.order()); -+ assertEquals( (5 * 4) / 2, subgraph.size()); -+ } ++ ConfigFilter configFilter = new TestConfigFilter(); + -+ @Test public void testSubgraphContainsVertex() { -+ DirectedMultigraph g = new DirectedMultigraph(); ++ Assert.assertEquals(""shouldGetDecrypted: value"", configFilter.filterValue(""somekey.encrypted"", ""value"")); ++ Assert.assertEquals(""**********"", configFilter.filterValueForLog(""somekey.password"", ""value"")); + -+ // fully connected -+ for (int i = 0; i < 10; i++) { -+ for (int j = i+1; j < 10; ++j) -+ g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); -+ } ++ ConfigResolver.addConfigFilter(configFilter); + -+ // (n * (n-1)) / 2 -+ assertEquals( (10 * 9) / 2, g.size()); -+ assertEquals(10, g.order()); ++ Assert.assertEquals(""shouldGetDecrypted: value"", ConfigResolver.getPropertyValue(""testkey4.encrypted"")); ++ Assert.assertEquals(""shouldGetDecrypted: value"", ConfigResolver.getProjectStageAwarePropertyValue(""testkey4.encrypted"")); ++ Assert.assertEquals(""shouldGetDecrypted: value"", ConfigResolver.getProjectStageAwarePropertyValue(""testkey4.encrypted"", null)); ++ Assert.assertEquals(""shouldGetDecrypted: value"", ConfigResolver.getPropertyAwarePropertyValue(""testkey4.encrypted"", ""dbvendor"")); ++ Assert.assertEquals(""shouldGetDecrypted: value"", ConfigResolver.getPropertyAwarePropertyValue(""testkey4.encrypted"", ""dbvendor"", null)); + -+ Set vertices = new LinkedHashSet(); -+ for (int i = 0; i < 5; ++i) -+ vertices.add(i); ++ List allPropertyValues = ConfigResolver.getAllPropertyValues(""testkey4.encrypted""); ++ Assert.assertNotNull(allPropertyValues); ++ Assert.assertEquals(1, 
allPropertyValues.size()); ++ Assert.assertEquals(""shouldGetDecrypted: value"", allPropertyValues.get(0)); + -+ DirectedMultigraph subgraph = g.subgraph(vertices); -+ assertEquals(5, subgraph.order()); -+ assertEquals( (5 * 4) / 2, subgraph.size()); -+ for (int i = 0; i < 5; ++i) -+ assertTrue(subgraph.contains(i)); -+ for (int i = 5; i < 10; ++i) { -+ assertTrue(g.contains(i)); -+ assertFalse(subgraph.contains(i)); -+ } + } + -+ @Test public void testSubgraphContainsEdge() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ -+ // fully connected -+ for (int i = 0; i < 10; i++) { -+ for (int j = i+1; j < 10; ++j) -+ g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); -+ } -+ -+ // (n * (n-1)) / 2 -+ assertEquals( (10 * 9) / 2, g.size()); -+ assertEquals(10, g.order()); -+ -+ Set vertices = new LinkedHashSet(); -+ for (int i = 0; i < 5; ++i) -+ vertices.add(i); -+ -+ DirectedMultigraph subgraph = g.subgraph(vertices); -+ assertEquals(5, subgraph.order()); -+ assertEquals( (5 * 4) / 2, subgraph.size()); -+ for (int i = 0; i < 5; ++i) { -+ for (int j = i+1; j < 5; ++j) { -+ assertTrue(subgraph.contains(new SimpleDirectedTypedEdge(""type-1"",i, j))); ++ public static class TestConfigFilter implements ConfigFilter ++ { ++ @Override ++ public String filterValue(String key, String value) ++ { ++ if (key.contains(""encrypted"")) ++ { ++ return ""shouldGetDecrypted: "" + value; + } ++ return value; + } + -+ for (int i = 5; i < 10; ++i) { -+ for (int j = i+1; j < 10; ++j) { -+ DirectedTypedEdge e = new SimpleDirectedTypedEdge(""type-1"",i, j); -+ assertTrue(g.contains(e)); -+ assertFalse(subgraph.contains(e)); ++ @Override ++ public String filterValueForLog(String key, String value) ++ { ++ if (key.contains(""password"")) ++ { ++ return ""**********""; + } ++ return value; + } + } + } +diff --git a/deltaspike/core/api/src/test/java/org/apache/deltaspike/test/api/config/TestConfigSource.java b/deltaspike/core/api/src/test/java/org/apache/deltaspike/test/api/config/TestConfigSource.java +index d0f2c938b..3b073a9b2 100644 +--- a/deltaspike/core/api/src/test/java/org/apache/deltaspike/test/api/config/TestConfigSource.java ++++ b/deltaspike/core/api/src/test/java/org/apache/deltaspike/test/api/config/TestConfigSource.java +@@ -62,6 +62,9 @@ public TestConfigSource() + props.put(""dbvendor2.Production"", ""mysql""); + props.put(""dbvendor2"", ""postgresql""); + ++ props.put(""testkey4.encrypted"", ""value""); ++ props.put(""testkey4.password"", ""mysecretvalue""); + -+ @Test public void testSubgraphAddEdge() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ -+ // fully connected -+ for (int i = 0; i < 10; i++) { -+ for (int j = i+1; j < i+2 && j < 10; ++j) -+ assertTrue(g.add(new SimpleDirectedTypedEdge(""type-1"",i, j))); -+ } -+ -+ assertEquals(9, g.size()); -+ assertEquals(10, g.order()); -+ -+ Set vertices = new LinkedHashSet(); -+ for (int i = 0; i < 5; ++i) -+ vertices.add(i); -+ -+ DirectedMultigraph subgraph = g.subgraph(vertices); -+ assertEquals(5, subgraph.order()); -+ assertEquals(4, subgraph.size()); -+ -+ // Add an edge to a new vertex -+ assertTrue(subgraph.add(new SimpleDirectedTypedEdge(""type-1"", 1, 0))); -+ assertEquals(5, subgraph.size()); -+ assertEquals(5, subgraph.order()); -+ assertEquals(10, g.size()); -+ -+ } -+ -+ @Test(expected=UnsupportedOperationException.class) public void testSubgraphAddEdgeNewVertex() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ -+ // fully connected -+ for (int i = 0; i < 10; i++) { -+ for (int j = i+1; j < 10; ++j) -+ g.add(new 
SimpleDirectedTypedEdge(""type-1"",i, j)); -+ } -+ -+ // (n * (n-1)) / 2 -+ assertEquals( (10 * 9) / 2, g.size()); -+ assertEquals(10, g.order()); -+ -+ Set vertices = new LinkedHashSet(); -+ for (int i = 0; i < 5; ++i) -+ vertices.add(i); -+ -+ DirectedMultigraph subgraph = g.subgraph(vertices); -+ assertEquals(5, subgraph.order()); -+ assertEquals( (5 * 4) / 2, subgraph.size()); -+ -+ // Add an edge to a new vertex -+ assertTrue(subgraph.add(new SimpleDirectedTypedEdge(""type-1"",0, 5))); -+ assertEquals( (5 * 4) / 2 + 1, subgraph.size()); -+ assertEquals(6, subgraph.order()); -+ assertEquals(11, g.order()); -+ assertEquals( (9*10)/2 + 1, g.size()); -+ } -+ -+ @Test public void testSubgraphRemoveEdge() { -+ DirectedMultigraph g = new DirectedMultigraph(); -+ -+ // fully connected -+ for (int i = 0; i < 10; i++) { -+ for (int j = i+1; j < 10; ++j) -+ g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); -+ } + } + + @Override" +e98541030c5e0aadfdb194dbb55254f404219600,orientdb,Huge refactoring on GraphDB: - changed class- names in vertex and edge - Optimized memory consumption by removing nested- records - Optimized speed in ORecord.equals() and hashCode(): now avoid field- checks (experimental)--,p,https://github.com/orientechnologies/orientdb,"diff --git a/core/src/main/java/com/orientechnologies/orient/core/record/ORecordAbstract.java b/core/src/main/java/com/orientechnologies/orient/core/record/ORecordAbstract.java +index cd054ea61c5..cc4674f4842 100644 +--- a/core/src/main/java/com/orientechnologies/orient/core/record/ORecordAbstract.java ++++ b/core/src/main/java/com/orientechnologies/orient/core/record/ORecordAbstract.java +@@ -185,6 +185,8 @@ public ORecordAbstract save() { + OSerializationThreadLocal.INSTANCE.get().clear(); + + _database.save(this); ++ ++ OSerializationThreadLocal.INSTANCE.get().clear(); + return this; + } + +diff --git a/core/src/main/java/com/orientechnologies/orient/core/record/ORecordVirtualAbstract.java b/core/src/main/java/com/orientechnologies/orient/core/record/ORecordVirtualAbstract.java +index 91a40883f02..6ea13c242fc 100644 +--- a/core/src/main/java/com/orientechnologies/orient/core/record/ORecordVirtualAbstract.java ++++ b/core/src/main/java/com/orientechnologies/orient/core/record/ORecordVirtualAbstract.java +@@ -15,10 +15,8 @@ + */ + package com.orientechnologies.orient.core.record; + +-import java.util.Collection; + import java.util.LinkedHashMap; + import java.util.Map; +-import java.util.Map.Entry; + + import com.orientechnologies.orient.core.db.record.ODatabaseRecord; + import com.orientechnologies.orient.core.metadata.schema.OType; +@@ -73,22 +71,22 @@ public ORecordSchemaAwareAbstract reset() { + @Override + public int hashCode() { + int result = super.hashCode(); +- +- if (!_recordId.isValid() && _fieldValues != null) +- for (Entry field : _fieldValues.entrySet()) { +- if (field.getKey() != null) +- result += field.getKey().hashCode(); +- +- if (field.getValue() != null) +- if (field.getValue() instanceof ORecord) +- // AVOID TO GET THE HASH-CODE OF THE VALUE TO AVOID STACK OVERFLOW FOR CIRCULAR REFS +- result += 31 * ((ORecord) field.getValue()).getIdentity().hashCode(); +- else if (field.getValue() instanceof Collection) +- // AVOID TO GET THE HASH-CODE OF THE VALUE TO AVOID STACK OVERFLOW FOR CIRCULAR REFS +- result += ((Collection) field.getValue()).size() * 31; +- else +- result += field.getValue().hashCode(); +- } ++ // ++ // if (!_recordId.isValid() && _fieldValues != null) ++ // for (Entry field : _fieldValues.entrySet()) { ++ // if 
(field.getKey() != null) ++ // result += field.getKey().hashCode(); ++ // ++ // if (field.getValue() != null) ++ // if (field.getValue() instanceof ORecord) ++ // // AVOID TO GET THE HASH-CODE OF THE VALUE TO AVOID STACK OVERFLOW FOR CIRCULAR REFS ++ // result += 31 * ((ORecord) field.getValue()).getIdentity().hashCode(); ++ // else if (field.getValue() instanceof Collection) ++ // // AVOID TO GET THE HASH-CODE OF THE VALUE TO AVOID STACK OVERFLOW FOR CIRCULAR REFS ++ // result += ((Collection) field.getValue()).size() * 31; ++ // else ++ // result += field.getValue().hashCode(); ++ // } + + return result; + } +@@ -99,37 +97,39 @@ public boolean equals(Object obj) { + return false; + + if (!_recordId.isValid()) { +- final ORecordVirtualAbstract other = (ORecordVirtualAbstract) obj; +- +- // NO PERSISTENT OBJECT: COMPARE EACH FIELDS +- if (_fieldValues == null || other._fieldValues == null) +- // CAN'T COMPARE FIELDS: RETURN FALSE +- return false; +- +- if (_fieldValues.size() != other._fieldValues.size()) +- // FIELD SIZES ARE DIFFERENTS +- return false; +- +- String k; +- Object v; +- Object otherV; +- for (Entry field : _fieldValues.entrySet()) { +- k = field.getKey(); +- if (k != null && !other.containsField(k)) +- // FIELD NOT PRESENT IN THE OTHER RECORD +- return false; +- +- v = _fieldValues.get(k); +- otherV = other._fieldValues.get(k); +- if (v == null && otherV == null) +- continue; +- +- if (v == null && otherV != null || otherV == null && v != null) +- return false; +- +- if (!v.equals(otherV)) +- return false; +- } ++ // ++ // final ORecordVirtualAbstract other = (ORecordVirtualAbstract) obj; ++ // ++ // // NO PERSISTENT OBJECT: COMPARE EACH FIELDS ++ // if (_fieldValues == null || other._fieldValues == null) ++ // // CAN'T COMPARE FIELDS: RETURN FALSE ++ // return false; ++ // ++ // if (_fieldValues.size() != other._fieldValues.size()) ++ // // FIELD SIZES ARE DIFFERENTS ++ // return false; ++ // ++ // String k; ++ // Object v; ++ // Object otherV; ++ // for (Entry field : _fieldValues.entrySet()) { ++ // k = field.getKey(); ++ // if (k != null && !other.containsField(k)) ++ // // FIELD NOT PRESENT IN THE OTHER RECORD ++ // return false; ++ // ++ // v = _fieldValues.get(k); ++ // otherV = other._fieldValues.get(k); ++ // if (v == null && otherV == null) ++ // continue; ++ // ++ // if (v == null && otherV != null || otherV == null && v != null) ++ // return false; ++ // ++ // if (!v.equals(otherV)) ++ // return false; ++ // } ++ return false; + } + + return true; +diff --git a/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/OSerializationThreadLocal.java b/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/OSerializationThreadLocal.java +index 4faafab4991..b37dfb62aaa 100644 +--- a/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/OSerializationThreadLocal.java ++++ b/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/OSerializationThreadLocal.java +@@ -15,7 +15,7 @@ + */ + package com.orientechnologies.orient.core.serialization.serializer.record; + +-import java.util.HashMap; ++import java.util.IdentityHashMap; + import java.util.Map; + + import com.orientechnologies.orient.core.id.ORecordId; +@@ -26,6 +26,6 @@ public class OSerializationThreadLocal extends ThreadLocal, ORecordId> initialValue() { +- return new HashMap, ORecordId>(); ++ return new IdentityHashMap, ORecordId>(); + } + } +\ No newline at end of file +diff --git 
a/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerSchemaAware2CSV.java b/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerSchemaAware2CSV.java +index 3c99ffa202c..d172294974b 100644 +--- a/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerSchemaAware2CSV.java ++++ b/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerSchemaAware2CSV.java +@@ -202,6 +202,8 @@ protected String toString(ORecordInternal iRecord, final String iFormat, fina + + i++; + } ++ ++ iMarshalledRecords.remove(record); + + return buffer.toString(); + }" +b753b88a432fca3a8947a1ea0743e44f626e23a2,Valadoc,"Add warning & note tags +",a,https://github.com/GNOME/vala/,"diff --git a/icons/Makefile.am b/icons/Makefile.am +index 5869bff058..4859e248bd 100644 +--- a/icons/Makefile.am ++++ b/icons/Makefile.am +@@ -5,6 +5,7 @@ iconsdir = $(datadir)/valadoc/icons + + + dist_icons_DATA = \ ++ tip.png \ + warning.png \ + abstractclass.png \ + abstractmethod.png \ +diff --git a/icons/tip.png b/icons/tip.png +new file mode 100644 +index 0000000000..6ccf512f3d +Binary files /dev/null and b/icons/tip.png differ +diff --git a/src/doclets/gtkdoc/commentconverter.vala b/src/doclets/gtkdoc/commentconverter.vala +index a0c3372ce3..e10b8aa3a5 100755 +--- a/src/doclets/gtkdoc/commentconverter.vala ++++ b/src/doclets/gtkdoc/commentconverter.vala +@@ -172,7 +172,19 @@ public class Gtkdoc.CommentConverter : ContentVisitor { + current_builder.append (""""); + } + } +- + -+ // (n * (n-1)) / 2 -+ assertEquals( (10 * 9) / 2, g.size()); -+ assertEquals(10, g.order()); ++ public override void visit_warning (Warning element) { ++ current_builder.append (""""); ++ element.accept_children (this); ++ current_builder.append (""""); ++ } + -+ Set vertices = new LinkedHashSet(); -+ for (int i = 0; i < 5; ++i) -+ vertices.add(i); ++ public override void visit_note (Note element) { ++ current_builder.append (""""); ++ element.accept_children (this); ++ current_builder.append (""""); ++ } + -+ DirectedMultigraph subgraph = g.subgraph(vertices); -+ assertEquals(5, subgraph.order()); -+ assertEquals( (5 * 4) / 2, subgraph.size()); + public override void visit_page (Page page) { + page.accept_children (this); + } +diff --git a/src/libvaladoc/Makefile.am b/src/libvaladoc/Makefile.am +index 310a3dbd13..fe3115c8e1 100755 +--- a/src/libvaladoc/Makefile.am ++++ b/src/libvaladoc/Makefile.am +@@ -26,11 +26,13 @@ libvaladoc_la_VALASOURCES = \ + moduleloader.vala \ + settings.vala \ + markupwriter.vala \ ++ gtkdocmarkupwriter.vala \ + devhelp-markupwriter.vala \ + ctyperesolver.vala \ + markupsourcelocation.vala \ + markuptokentype.vala \ + markupreader.vala \ ++ gtkdocrenderer.vala \ + documentation/commentscanner.vala \ + documentation/documentation.vala \ + documentation/documentationparser.vala \ +@@ -103,6 +105,8 @@ libvaladoc_la_VALASOURCES = \ + content/listitem.vala \ + content/page.vala \ + content/paragraph.vala \ ++ content/warning.vala \ ++ content/note.vala \ + content/resourcelocator.vala \ + content/run.vala \ + content/sourcecode.vala \ +diff --git a/src/libvaladoc/content/contentfactory.vala b/src/libvaladoc/content/contentfactory.vala +index 55c44f7ac0..19cf41be21 100755 +--- a/src/libvaladoc/content/contentfactory.vala ++++ b/src/libvaladoc/content/contentfactory.vala +@@ -76,6 +76,13 @@ public class 
Valadoc.Content.ContentFactory : Object { + return (Paragraph) configure (new Paragraph ()); + } + ++ public Warning create_warning () { ++ return (Warning) configure (new Warning ()); ++ } ++ public Note create_note () { ++ return (Note) configure (new Note ()); ++ } + -+ // Remove an existing edge -+ assertTrue(subgraph.remove(new SimpleDirectedTypedEdge(""type-1"",0, 1))); -+ assertEquals( (5 * 4) / 2 - 1, subgraph.size()); -+ assertEquals(5, subgraph.order()); -+ assertEquals(10, g.order()); -+ assertEquals( (9*10)/2 - 1, g.size()); + public Run create_run (Run.Style style) { + return (Run) configure (new Run (style)); + } +diff --git a/src/libvaladoc/content/contentvisitor.vala b/src/libvaladoc/content/contentvisitor.vala +index bacffc3ddf..3eb3a4b3d4 100755 +--- a/src/libvaladoc/content/contentvisitor.vala ++++ b/src/libvaladoc/content/contentvisitor.vala +@@ -52,6 +52,12 @@ public abstract class Valadoc.Content.ContentVisitor : Object { + public virtual void visit_paragraph (Paragraph element) { + } + ++ public virtual void visit_warning (Warning element) { ++ } + -+ // Remove a non-existent edge, which should have no effect even though -+ // the edge is present in the backing graph -+ assertFalse(subgraph.remove(new SimpleDirectedTypedEdge(""type-1"",0, 6))); -+ assertEquals( (5 * 4) / 2 - 1, subgraph.size()); -+ assertEquals(5, subgraph.order()); -+ assertEquals(10, g.order()); -+ assertEquals( (9*10)/2 - 1, g.size()); -+ } ++ public virtual void visit_note (Note element) { ++ } + + public virtual void visit_page (Page element) { + } + +diff --git a/src/libvaladoc/content/note.vala b/src/libvaladoc/content/note.vala +new file mode 100755 +index 0000000000..d2b16c557c +--- /dev/null ++++ b/src/libvaladoc/content/note.vala +@@ -0,0 +1,40 @@ ++/* note.vala ++ * ++ * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Lesser General Public ++ * License as published by the Free Software Foundation; either ++ * version 2.1 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Lesser General Public License for more details. 
++ * ++ * You should have received a copy of the GNU Lesser General Public ++ * License along with this library; if not, write to the Free Software ++ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA ++ * ++ * Author: ++ * Didier 'Ptitjes Villevalois ++ */ + -+ /****************************************************************** -+ * -+ * -+ * SubgraphVertexView tests -+ * -+ * -+ ******************************************************************/ ++using Gee; + + -+ @Test(expected=UnsupportedOperationException.class) public void testSubgraphVerticesAdd() { -+ DirectedMultigraph g = new DirectedMultigraph(); ++public class Valadoc.Content.Note : BlockContent, Block { ++ internal Note () { ++ base (); ++ } + -+ // fully connected -+ for (int i = 0; i < 10; i++) { -+ for (int j = i+1; j < 10; ++j) -+ g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); -+ } ++ public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { ++ // Check inline content ++ base.check (api_root, container, reporter, settings); ++ } + -+ // (n * (n-1)) / 2 -+ assertEquals( (10 * 9) / 2, g.size()); -+ assertEquals(10, g.order()); ++ public override void accept (ContentVisitor visitor) { ++ visitor.visit_note (this); ++ } ++} + -+ Set vertices = new LinkedHashSet(); -+ for (int i = 0; i < 5; ++i) -+ vertices.add(i); +diff --git a/src/libvaladoc/content/warning.vala b/src/libvaladoc/content/warning.vala +new file mode 100755 +index 0000000000..e848c43f30 +--- /dev/null ++++ b/src/libvaladoc/content/warning.vala +@@ -0,0 +1,40 @@ ++/* warning.vala ++ * ++ * Copyright (C) 2008-2009 Florian Brosch, Didier Villevalois ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Lesser General Public ++ * License as published by the Free Software Foundation; either ++ * version 2.1 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Lesser General Public License for more details. 
++ * ++ * You should have received a copy of the GNU Lesser General Public ++ * License along with this library; if not, write to the Free Software ++ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA ++ * ++ * Author: ++ * Didier 'Ptitjes Villevalois ++ */ + -+ DirectedMultigraph subgraph = g.subgraph(vertices); -+ assertEquals(5, subgraph.order()); -+ assertEquals( (5 * 4) / 2, subgraph.size()); ++using Gee; + -+ Set test = subgraph.vertices(); -+ assertEquals(5, test.size()); + -+ // Add a vertex -+ assertTrue(test.add(5)); -+ assertEquals(6, test.size()); -+ assertEquals(6, subgraph.order()); -+ assertEquals(11, g.order()); -+ assertEquals( (5*4)/2, subgraph.size()); -+ } ++public class Valadoc.Content.Warning : BlockContent, Block { ++ internal Warning () { ++ base (); ++ } + -+ @Test(expected=UnsupportedOperationException.class) public void testSubgraphVerticesRemove() { -+ DirectedMultigraph g = new DirectedMultigraph(); ++ public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { ++ // Check inline content ++ base.check (api_root, container, reporter, settings); ++ } + -+ // fully connected -+ for (int i = 0; i < 10; i++) { -+ for (int j = i+1; j < 10; ++j) -+ g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); -+ } ++ public override void accept (ContentVisitor visitor) { ++ visitor.visit_warning (this); ++ } ++} + -+ // (n * (n-1)) / 2 -+ assertEquals( (10 * 9) / 2, g.size()); -+ assertEquals(10, g.order()); +diff --git a/src/libvaladoc/documentation/documentationparser.vala b/src/libvaladoc/documentation/documentationparser.vala +index b2a96f0252..1b29698dd4 100755 +--- a/src/libvaladoc/documentation/documentationparser.vala ++++ b/src/libvaladoc/documentation/documentationparser.vala +@@ -499,6 +499,50 @@ public class Valadoc.DocumentationParser : Object, ResourceLocator { + } + }); + ++ Rule warning = ++ Rule.seq ({ ++ TokenType.str (""Warning:""), ++ optional_invisible_spaces, ++ Rule.many ({ ++ Rule.seq({optional_invisible_spaces, run}), ++ TokenType.EOL.action (() => { add_content_space (); }) ++ }) ++ }) ++ .set_name (""Warning"") ++ .set_start (() => { push (_factory.create_paragraph ()); }) ++ .set_reduce (() => { ++ var head = _factory.create_warning (); ++ head.content.add ((Paragraph) pop ()); ++ ((BlockContent) peek ()).content.add (head); ++ ++ Text last_element = head.content.last () as Text; ++ if (last_element != null) { ++ last_element.content._chomp (); ++ } ++ }); + -+ Set vertices = new LinkedHashSet(); -+ for (int i = 0; i < 5; ++i) -+ vertices.add(i); ++ Rule note = ++ Rule.seq ({ ++ TokenType.str (""Note:""), ++ optional_invisible_spaces, ++ Rule.many ({ ++ Rule.seq({optional_invisible_spaces, run}), ++ TokenType.EOL.action (() => { add_content_space (); }) ++ }) ++ }) ++ .set_name (""Note"") ++ .set_start (() => { push (_factory.create_paragraph ()); }) ++ .set_reduce (() => { ++ var head = _factory.create_note (); ++ head.content.add ((Paragraph) pop ()); ++ ((BlockContent) peek ()).content.add (head); ++ ++ Text last_element = head.content.last () as Text; ++ if (last_element != null) { ++ last_element.content._chomp (); ++ } ++ }); + -+ DirectedMultigraph subgraph = g.subgraph(vertices); -+ assertEquals(5, subgraph.order()); -+ assertEquals( (5 * 4) / 2, subgraph.size()); + Rule indented_item = + Rule.seq ({ + Rule.many ({ +@@ -667,6 +711,8 @@ public class Valadoc.DocumentationParser : Object, ResourceLocator { + indented_blocks, + table, + headline, ++ warning, ++ 
note, + paragraph + }) + .set_name (""Blocks""); +diff --git a/src/libvaladoc/html/htmlmarkupwriter.vala b/src/libvaladoc/html/htmlmarkupwriter.vala +index 7599d18510..13e6cd0ebd 100755 +--- a/src/libvaladoc/html/htmlmarkupwriter.vala ++++ b/src/libvaladoc/html/htmlmarkupwriter.vala +@@ -24,8 +24,14 @@ using GLib; + using Valadoc.Content; + + public class Valadoc.Html.MarkupWriter : Valadoc.MarkupWriter { ++ private unowned FileStream stream; ++ + public MarkupWriter (FileStream stream, bool xml_declaration = true) { +- base (stream, xml_declaration); ++ // avoid broken implicit copy ++ unowned FileStream _stream = stream; ++ ++ base ((str) => { _stream.printf (str); }, xml_declaration); ++ this.stream = stream; + } + + public MarkupWriter add_usemap (Charts.Chart chart) { +diff --git a/src/libvaladoc/html/htmlrenderer.vala b/src/libvaladoc/html/htmlrenderer.vala +index 7c79047d52..c6148ae6db 100755 +--- a/src/libvaladoc/html/htmlrenderer.vala ++++ b/src/libvaladoc/html/htmlrenderer.vala +@@ -341,6 +341,23 @@ public class Valadoc.Html.HtmlRenderer : ContentRenderer { + writer.end_tag (""p""); + } + ++ private void visit_notification_block (BlockContent element, string headline) { ++ writer.start_tag (""div"", {""class"", ""main_notification_block""}); ++ writer.start_tag (""span"", {""class"", ""main_block_headline""}).text (headline).end_tag (""span"").text ("" ""); ++ writer.start_tag (""span"", {""class"", ""main_block_content""}); ++ element.accept_children (this); ++ writer.end_tag (""span""); ++ writer.end_tag (""div""); ++ } ++ ++ public override void visit_warning (Warning element) { ++ visit_notification_block (element, ""Warning:""); ++ } ++ ++ public override void visit_note (Note element) { ++ visit_notification_block (element, ""Note:""); ++ } ++ + public override void visit_run (Run element) { + string tag = null; + string css_type = null;" +296212eab186b860fd9494db9ed238b341fc2975,kotlin,Merge two JetTypeMapper-mapToCallableMethod- methods--,p,https://github.com/JetBrains/kotlin,"diff --git a/compiler/backend/src/org/jetbrains/kotlin/codegen/ExpressionCodegen.java b/compiler/backend/src/org/jetbrains/kotlin/codegen/ExpressionCodegen.java +index 2be9032ebddaa..896f430d59004 100644 +--- a/compiler/backend/src/org/jetbrains/kotlin/codegen/ExpressionCodegen.java ++++ b/compiler/backend/src/org/jetbrains/kotlin/codegen/ExpressionCodegen.java +@@ -1472,7 +1472,7 @@ public Unit invoke(InstructionAdapter v) { + ConstructorDescriptor constructorToCall = SamCodegenUtil.resolveSamAdapter(superConstructor); + List superValueParameters = superConstructor.getValueParameters(); + int params = superValueParameters.size(); +- List superMappedTypes = typeMapper.mapToCallableMethod(constructorToCall).getValueParameterTypes(); ++ List superMappedTypes = typeMapper.mapToCallableMethod(constructorToCall, false).getValueParameterTypes(); + assert superMappedTypes.size() >= params : String + .format(""Incorrect number of mapped parameters vs arguments: %d < %d for %s"", + superMappedTypes.size(), params, classDescriptor); +@@ -3414,7 +3414,7 @@ public Unit invoke(InstructionAdapter v) { + pushClosureOnStack(constructor.getContainingDeclaration(), dispatchReceiver == null, defaultCallGenerator); + + constructor = SamCodegenUtil.resolveSamAdapter(constructor); +- CallableMethod method = typeMapper.mapToCallableMethod(constructor); ++ CallableMethod method = typeMapper.mapToCallableMethod(constructor, false); + invokeMethodWithArguments(method, resolvedCall, StackValue.none()); + + return 
Unit.INSTANCE$; +diff --git a/compiler/backend/src/org/jetbrains/kotlin/codegen/FunctionCodegen.java b/compiler/backend/src/org/jetbrains/kotlin/codegen/FunctionCodegen.java +index 43c94286739ef..b1c257a942940 100644 +--- a/compiler/backend/src/org/jetbrains/kotlin/codegen/FunctionCodegen.java ++++ b/compiler/backend/src/org/jetbrains/kotlin/codegen/FunctionCodegen.java +@@ -664,13 +664,7 @@ public static void generateDefaultImplBody( + generator.putValueIfNeeded(parameterDescriptor, type, StackValue.local(parameterIndex, type)); + } + +- CallableMethod method; +- if (functionDescriptor instanceof ConstructorDescriptor) { +- method = state.getTypeMapper().mapToCallableMethod((ConstructorDescriptor) functionDescriptor); +- } +- else { +- method = state.getTypeMapper().mapToCallableMethod(functionDescriptor, false); +- } ++ CallableMethod method = state.getTypeMapper().mapToCallableMethod(functionDescriptor, false); + + generator.genCallWithoutAssertions(method, codegen); + +diff --git a/compiler/backend/src/org/jetbrains/kotlin/codegen/ImplementationBodyCodegen.java b/compiler/backend/src/org/jetbrains/kotlin/codegen/ImplementationBodyCodegen.java +index dbd04bd4121ca..da405f940ff91 100644 +--- a/compiler/backend/src/org/jetbrains/kotlin/codegen/ImplementationBodyCodegen.java ++++ b/compiler/backend/src/org/jetbrains/kotlin/codegen/ImplementationBodyCodegen.java +@@ -937,17 +937,16 @@ private void generateMethodCallTo( + @Nullable FunctionDescriptor accessorDescriptor, + @NotNull InstructionAdapter iv + ) { +- boolean isConstructor = functionDescriptor instanceof ConstructorDescriptor; +- boolean accessorIsConstructor = accessorDescriptor instanceof AccessorForConstructorDescriptor; +- +- boolean superCall = accessorDescriptor instanceof AccessorForCallableDescriptor && +- ((AccessorForCallableDescriptor) accessorDescriptor).getSuperCallExpression() != null; +- CallableMethod callableMethod = isConstructor ? 
+- typeMapper.mapToCallableMethod((ConstructorDescriptor) functionDescriptor) : +- typeMapper.mapToCallableMethod(functionDescriptor, superCall); ++ CallableMethod callableMethod = typeMapper.mapToCallableMethod( ++ functionDescriptor, ++ accessorDescriptor instanceof AccessorForCallableDescriptor && ++ ((AccessorForCallableDescriptor) accessorDescriptor).getSuperCallExpression() != null ++ ); + + int reg = 1; +- if (isConstructor && !accessorIsConstructor) { ++ ++ boolean accessorIsConstructor = accessorDescriptor instanceof AccessorForConstructorDescriptor; ++ if (!accessorIsConstructor && functionDescriptor instanceof ConstructorDescriptor) { + iv.anew(callableMethod.getOwner()); + iv.dup(); + reg = 0; +@@ -1496,8 +1495,8 @@ private void generateDelegatorToConstructorCall( + iv.load(0, OBJECT_TYPE); + ConstructorDescriptor delegateConstructor = SamCodegenUtil.resolveSamAdapter(codegen.getConstructorDescriptor(delegationConstructorCall)); + +- CallableMethod delegateConstructorCallable = typeMapper.mapToCallableMethod(delegateConstructor); +- CallableMethod callable = typeMapper.mapToCallableMethod(constructorDescriptor); ++ CallableMethod delegateConstructorCallable = typeMapper.mapToCallableMethod(delegateConstructor, false); ++ CallableMethod callable = typeMapper.mapToCallableMethod(constructorDescriptor, false); + + List delegatingParameters = delegateConstructorCallable.getValueParameters(); + List parameters = callable.getValueParameters(); +@@ -1711,7 +1710,7 @@ private void initializeEnumConstant(@NotNull List enumEntries, int + if (delegationSpecifiers.size() == 1 && !enumEntryNeedSubclass(bindingContext, enumEntry)) { + ResolvedCall resolvedCall = CallUtilPackage.getResolvedCallWithAssert(delegationSpecifiers.get(0), bindingContext); + +- CallableMethod method = typeMapper.mapToCallableMethod((ConstructorDescriptor) resolvedCall.getResultingDescriptor()); ++ CallableMethod method = typeMapper.mapToCallableMethod((ConstructorDescriptor) resolvedCall.getResultingDescriptor(), false); + + codegen.invokeMethodWithArguments(method, resolvedCall, StackValue.none()); + } +diff --git a/compiler/backend/src/org/jetbrains/kotlin/codegen/state/JetTypeMapper.java b/compiler/backend/src/org/jetbrains/kotlin/codegen/state/JetTypeMapper.java +index dc57b4ac65db6..bf1c4772a95d8 100644 +--- a/compiler/backend/src/org/jetbrains/kotlin/codegen/state/JetTypeMapper.java ++++ b/compiler/backend/src/org/jetbrains/kotlin/codegen/state/JetTypeMapper.java +@@ -593,6 +593,12 @@ private Type mapKnownAsmType( + + @NotNull + public CallableMethod mapToCallableMethod(@NotNull FunctionDescriptor descriptor, boolean superCall) { ++ if (descriptor instanceof ConstructorDescriptor) { ++ JvmMethodSignature method = mapSignature(descriptor); ++ Type owner = mapClass(((ConstructorDescriptor) descriptor).getContainingDeclaration()); ++ return new CallableMethod(owner, owner, owner, method, INVOKESPECIAL, null, null, null); ++ } ++ + DeclarationDescriptor functionParent = descriptor.getOriginal().getContainingDeclaration(); + + FunctionDescriptor functionDescriptor = unwrapFakeOverride(descriptor.getOriginal()); +@@ -1113,17 +1119,6 @@ public JvmMethodSignature mapScriptSignature(@NotNull ScriptDescriptor script, @ + return sw.makeJvmMethodSignature(""""); + } + +- @NotNull +- public CallableMethod mapToCallableMethod(@NotNull ConstructorDescriptor descriptor) { +- JvmMethodSignature method = mapSignature(descriptor); +- ClassDescriptor container = descriptor.getContainingDeclaration(); +- Type owner = 
mapClass(container); +- if (owner.getSort() != Type.OBJECT) { +- throw new IllegalStateException(""type must have been mapped to object: "" + container.getDefaultType() + "", actual: "" + owner); +- } +- return new CallableMethod(owner, owner, owner, method, INVOKESPECIAL, null, null, null); +- } +- + public Type getSharedVarType(DeclarationDescriptor descriptor) { + if (descriptor instanceof SimpleFunctionDescriptor && descriptor.getContainingDeclaration() instanceof FunctionDescriptor) { + return asmTypeForAnonymousClass(bindingContext, (FunctionDescriptor) descriptor);" +77f1f5cd336bbd3c7b9d548a4916084bc1e56dc3,orientdb,Console: fixed jdk6 problem in more elegant way- (thanks to Andrey's suggestion)--,p,https://github.com/orientechnologies/orientdb,"diff --git a/commons/src/main/java/com/orientechnologies/common/console/ODFACommandStream.java b/commons/src/main/java/com/orientechnologies/common/console/ODFACommandStream.java +index aaeb27f7b7d..804182cb751 100755 +--- a/commons/src/main/java/com/orientechnologies/common/console/ODFACommandStream.java ++++ b/commons/src/main/java/com/orientechnologies/common/console/ODFACommandStream.java +@@ -107,11 +107,10 @@ public String nextCommand() { + result = partialResult.toString(); + } + } else { +- try { +- result = buffer.subSequence(start, end + 1).toString(); +- } catch (NoSuchMethodError e) { +- result = buffer.toString().substring(start, end + 1); +- } ++ // DON'T PUT THIS ON ONE LINE ONLY BECAUSE WITH JDK6 subSequence() RETURNS A CHAR CharSequence while JDK7+ RETURNS ++ // CharBuffer ++ final CharSequence cs = buffer.subSequence(start, end + 1); ++ result = cs.toString(); + } + + buffer.position(buffer.position() + position);" +51b603c850765b2734549a1962581ebe5f5f2125,restlet-framework-java,JAX-RS extension continued: - Added an- ObjectFactory to the JAX-RS extension for customized root resource class and- provider instantiation. Contributed by Bruno Dumon.--,a,https://github.com/restlet/restlet-framework-java,"diff --git a/build/tmpl/text/changes.txt b/build/tmpl/text/changes.txt +index 050ac19537..ee28f2d5a0 100644 +--- a/build/tmpl/text/changes.txt ++++ b/build/tmpl/text/changes.txt +@@ -66,6 +66,8 @@ Changes log + parsing and formatting into three new classes: + MessageRepresentation, MessagesRepresentation and + RepresentationMessage. Suggested by Matthieu Hug. ++ - Added an ObjectFactory to the JAX-RS extension for customized root ++ resource class and provider instantiation. Contributed by Bruno Dumon. + - Misc + - Updated Grizzly to 1.7.3. + - Improved logging consistency. +diff --git a/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/JaxRsApplication.java b/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/JaxRsApplication.java +index d3c2a84bc5..0a304ee739 100644 +--- a/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/JaxRsApplication.java ++++ b/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/JaxRsApplication.java +@@ -73,7 +73,10 @@ + */ + public class JaxRsApplication extends Application { + +- /** the {@link Guard} to use. May be null. */ ++ /** Indicates if any {@link ApplicationConfig} is attached yet */ ++ private volatile boolean appConfigAttached = false; + -+ Set test = subgraph.vertices(); -+ assertEquals(5, test.size()); ++ /** The {@link Guard} to use. May be null. */ + private volatile Guard guard; + + /** The {@link JaxRsRouter} to use. 
*/ +@@ -82,9 +85,6 @@ public class JaxRsApplication extends Application { + /** Indicates, if an {@link HtmlPreferer} should be used or not. */ + private volatile boolean preferHtml = true; + +- /** indicates if any {@link ApplicationConfig} is attached yet */ +- private volatile boolean appConfigAttached = false; +- + /** + * Creates an new JaxRsApplication. You should typically use one of the + * other constructors, see {@link Restlet#Restlet()}. +@@ -248,6 +248,17 @@ public Guard getGuard() { + return this.guard; + } + ++ /** ++ * Returns the ObjectFactory for root resource class and provider ++ * instantiation, if given. ++ * ++ * @return the ObjectFactory for root resource class and provider ++ * instantiation, if given. ++ */ ++ public ObjectFactory getObjectFactory() { ++ return this.jaxRsRouter.getObjectFactory(); ++ } + -+ // Add a vertex -+ assertTrue(test.remove(0)); -+ assertEquals(4, test.size()); -+ assertEquals(4, subgraph.order()); -+ assertEquals(9, g.order()); -+ assertEquals( (4*3)/2, subgraph.size()); + /** + * Returns the current RoleChecker + * +@@ -337,6 +348,18 @@ public void setGuard(Guard guard) { + this.guard = guard; + } + ++ /** ++ * Sets the ObjectFactory for root resource class and provider ++ * instantiation. ++ * ++ * @param objectFactory ++ * the ObjectFactory for root resource class and provider ++ * instantiation. ++ */ ++ public void setObjectFactory(ObjectFactory objectFactory) { ++ this.jaxRsRouter.setObjectFactory(objectFactory); + } + -+ @Test(expected=UnsupportedOperationException.class) public void testSubgraphVerticesIteratorRemove() { -+ DirectedMultigraph g = new DirectedMultigraph(); + /** + * Some browsers (e.g. Internet Explorer 7.0 and Firefox 2.0) sends as + * accepted media type XML with a higher quality than HTML. The consequence +diff --git a/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/JaxRsRouter.java b/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/JaxRsRouter.java +index 44ce111066..e0249faad5 100644 +--- a/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/JaxRsRouter.java ++++ b/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/JaxRsRouter.java +@@ -166,6 +166,8 @@ public class JaxRsRouter extends Restlet { + @SuppressWarnings(""unchecked"") + private final ThreadLocalizedContext tlContext = new ThreadLocalizedContext(); + ++ private volatile ObjectFactory objectFactory; + -+ // fully connected -+ for (int i = 0; i < 10; i++) { -+ for (int j = i+1; j < 10; ++j) -+ g.add(new SimpleDirectedTypedEdge(""type-1"",i, j)); -+ } + /** + * Creates a new JaxRsRouter with the given Context. Only the default + * providers are loaded. 
If a resource class later wants to check if a user +@@ -330,8 +332,9 @@ private boolean addProvider(Class jaxRsProviderClass, + } + Provider provider; + try { +- provider = new Provider(jaxRsProviderClass, tlContext, +- this.entityProviders, contextResolvers, getLogger()); ++ provider = new Provider(jaxRsProviderClass, objectFactory, ++ tlContext, this.entityProviders, contextResolvers, ++ getLogger()); + } catch (InstantiateException e) { + String msg = ""Ignore provider "" + jaxRsProviderClass.getName() + + ""Could not instantiate the Provider, class "" +@@ -507,7 +510,7 @@ private ResourceObject instantiateRrc(RootResourceClass rrc) + throws WebApplicationException, RequestHandledException { + ResourceObject o; + try { +- o = rrc.createInstance(); ++ o = rrc.createInstance(this.objectFactory); + } catch (WebApplicationException e) { + throw e; + } catch (RuntimeException e) { +@@ -1078,4 +1081,27 @@ public Collection getRootUris() { + uris.add(rrc.getPathRegExp().getPathPattern()); + return Collections.unmodifiableCollection(uris); + } + -+ // (n * (n-1)) / 2 -+ assertEquals( (10 * 9) / 2, g.size()); -+ assertEquals(10, g.order()); ++ /** ++ * Returns the ObjectFactory for root resource class and provider ++ * instantiation, if given. ++ * ++ * @return the ObjectFactory for root resource class and provider ++ * instantiation, if given. ++ */ ++ public ObjectFactory getObjectFactory() { ++ return this.objectFactory; ++ } + -+ Set vertices = new LinkedHashSet(); -+ for (int i = 0; i < 5; ++i) -+ vertices.add(i); ++ /** ++ * Sets the ObjectFactory for root resource class and provider ++ * instantiation. ++ * ++ * @param objectFactory ++ * the ObjectFactory for root resource class and provider ++ * instantiation. ++ */ ++ public void setObjectFactory(ObjectFactory objectFactory) { ++ this.objectFactory = objectFactory; ++ } + } +\ No newline at end of file +diff --git a/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/ObjectFactory.java b/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/ObjectFactory.java +new file mode 100644 +index 0000000000..dc4e8134a4 +--- /dev/null ++++ b/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/ObjectFactory.java +@@ -0,0 +1,53 @@ ++/* ++ * Copyright 2005-2008 Noelios Consulting. ++ * ++ * The contents of this file are subject to the terms of the Common Development ++ * and Distribution License (the ""License""). You may not use this file except in ++ * compliance with the License. ++ * ++ * You can obtain a copy of the license at ++ * http://www.opensource.org/licenses/cddl1.txt See the License for the specific ++ * language governing permissions and limitations under the License. 
++ * ++ * When distributing Covered Code, include this CDDL HEADER in each file and ++ * include the License file at http://www.opensource.org/licenses/cddl1.txt If ++ * applicable, add the following below this CDDL HEADER, with the fields ++ * enclosed by brackets ""[]"" replaced with your own identifying information: ++ * Portions Copyright [yyyy] [name of copyright owner] ++ */ ++package org.restlet.ext.jaxrs; + -+ DirectedMultigraph subgraph = g.subgraph(vertices); -+ assertEquals(5, subgraph.order()); -+ assertEquals( (5 * 4) / 2, subgraph.size()); ++import org.restlet.ext.jaxrs.internal.exceptions.InstantiateException; + -+ Set test = subgraph.vertices(); -+ assertEquals(5, test.size()); -+ Iterator it = test.iterator(); -+ assertTrue(it.hasNext()); -+ // Remove the first vertex returned -+ it.next(); -+ it.remove(); -+ -+ assertEquals(4, test.size()); -+ assertEquals(4, subgraph.order()); -+ assertEquals(9, g.order()); -+ assertEquals( (4*3)/2, subgraph.size()); -+ } ++/** ++ *

++ * Implement this interface to instantiate JAX-RS root resource classes and ++ * providers yourself and register it by ++ * {@link JaxRsApplication#setObjectFactory(ObjectFactory)}. ++ *

++ * ++ *

++ * When using a ObjectFactory, no JAX-RS constructor dependency injection will ++ * be performed, but instance variable and bean setter injection will still be ++ * done. ++ *

++ * ++ * @author Bruno Dumon ++ * @see JaxRsApplication#setObjectFactory(ObjectFactory) ++ * @see JaxRsRouter#setObjectFactory(ObjectFactory) ++ */ ++public interface ObjectFactory { ++ /** ++ * Creates an instance of the given class.
++ * If the concrete instance could not instantiate the given class, it could ++ * return null. Than the constructor specified by the JAX-RS specification ++ * (section 4.2) is used. ++ * ++ * @param ++ * @param jaxRsClass ++ * the root resource class or provider class. ++ * @return ++ * @throws InstantiateException ++ */ ++ public T getInstance(Class jaxRsClass) throws InstantiateException; ++} +\ No newline at end of file +diff --git a/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/internal/wrappers/RootResourceClass.java b/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/internal/wrappers/RootResourceClass.java +index 2957fc6c68..08dd0ae0dc 100644 +--- a/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/internal/wrappers/RootResourceClass.java ++++ b/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/internal/wrappers/RootResourceClass.java +@@ -28,6 +28,7 @@ + import javax.ws.rs.ext.ContextResolver; + + import org.restlet.ext.jaxrs.JaxRsRouter; ++import org.restlet.ext.jaxrs.ObjectFactory; + import org.restlet.ext.jaxrs.internal.core.ThreadLocalizedContext; + import org.restlet.ext.jaxrs.internal.exceptions.ConvertRepresentationException; + import org.restlet.ext.jaxrs.internal.exceptions.IllegalPathOnClassException; +@@ -122,22 +123,28 @@ private static void checkClassForPathAnnot(Class jaxRsClass, + /** + * Creates an instance of the root resource class. + * ++ * @param objectFactory ++ * object responsible for instantiating the root resource ++ * class. Optional, thus can be null. + * @return + * @throws InvocationTargetException + * @throws InstantiateException + * @throws MissingAnnotationException + * @throws WebApplicationException + */ +- public ResourceObject createInstance() throws InstantiateException, +- InvocationTargetException { +- Constructor constructor = this.constructor; +- Object instance; +- try { +- instance = WrapperUtil.createInstance(constructor, +- constructorParameters.get()); +- } catch (ConvertRepresentationException e) { +- // is not possible +- throw new ImplementationException(""Must not be possible"", e); ++ public ResourceObject createInstance(ObjectFactory objectFactory) ++ throws InstantiateException, InvocationTargetException { ++ Object instance = null; ++ if (objectFactory != null) ++ instance = objectFactory.getInstance(jaxRsClass); ++ if (instance == null) { ++ try { ++ Object[] args = constructorParameters.get(); ++ instance = WrapperUtil.createInstance(constructor, args); ++ } catch (ConvertRepresentationException e) { ++ // is not possible ++ throw new ImplementationException(""Must not be possible"", e); ++ } + } + ResourceObject rootResourceObject = new ResourceObject(instance, this); + try { +diff --git a/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/internal/wrappers/provider/Provider.java b/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/internal/wrappers/provider/Provider.java +index 1fa634f510..b42e36bc61 100644 +--- a/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/internal/wrappers/provider/Provider.java ++++ b/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/internal/wrappers/provider/Provider.java +@@ -40,6 +40,7 @@ + import javax.ws.rs.ext.MessageBodyWorkers; + + import org.restlet.data.MediaType; ++import org.restlet.ext.jaxrs.ObjectFactory; + import org.restlet.ext.jaxrs.internal.core.CallContext; + import org.restlet.ext.jaxrs.internal.core.ThreadLocalizedContext; + import 
org.restlet.ext.jaxrs.internal.exceptions.ConvertCookieParamException; +@@ -101,6 +102,9 @@ public class Provider implements MessageBodyReader, MessageBodyWriter, + * + * @param jaxRsProviderClass + * the JAX-RS provider class. ++ * @param objectFactory ++ * The object factory is responsible for the provider ++ * instantiation, if given. + * @param tlContext + * The tread local wrapped call context + * @param mbWorkers +@@ -124,7 +128,7 @@ public class Provider implements MessageBodyReader, MessageBodyWriter, + * @see javax.ws.rs.ext.ContextResolver + */ + @SuppressWarnings(""unchecked"") +- public Provider(Class jaxRsProviderClass, ++ public Provider(Class jaxRsProviderClass, ObjectFactory objectFactory, + ThreadLocalizedContext tlContext, EntityProviders mbWorkers, + Collection> allResolvers, Logger logger) + throws IllegalArgumentException, InvocationTargetException, +@@ -134,10 +138,15 @@ public Provider(Class jaxRsProviderClass, + throw new IllegalArgumentException( + ""The JAX-RS provider class must not be null""); + Util.checkClassConcrete(jaxRsProviderClass, ""provider""); +- Constructor providerConstructor = WrapperUtil.findJaxRsConstructor( +- jaxRsProviderClass, ""provider""); +- this.jaxRsProvider = createInstance(providerConstructor, +- jaxRsProviderClass, tlContext, mbWorkers, allResolvers, logger); ++ if (objectFactory != null) ++ this.jaxRsProvider = objectFactory.getInstance(jaxRsProviderClass); ++ if (this.jaxRsProvider == null) { ++ Constructor providerConstructor = WrapperUtil ++ .findJaxRsConstructor(jaxRsProviderClass, ""provider""); ++ this.jaxRsProvider = createInstance(providerConstructor, ++ jaxRsProviderClass, tlContext, mbWorkers, allResolvers, ++ logger); ++ } + boolean isProvider = false; + if (jaxRsProvider instanceof javax.ws.rs.ext.MessageBodyWriter) { + this.writer = (javax.ws.rs.ext.MessageBodyWriter) jaxRsProvider;" +e54358472c94d63c66ad607be256f94378e8ff16,orientdb,Issue -2900,c,https://github.com/orientechnologies/orientdb,"diff --git a/core/src/main/java/com/orientechnologies/orient/core/db/record/ODatabaseBinary.java b/core/src/main/java/com/orientechnologies/orient/core/db/record/ODatabaseBinary.java +deleted file mode 100644 +index 2ec9a2144fb..00000000000 +--- a/core/src/main/java/com/orientechnologies/orient/core/db/record/ODatabaseBinary.java ++++ /dev/null +@@ -1,33 +0,0 @@ +-/* +- * +- * * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com) +- * * +- * * Licensed under the Apache License, Version 2.0 (the ""License""); +- * * you may not use this file except in compliance with the License. +- * * You may obtain a copy of the License at +- * * +- * * http://www.apache.org/licenses/LICENSE-2.0 +- * * +- * * Unless required by applicable law or agreed to in writing, software +- * * distributed under the License is distributed on an ""AS IS"" BASIS, +- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +- * * See the License for the specific language governing permissions and +- * * limitations under the License. +- * * +- * * For more information: http://www.orientechnologies.com +- * +- */ +-package com.orientechnologies.orient.core.db.record; +- +-import com.orientechnologies.orient.core.record.impl.ORecordBytes; +- +-/** +- * Binary specialization of transactional database. 
+- * +- */ +-public class ODatabaseBinary extends ODatabaseRecordTx { +- +- public ODatabaseBinary(String iURL) { +- super(iURL, ORecordBytes.RECORD_TYPE); +- } +-} +diff --git a/core/src/main/java/com/orientechnologies/orient/core/db/record/ODatabaseFlat.java b/core/src/main/java/com/orientechnologies/orient/core/db/record/ODatabaseFlat.java +deleted file mode 100755 +index 09bc14dfc99..00000000000 +--- a/core/src/main/java/com/orientechnologies/orient/core/db/record/ODatabaseFlat.java ++++ /dev/null +@@ -1,54 +0,0 @@ +-/* +- * +- * * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com) +- * * +- * * Licensed under the Apache License, Version 2.0 (the ""License""); +- * * you may not use this file except in compliance with the License. +- * * You may obtain a copy of the License at +- * * +- * * http://www.apache.org/licenses/LICENSE-2.0 +- * * +- * * Unless required by applicable law or agreed to in writing, software +- * * distributed under the License is distributed on an ""AS IS"" BASIS, +- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +- * * See the License for the specific language governing permissions and +- * * limitations under the License. +- * * +- * * For more information: http://www.orientechnologies.com +- * +- */ +-package com.orientechnologies.orient.core.db.record; +- +-import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx; +-import com.orientechnologies.orient.core.iterator.ORecordIteratorCluster; +-import com.orientechnologies.orient.core.record.impl.ORecordFlat; +- +-/** +- * Delegates all the CRUD operations to the current transaction. +- * +- */ +-public class ODatabaseFlat extends ODatabaseRecordTx { +- +- public ODatabaseFlat(String iURL) { +- super(iURL, ORecordFlat.RECORD_TYPE); +- serializer = ODatabaseDocumentTx.getDefaultSerializer(); +- } +- +- @SuppressWarnings(""unchecked"") +- @Override +- public ORecordIteratorCluster browseCluster(final String iClusterName) { +- return super.browseCluster(iClusterName, ORecordFlat.class); +- } +- +- @Override +- public ORecordIteratorCluster browseCluster(String iClusterName, long startClusterPosition, +- long endClusterPosition, boolean loadTombstones) { +- return super.browseCluster(iClusterName, ORecordFlat.class, startClusterPosition, endClusterPosition, loadTombstones); +- } +- +- @SuppressWarnings(""unchecked"") +- @Override +- public ORecordFlat newInstance() { +- return new ORecordFlat(); +- } +-} +diff --git a/core/src/main/java/com/orientechnologies/orient/core/record/impl/ORecordFlat.java b/core/src/main/java/com/orientechnologies/orient/core/record/impl/ORecordFlat.java +index 6db8b2e689b..b4701b13e49 100644 +--- a/core/src/main/java/com/orientechnologies/orient/core/record/impl/ORecordFlat.java ++++ b/core/src/main/java/com/orientechnologies/orient/core/record/impl/ORecordFlat.java +@@ -20,7 +20,7 @@ + package com.orientechnologies.orient.core.record.impl; + + import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal; +-import com.orientechnologies.orient.core.db.record.ODatabaseFlat; ++import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx; + import com.orientechnologies.orient.core.db.record.ODatabaseRecord; + import com.orientechnologies.orient.core.id.ORID; + import com.orientechnologies.orient.core.id.ORecordId; +@@ -39,7 +39,7 @@ public class ORecordFlat extends ORecordAbstract implements ORecordStringable { + public static final byte RECORD_TYPE = 'f'; + protected String value; + +- public 
ORecordFlat(ODatabaseFlat iDatabase) { ++ public ORecordFlat(ODatabaseDocumentTx iDatabase) { + this(); + ODatabaseRecordThreadLocal.INSTANCE.set(iDatabase); + } +diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/CRUDFlatPhysicalTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/CRUDFlatPhysicalTest.java +index 4bd522343d3..7e413faab08 100644 +--- a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/CRUDFlatPhysicalTest.java ++++ b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/CRUDFlatPhysicalTest.java +@@ -18,18 +18,18 @@ + import java.util.HashSet; + import java.util.Set; + +-import com.orientechnologies.orient.core.storage.OStorage; ++import com.orientechnologies.orient.core.db.record.ODatabaseRecordAbstract; ++import com.orientechnologies.orient.core.iterator.ORecordIteratorCluster; + import org.testng.Assert; + import org.testng.annotations.BeforeClass; + import org.testng.annotations.Optional; + import org.testng.annotations.Parameters; + import org.testng.annotations.Test; + +-import com.orientechnologies.orient.core.db.record.ODatabaseFlat; + import com.orientechnologies.orient.core.record.impl.ORecordFlat; + + @Test(groups = { ""crud"", ""record-csv"" }, sequential = true) +-public class CRUDFlatPhysicalTest extends FlatDBBaseTest { ++public class CRUDFlatPhysicalTest extends DocumentDBBaseTest { + private static final String CLUSTER_NAME = ""binary""; + protected static final int TOT_RECORDS = 100; + +@@ -41,16 +41,16 @@ public CRUDFlatPhysicalTest(@Optional String url) { + super(url); + } + +- @BeforeClass +- @Override +- public void beforeClass() throws Exception { +- super.beforeClass(); +- record = database.newInstance(); +- } ++ @BeforeClass ++ @Override ++ public void beforeClass() throws Exception { ++ super.beforeClass(); ++ record = new ORecordFlat(); ++ } + +- public void createRaw() { +- if (database.getClusterIdByName(CLUSTER_NAME) < 0) +- database.addCluster(CLUSTER_NAME); ++ public void createRaw() { ++ if (database.getClusterIdByName(CLUSTER_NAME) < 0) ++ database.addCluster(CLUSTER_NAME); + + startRecordNumber = database.countClusterElements(CLUSTER_NAME); + +@@ -73,7 +73,8 @@ public void readRawWithExpressiveForwardIterator() { + for (int i = 0; i < TOT_RECORDS; i++) + ids.add(i); + +- for (ORecordFlat rec : database.browseCluster(CLUSTER_NAME)) { ++ for (ORecordFlat rec : new ORecordIteratorCluster(database, (ODatabaseRecordAbstract) database.getUnderlying(), ++ database.getClusterIdByName(CLUSTER_NAME), true)) { + fields = rec.value().split(""-""); + + int i = Integer.parseInt(fields[0]); +@@ -87,7 +88,8 @@ public void readRawWithExpressiveForwardIterator() { + public void updateRaw() { + String[] fields; + +- for (ORecordFlat rec : database.browseCluster(CLUSTER_NAME)) { ++ for (ORecordFlat rec : new ORecordIteratorCluster(database, (ODatabaseRecordAbstract) database.getUnderlying(), ++ database.getClusterIdByName(CLUSTER_NAME), true)) { + fields = rec.value().split(""-""); + int i = Integer.parseInt(fields[0]); + if (i % 2 == 0) { +@@ -105,7 +107,8 @@ public void testUpdateRaw() { + for (int i = 0; i < TOT_RECORDS; i++) + ids.add(i); + +- for (ORecordFlat rec : database.browseCluster(CLUSTER_NAME)) { ++ for (ORecordFlat rec : new ORecordIteratorCluster(database, (ODatabaseRecordAbstract) database.getUnderlying(), ++ database.getClusterIdByName(CLUSTER_NAME), true)) { + fields = rec.value().split(""-""); + + int i = Integer.parseInt(fields[0]); +diff --git 
a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/DictionaryTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/DictionaryTest.java +index a4c3c2ad1a4..54383db3c65 100755 +--- a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/DictionaryTest.java ++++ b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/DictionaryTest.java +@@ -23,7 +23,6 @@ + import org.testng.annotations.Test; + + import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx; +-import com.orientechnologies.orient.core.db.record.ODatabaseFlat; + import com.orientechnologies.orient.core.record.ORecord; + import com.orientechnologies.orient.core.record.impl.ODocument; + import com.orientechnologies.orient.core.record.impl.ORecordFlat; +@@ -40,49 +39,32 @@ public DictionaryTest(@Optional String url) { + } + + public void testDictionaryCreate() throws IOException { +- ODatabaseFlat database = new ODatabaseFlat(url); +- database.open(""admin"", ""admin""); +- ORecordFlat record = database.newInstance(); ++ ORecordFlat record = new ORecordFlat(); + + database.getDictionary().put(""key1"", record.value(""Dictionary test!"")); +- +- database.close(); + } + + @Test(dependsOnMethods = ""testDictionaryCreate"") + public void testDictionaryLookup() throws IOException { +- ODatabaseFlat database = new ODatabaseFlat(url); +- database.open(""admin"", ""admin""); +- + Assert.assertNotNull(database.getDictionary().get(""key1"")); + Assert.assertTrue(((ORecordFlat) database.getDictionary().get(""key1"")).value().equals(""Dictionary test!"")); +- +- database.close(); + } + + @Test(dependsOnMethods = ""testDictionaryLookup"") + public void testDictionaryUpdate() throws IOException { +- ODatabaseFlat database = new ODatabaseFlat(url); +- database.open(""admin"", ""admin""); +- + final long originalSize = database.getDictionary().size(); + +- database.getDictionary().put(""key1"", database.newInstance().value(""Text changed"")); ++ database.getDictionary().put(""key1"", new ORecordFlat().value(""Text changed"")); + + database.close(); + database.open(""admin"", ""admin""); + + Assert.assertEquals(((ORecordFlat) database.getDictionary().get(""key1"")).value(), ""Text changed""); + Assert.assertEquals(database.getDictionary().size(), originalSize); +- +- database.close(); + } + + @Test(dependsOnMethods = ""testDictionaryUpdate"") + public void testDictionaryDelete() throws IOException { +- ODatabaseFlat database = new ODatabaseFlat(url); +- database.open(""admin"", ""admin""); +- + final long originalSize = database.getDictionary().size(); + Assert.assertNotNull(database.getDictionary().remove(""key1"")); + +@@ -90,22 +72,17 @@ public void testDictionaryDelete() throws IOException { + database.open(""admin"", ""admin""); + + Assert.assertEquals(database.getDictionary().size(), originalSize - 1); +- +- database.close(); + } + + @Test(dependsOnMethods = ""testDictionaryDelete"") + public void testDictionaryMassiveCreate() throws IOException { +- ODatabaseFlat database = new ODatabaseFlat(url); +- database.open(""admin"", ""admin""); +- + final long originalSize = database.getDictionary().size(); + + // ASSURE TO STORE THE PAGE-SIZE + 3 FORCING THE CREATION OF LEFT AND RIGHT + final int total = 1000; + + for (int i = total; i > 0; --i) { +- database.getDictionary().put(""key-"" + (originalSize + i), database.newInstance().value(""test-dictionary-"" + i)); ++ database.getDictionary().put(""key-"" + (originalSize + i), new 
ORecordFlat().value(""test-dictionary-"" + i)); + } + + for (int i = total; i > 0; --i) { +@@ -114,22 +91,15 @@ public void testDictionaryMassiveCreate() throws IOException { + } + + Assert.assertEquals(database.getDictionary().size(), originalSize + total); +- +- database.close(); + } + + @Test(dependsOnMethods = ""testDictionaryMassiveCreate"") + public void testDictionaryInTx() throws IOException { +- ODatabaseFlat database = new ODatabaseFlat(url); +- database.open(""admin"", ""admin""); +- + database.begin(); +- database.getDictionary().put(""tx-key"", database.newInstance().value(""tx-test-dictionary"")); ++ database.getDictionary().put(""tx-key"", new ORecordFlat().value(""tx-test-dictionary"")); + database.commit(); + + Assert.assertNotNull(database.getDictionary().get(""tx-key"")); +- +- database.close(); + } + + public class ObjectDictionaryTest { +diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/FlatDBBaseTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/FlatDBBaseTest.java +deleted file mode 100644 +index fe45fa706d6..00000000000 +--- a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/FlatDBBaseTest.java ++++ /dev/null +@@ -1,31 +0,0 @@ +-package com.orientechnologies.orient.test.database.auto; +- +-import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx; +-import com.orientechnologies.orient.core.db.record.ODatabaseFlat; +-import org.testng.annotations.Optional; +-import org.testng.annotations.Parameters; +- +-/** +- * @author Andrey Lomakin Andrey Lomakin +- * @since 7/10/14 +- */ +-public abstract class FlatDBBaseTest extends BaseTest { +- @Parameters(value = ""url"") +- protected FlatDBBaseTest(@Optional String url) { +- super(url); +- } +- +- @Override +- protected ODatabaseFlat createDatabaseInstance(String url) { +- return new ODatabaseFlat(url); +- } +- +- @Override +- protected void createDatabase() { +- ODatabaseDocumentTx db = new ODatabaseDocumentTx(database.getURL()); +- db.create(); +- db.close(); +- +- database.open(""admin"", ""admin""); +- } +-} +diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/TransactionAtomicTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/TransactionAtomicTest.java +index 391133364b1..e06eef77857 100755 +--- a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/TransactionAtomicTest.java ++++ b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/TransactionAtomicTest.java +@@ -25,7 +25,6 @@ + import com.orientechnologies.orient.core.db.ODatabase; + import com.orientechnologies.orient.core.db.ODatabaseListener; + import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx; +-import com.orientechnologies.orient.core.db.record.ODatabaseFlat; + import com.orientechnologies.orient.core.exception.OConcurrentModificationException; + import com.orientechnologies.orient.core.exception.OTransactionException; + import com.orientechnologies.orient.core.metadata.schema.OClass; +@@ -45,13 +44,13 @@ public TransactionAtomicTest(@Optional String url) { + + @Test + public void testTransactionAtomic() throws IOException { +- ODatabaseFlat db1 = new ODatabaseFlat(url); ++ ODatabaseDocumentTx db1 = new ODatabaseDocumentTx(url); + db1.open(""admin"", ""admin""); + +- ODatabaseFlat db2 = new ODatabaseFlat(url); ++ ODatabaseDocumentTx db2 = new ODatabaseDocumentTx(url); + db2.open(""admin"", ""admin""); + +- ORecordFlat record1 = new ORecordFlat(db1); ++ 
ORecordFlat record1 = new ORecordFlat(); + record1.value(""This is the first version"").save(); + + // RE-READ THE RECORD +@@ -91,7 +90,7 @@ public void testMVCC() throws IOException { + + @Test(expectedExceptions = OTransactionException.class) + public void testTransactionPreListenerRollback() throws IOException { +- ODatabaseFlat db = new ODatabaseFlat(url); ++ ODatabaseDocumentTx db = new ODatabaseDocumentTx(url); + db.open(""admin"", ""admin""); + + ORecordFlat record1 = new ORecordFlat(db); +diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/CreateRelationshipsSpeedTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/CreateRelationshipsSpeedTest.java +index 080197a4492..5c9b0944325 100644 +--- a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/CreateRelationshipsSpeedTest.java ++++ b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/CreateRelationshipsSpeedTest.java +@@ -19,12 +19,11 @@ + import java.io.UnsupportedEncodingException; + + import com.orientechnologies.common.test.SpeedTestMonoThread; +-import com.orientechnologies.orient.core.db.record.ODatabaseFlat; ++import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx; + import com.orientechnologies.orient.core.record.impl.ORecordFlat; +-import com.orientechnologies.orient.core.storage.OStorage; + + public class CreateRelationshipsSpeedTest extends SpeedTestMonoThread { +- private ODatabaseFlat database; ++ private ODatabaseDocumentTx database; + private ORecordFlat record; + + public CreateRelationshipsSpeedTest() { +diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/DictionaryLookupInverseSpeedTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/DictionaryLookupInverseSpeedTest.java +index 095137fffe6..a94eddda3a0 100644 +--- a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/DictionaryLookupInverseSpeedTest.java ++++ b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/DictionaryLookupInverseSpeedTest.java +@@ -17,17 +17,17 @@ + + import java.io.UnsupportedEncodingException; + ++import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx; + import org.testng.Assert; + import org.testng.annotations.Test; + + import com.orientechnologies.orient.core.Orient; +-import com.orientechnologies.orient.core.db.record.ODatabaseFlat; + import com.orientechnologies.orient.core.record.impl.ORecordFlat; + import com.orientechnologies.orient.test.database.base.OrientMonoThreadTest; + + @Test(enabled = false) + public class DictionaryLookupInverseSpeedTest extends OrientMonoThreadTest { +- private ODatabaseFlat database; ++ private ODatabaseDocumentTx database; + + public static void main(String[] iArgs) throws InstantiationException, IllegalAccessException { + DictionaryLookupInverseSpeedTest test = new DictionaryLookupInverseSpeedTest(); +@@ -37,7 +37,7 @@ public static void main(String[] iArgs) throws InstantiationException, IllegalAc + public DictionaryLookupInverseSpeedTest() { + super(100000); + Orient.instance().getProfiler().startRecording(); +- database = new ODatabaseFlat(System.getProperty(""url"")).open(""admin"", ""admin""); ++ database = new ODatabaseDocumentTx(System.getProperty(""url"")).open(""admin"", ""admin""); + } + + @Override +diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/DictionaryLookupSpeedTest.java 
b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/DictionaryLookupSpeedTest.java +index 8a660404859..fd1b3dd6610 100644 +--- a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/DictionaryLookupSpeedTest.java ++++ b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/DictionaryLookupSpeedTest.java +@@ -17,17 +17,17 @@ + + import java.io.UnsupportedEncodingException; + ++import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx; + import org.testng.Assert; + import org.testng.annotations.Test; + + import com.orientechnologies.orient.core.Orient; +-import com.orientechnologies.orient.core.db.record.ODatabaseFlat; + import com.orientechnologies.orient.core.record.impl.ORecordFlat; + import com.orientechnologies.orient.test.database.base.OrientMonoThreadTest; + + @Test(enabled = false) + public class DictionaryLookupSpeedTest extends OrientMonoThreadTest { +- private ODatabaseFlat database; ++ private ODatabaseDocumentTx database; + + public static void main(String[] iArgs) throws InstantiationException, IllegalAccessException { + DictionaryLookupSpeedTest test = new DictionaryLookupSpeedTest(); +@@ -37,7 +37,7 @@ public static void main(String[] iArgs) throws InstantiationException, IllegalAc + public DictionaryLookupSpeedTest() { + super(100000); + Orient.instance().getProfiler().startRecording(); +- database = new ODatabaseFlat(System.getProperty(""url"")).open(""admin"", ""admin""); ++ database = new ODatabaseDocumentTx(System.getProperty(""url"")).open(""admin"", ""admin""); + } + + @Override +diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/DictionaryPutSpeedTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/DictionaryPutSpeedTest.java +index ab7d510ef1e..684a385244a 100644 +--- a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/DictionaryPutSpeedTest.java ++++ b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/DictionaryPutSpeedTest.java +@@ -15,10 +15,10 @@ + */ + package com.orientechnologies.orient.test.database.speed; + ++import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx; + import org.testng.annotations.Test; + + import com.orientechnologies.orient.core.Orient; +-import com.orientechnologies.orient.core.db.record.ODatabaseFlat; + import com.orientechnologies.orient.core.intent.OIntentMassiveInsert; + import com.orientechnologies.orient.core.record.impl.ORecordFlat; + import com.orientechnologies.orient.core.tx.OTransaction.TXTYPE; +@@ -26,7 +26,7 @@ + + @Test(enabled = false) + public class DictionaryPutSpeedTest extends OrientMonoThreadTest { +- private ODatabaseFlat database; ++ private ODatabaseDocumentTx database; + private ORecordFlat record; + private long startNum; + +@@ -40,10 +40,10 @@ public DictionaryPutSpeedTest() throws InstantiationException, IllegalAccessExce + super(1000000); + + String url = System.getProperty(""url""); +- database = new ODatabaseFlat(url).open(""admin"", ""admin""); ++ database = new ODatabaseDocumentTx(url).open(""admin"", ""admin""); + database.declareIntent(new OIntentMassiveInsert()); + +- record = database.newInstance(); ++ record = new ORecordFlat(); + startNum = 0;// database.countClusterElements(""Animal""); + + Orient.instance().getProfiler().startRecording(); +diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/LocalCreateBinarySpeedTest.java 
b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/LocalCreateBinarySpeedTest.java +index 7c2c3de9567..711877b3895 100644 +--- a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/LocalCreateBinarySpeedTest.java ++++ b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/LocalCreateBinarySpeedTest.java +@@ -17,10 +17,10 @@ + + import java.util.Random; + ++import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx; + import org.testng.annotations.Test; + + import com.orientechnologies.orient.core.Orient; +-import com.orientechnologies.orient.core.db.record.ODatabaseFlat; + import com.orientechnologies.orient.core.intent.OIntentMassiveInsert; + import com.orientechnologies.orient.core.record.impl.ORecordBytes; + import com.orientechnologies.orient.core.tx.OTransaction.TXTYPE; +@@ -28,7 +28,7 @@ + + @Test(enabled = false) + public class LocalCreateBinarySpeedTest extends OrientMonoThreadTest { +- private ODatabaseFlat database; ++ private ODatabaseDocumentTx database; + private ORecordBytes record; + private final static int RECORD_SIZE = 512; + private byte[] recordContent; +@@ -46,7 +46,7 @@ public LocalCreateBinarySpeedTest() throws InstantiationException, IllegalAccess + public void init() { + Orient.instance().getProfiler().startRecording(); + +- database = new ODatabaseFlat(System.getProperty(""url"")).open(""admin"", ""admin""); ++ database = new ODatabaseDocumentTx(System.getProperty(""url"")).open(""admin"", ""admin""); + record = new ORecordBytes(); + + database.declareIntent(new OIntentMassiveInsert()); +diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/LocalCreateFlatMultiThreadSpeedTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/LocalCreateFlatMultiThreadSpeedTest.java +index 80196ce13b2..36d663b78f8 100644 +--- a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/LocalCreateFlatMultiThreadSpeedTest.java ++++ b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/LocalCreateFlatMultiThreadSpeedTest.java +@@ -15,11 +15,11 @@ + */ + package com.orientechnologies.orient.test.database.speed; + ++import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx; + import org.testng.Assert; + import org.testng.annotations.Test; + + import com.orientechnologies.common.test.SpeedTestMultiThreads; +-import com.orientechnologies.orient.core.db.record.ODatabaseFlat; + import com.orientechnologies.orient.core.intent.OIntentMassiveInsert; + import com.orientechnologies.orient.core.record.impl.ORecordFlat; + import com.orientechnologies.orient.core.tx.OTransaction.TXTYPE; +@@ -28,12 +28,12 @@ + + @Test(enabled = false) + public class LocalCreateFlatMultiThreadSpeedTest extends OrientMultiThreadTest { +- protected ODatabaseFlat database; ++ protected ODatabaseDocumentTx database; + private long foundObjects; + + @Test(enabled = false) + public static class CreateObjectsThread extends OrientThreadTest { +- protected ODatabaseFlat database; ++ protected ODatabaseDocumentTx database; + protected ORecordFlat record; + + public CreateObjectsThread(final SpeedTestMultiThreads parent, final int threadId) { +@@ -42,8 +42,8 @@ public CreateObjectsThread(final SpeedTestMultiThreads parent, final int threadI + + @Override + public void init() { +- database = new ODatabaseFlat(System.getProperty(""url"")).open(""admin"", ""admin""); +- record = database.newInstance(); ++ database = new 
ODatabaseDocumentTx(System.getProperty(""url"")).open(""admin"", ""admin""); ++ record = new ORecordFlat(); + database.declareIntent(new OIntentMassiveInsert()); + database.begin(TXTYPE.NOTX); + } +@@ -75,7 +75,7 @@ public static void main(String[] iArgs) throws InstantiationException, IllegalAc + + @Override + public void init() { +- database = new ODatabaseFlat(System.getProperty(""url"")).open(""admin"", ""admin""); ++ database = new ODatabaseDocumentTx(System.getProperty(""url"")).open(""admin"", ""admin""); + foundObjects = database.countClusterElements(""flat""); + + System.out.println(""\nTotal objects in Animal cluster before the test: "" + foundObjects); +diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/LocalCreateFlatSpeedTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/LocalCreateFlatSpeedTest.java +index 11efd77aefa..800a7eaa423 100644 +--- a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/LocalCreateFlatSpeedTest.java ++++ b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/LocalCreateFlatSpeedTest.java +@@ -15,11 +15,11 @@ + */ + package com.orientechnologies.orient.test.database.speed; + ++import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx; + import org.testng.annotations.Test; + + import com.orientechnologies.orient.core.Orient; + import com.orientechnologies.orient.core.config.OGlobalConfiguration; +-import com.orientechnologies.orient.core.db.record.ODatabaseFlat; + import com.orientechnologies.orient.core.intent.OIntentMassiveInsert; + import com.orientechnologies.orient.core.record.impl.ORecordFlat; + import com.orientechnologies.orient.core.tx.OTransaction.TXTYPE; +@@ -27,7 +27,7 @@ + + @Test(enabled = false) + public class LocalCreateFlatSpeedTest extends OrientMonoThreadTest { +- private ODatabaseFlat database; ++ private ODatabaseDocumentTx database; + private ORecordFlat record; + private long date = System.currentTimeMillis(); + +@@ -45,13 +45,13 @@ public LocalCreateFlatSpeedTest() throws InstantiationException, IllegalAccessEx + public void init() { + Orient.instance().getProfiler().startRecording(); + +- database = new ODatabaseFlat(System.getProperty(""url"")); ++ database = new ODatabaseDocumentTx(System.getProperty(""url"")); + if (database.exists()) + database.open(""admin"", ""admin""); + else + database.create(); + +- record = database.newInstance(); ++ record = new ORecordFlat(); + + database.declareIntent(new OIntentMassiveInsert()); + database.begin(TXTYPE.NOTX); +diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/TxRemoteCreateFlatSpeedTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/TxRemoteCreateFlatSpeedTest.java +index 3d843ccf822..ff0cbd611ab 100644 +--- a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/TxRemoteCreateFlatSpeedTest.java ++++ b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/TxRemoteCreateFlatSpeedTest.java +@@ -15,16 +15,16 @@ + */ + package com.orientechnologies.orient.test.database.speed; + ++import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx; + import org.testng.annotations.Test; + + import com.orientechnologies.orient.core.Orient; +-import com.orientechnologies.orient.core.db.record.ODatabaseFlat; + import com.orientechnologies.orient.core.record.impl.ORecordFlat; + import com.orientechnologies.orient.test.database.base.OrientMonoThreadTest; + + @Test(enabled = false) + public 
class TxRemoteCreateFlatSpeedTest extends OrientMonoThreadTest { +- private ODatabaseFlat database; ++ private ODatabaseDocumentTx database; + private ORecordFlat record; + + public static void main(String[] iArgs) throws InstantiationException, IllegalAccessException { +@@ -40,8 +40,8 @@ public TxRemoteCreateFlatSpeedTest() throws InstantiationException, IllegalAcces + public void init() { + Orient.instance().getProfiler().startRecording(); + +- database = new ODatabaseFlat(System.getProperty(""url"")).open(""admin"", ""admin""); +- record = database.newInstance(); ++ database = new ODatabaseDocumentTx(System.getProperty(""url"")).open(""admin"", ""admin""); ++ record = new ORecordFlat(); + + database.begin(); + } +diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/TxRemoteCreateObjectsMultiThreadSpeedTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/TxRemoteCreateObjectsMultiThreadSpeedTest.java +index a041b4f5281..3d3c316871c 100644 +--- a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/TxRemoteCreateObjectsMultiThreadSpeedTest.java ++++ b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/TxRemoteCreateObjectsMultiThreadSpeedTest.java +@@ -16,20 +16,19 @@ + package com.orientechnologies.orient.test.database.speed; + + import com.orientechnologies.common.test.SpeedTestMultiThreads; +-import com.orientechnologies.orient.core.db.record.ODatabaseFlat; ++import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx; + import com.orientechnologies.orient.core.record.impl.ORecordFlat; +-import com.orientechnologies.orient.core.storage.OStorage; + import com.orientechnologies.orient.core.tx.OTransaction.TXTYPE; + import com.orientechnologies.orient.test.database.base.OrientMultiThreadTest; + import com.orientechnologies.orient.test.database.base.OrientThreadTest; + + public class TxRemoteCreateObjectsMultiThreadSpeedTest extends OrientMultiThreadTest { +- protected ODatabaseFlat database; +- protected long foundObjects; ++ protected ODatabaseDocumentTx database; ++ protected long foundObjects; + + public static class CreateObjectsThread extends OrientThreadTest { +- protected ODatabaseFlat database; +- protected ORecordFlat record = new ORecordFlat(); ++ protected ODatabaseDocumentTx database; ++ protected ORecordFlat record = new ORecordFlat(); + + public CreateObjectsThread(final SpeedTestMultiThreads parent, final int threadId) { + super(parent, threadId); +@@ -37,8 +36,8 @@ public CreateObjectsThread(final SpeedTestMultiThreads parent, final int threadI + + @Override + public void init() { +- database = new ODatabaseFlat(System.getProperty(""url"")).open(""admin"", ""admin""); +- record = database.newInstance(); ++ database = new ODatabaseDocumentTx(System.getProperty(""url"")).open(""admin"", ""admin""); ++ record = new ORecordFlat(); + + database.begin(TXTYPE.NOTX); + } +@@ -69,7 +68,7 @@ public static void main(String[] iArgs) throws InstantiationException, IllegalAc + + @Override + public void init() { +- database = new ODatabaseFlat(System.getProperty(""url"")).open(""admin"", ""admin""); ++ database = new ODatabaseDocumentTx(System.getProperty(""url"")).open(""admin"", ""admin""); + + if (!database.getStorage().getClusterNames().contains(""Animal"")) + database.addCluster(""Animal""); +diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/TxTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/TxTest.java +index 
151dad1d413..1a5ec437302 100644 +--- a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/TxTest.java ++++ b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/TxTest.java +@@ -17,22 +17,22 @@ + + import java.io.UnsupportedEncodingException; + ++import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx; + import org.testng.annotations.Test; + +-import com.orientechnologies.orient.core.db.record.ODatabaseFlat; + import com.orientechnologies.orient.core.record.impl.ORecordFlat; + import com.orientechnologies.orient.core.tx.OTransaction.TXTYPE; + import com.orientechnologies.orient.test.database.base.OrientMonoThreadTest; + + @Test(enabled = false) + public class TxTest extends OrientMonoThreadTest { +- private ODatabaseFlat database; ++ private ODatabaseDocumentTx database; + private ORecordFlat record; + + public TxTest() throws InstantiationException, IllegalAccessException { + super(10); +- database = new ODatabaseFlat(System.getProperty(""url"")).open(""admin"", ""admin""); +- record = database.newInstance(); ++ database = new ODatabaseDocumentTx(System.getProperty(""url"")).open(""admin"", ""admin""); ++ record = new ORecordFlat(); + + database.begin(TXTYPE.OPTIMISTIC); + }" +c3b4d70410f2acd2a7d6f3ef9fbcd2a156fe9f30,coremedia$jangaroo-tools,"Submit last night's results: +* Correct handling of newlines inside generated strings (e.g. modifiers rendered inside string for the runtime to interpret). +* Support *-imports. Not recommended, since it disables automatic class loading, but good for reusing existing ActionScript 3 libraries. +* FlexUnit now compiles and runs with minimal changes: +- added some missing semicolons +- static members of super classes are not in scope +- the super class and package-scoped methods still have to be +imported explicitly +- ArrayCollection not yet implemented +- special case when subclassing native class Error +[git-p4: depot-paths = ""//coremedia/jangaroo/"": change = 146563] +",a,https://github.com/coremedia/jangaroo-tools,"diff --git a/jooc/src/main/cup/net/jangaroo/jooc/joo.cup b/jooc/src/main/cup/net/jangaroo/jooc/joo.cup +index e1169a5f1..5b504c150 100644 +--- a/jooc/src/main/cup/net/jangaroo/jooc/joo.cup ++++ b/jooc/src/main/cup/net/jangaroo/jooc/joo.cup +@@ -229,6 +229,8 @@ classBody ::= + classBodyDeclarations ::= + {: RESULT = new ArrayList(); :} ++ | classBodyDeclarations:list directive:d ++ {: RESULT = list; :} + | classBodyDeclarations:list classBodyDeclaration:decl + {: RESULT = list; list.add(decl); :} + ; +@@ -269,6 +271,8 @@ constOrVar ::= + directive ::= + IMPORT:i qualifiedIde:ide SEMICOLON:s + {: RESULT = new ImportDirective(i,new IdeType(ide), s); :} ++ | IMPORT:i qualifiedIde:ide DOT:dot MUL:all SEMICOLON:s ++ {: RESULT = new ImportDirective(i,new IdeType(new QualifiedIde(ide,dot,all)), s); :} + | LBRACK:lb ide:ide RBRACK:rb + {: RESULT = new Annotation(lb, ide, rb); :} + | LBRACK:lb ide:ide LPAREN:lb2 annotationFields:af RPAREN:rb2 RBRACK:rb +@@ -286,7 +290,7 @@ annotationField ::= + ide:name EQ:eq expr:value + {: RESULT = new ObjectField(new IdeExpr(name),eq,value); :} + | expr:value +- {: RESULT = new ObjectField(new IdeExpr(new Ide(new JooSymbol(""""))),null,value); :} ++ {: RESULT = new ObjectField(null,null,value); :} + ; - /****************************************************************** -diff --git a/test/edu/ucla/sspace/text/corpora/PukWacDependencyCorpusReaderTest.java b/test/edu/ucla/sspace/text/corpora/PukWacDependencyCorpusReaderTest.java -index fd06a4b1..6a47fef7 100644 
---- a/test/edu/ucla/sspace/text/corpora/PukWacDependencyCorpusReaderTest.java -+++ b/test/edu/ucla/sspace/text/corpora/PukWacDependencyCorpusReaderTest.java -@@ -26,6 +26,7 @@ - import edu.ucla.sspace.text.CorpusReader; - import edu.ucla.sspace.text.Document; + annotationFields ::= +diff --git a/jooc/src/main/java/net/jangaroo/jooc/ApplyExpr.java b/jooc/src/main/java/net/jangaroo/jooc/ApplyExpr.java +index 39e17a443..857549d14 100644 +--- a/jooc/src/main/java/net/jangaroo/jooc/ApplyExpr.java ++++ b/jooc/src/main/java/net/jangaroo/jooc/ApplyExpr.java +@@ -22,6 +22,9 @@ + */ + class ApplyExpr extends Expr { -+import java.io.BufferedReader; - import java.io.StringReader; - import java.util.Iterator; ++ // TODO: add a compiler option for this: ++ public static final boolean ASSUME_UNDECLARED_UPPER_CASE_FUNCTIONS_CALLS_ARE_TYPE_CASTS = Boolean.valueOf(""true""); ++ + Expr fun; + boolean isType; + JooSymbol lParen; +@@ -63,9 +66,9 @@ public void analyze(Node parentNode, AnalyzeContext context) { + // otherwise, it is most likely an imported package-namespaced function. + if (Character.isUpperCase(funIde.getName().charAt(0))) { + Scope scope = context.getScope().findScopeThatDeclares(funIde); +- if (scope!=null) { +- isType = scope.getDeclaration()==context.getScope().getPackageDeclaration(); +- } ++ isType = scope == null ++ ? ASSUME_UNDECLARED_UPPER_CASE_FUNCTIONS_CALLS_ARE_TYPE_CASTS ++ : scope.getDeclaration() == context.getScope().getPackageDeclaration(); + } + } + fun.analyze(this, context); +diff --git a/jooc/src/main/java/net/jangaroo/jooc/ClassDeclaration.java b/jooc/src/main/java/net/jangaroo/jooc/ClassDeclaration.java +index 9797a8188..a2d5f048d 100644 +--- a/jooc/src/main/java/net/jangaroo/jooc/ClassDeclaration.java ++++ b/jooc/src/main/java/net/jangaroo/jooc/ClassDeclaration.java +@@ -28,6 +28,7 @@ public class ClassDeclaration extends IdeDeclaration { + private Map members = new LinkedHashMap(); + private Set boundMethodCandidates = new HashSet(); + private Set classInit = new HashSet(); ++ private List packageImports; -@@ -41,22 +42,22 @@ - public class PukWacDependencyCorpusReaderTest { + public Extends getOptExtends() { + return optExtends; +@@ -88,15 +89,14 @@ public void setConstructor(MethodDeclaration methodDeclaration) { + } - public static final String FIRST_SENTENCE = -- ""1 Mr. _ NNP NNP _ 2 NMOD _ _\n"" + -- ""2 Holt _ NNP NNP _ 3 SBJ _ _\n"" + -- ""3 is _ VBZ VBZ _ 0 ROOT _ _\n""; -+ toTabs(""1 Mr. 
_ NNP NNP _ 2 NMOD _ _\n"" + -+ ""2 Holt _ NNP NNP _ 3 SBJ _ _\n"" + -+ ""3 is _ VBZ VBZ _ 0 ROOT _ _\n""); + public void generateCode(JsWriter out) throws IOException { +- out.writeSymbolWhitespace(symClass); +- if (!writeRuntimeModifiersUnclosed(out)) { +- out.write(""\""""); +- } +- out.writeSymbolToken(symClass); ++ out.beginString(); ++ writeModifiers(out); ++ out.writeSymbol(symClass); + ide.generateCode(out); + if (optExtends != null) optExtends.generateCode(out); + if (optImplements != null) optImplements.generateCode(out); +- out.write(""\"",[""); ++ out.endString(); ++ out.write("",[""); + boolean isFirst = true; + for (MemberDeclaration memberDeclaration : members.values()) { + if (memberDeclaration.isMethod() && memberDeclaration.isPublic() && memberDeclaration.isStatic()) { +@@ -111,7 +111,12 @@ public void generateCode(JsWriter out) throws IOException { + } + } + out.write(""],""); +- out.write(""function($jooPublic,$jooPrivate){with($jooPublic)with($jooPrivate)return[""); ++ String packageName = QualifiedIde.constructQualifiedNameStr(getPackageDeclaration().getQualifiedName()); ++ out.write(""function($jooPublic,$jooPrivate){""); ++ for (String importedPackage : packageImports) { ++ out.write(""with(""+importedPackage+"")""); ++ } ++ out.write(""with(""+ packageName +"")with($jooPublic)with($jooPrivate)return[""); + if (!classInit.isEmpty()) { + out.write(""function(){joo.Class.init(""); + for (Iterator iterator = classInit.iterator(); iterator.hasNext();) { +@@ -129,6 +134,7 @@ public void generateCode(JsWriter out) throws IOException { + public void analyze(Node parentNode, AnalyzeContext context) { + // do *not* call super! + this.parentNode = parentNode; ++ packageImports = context.getCurrentPackage().getPackageImports(); + context.getScope().declareIde(getName(), this); + parentDeclaration = context.getScope().getPackageDeclaration(); + context.enterScope(this); +diff --git a/jooc/src/main/java/net/jangaroo/jooc/Declaration.java b/jooc/src/main/java/net/jangaroo/jooc/Declaration.java +index e5f11d767..05e4edefb 100644 +--- a/jooc/src/main/java/net/jangaroo/jooc/Declaration.java ++++ b/jooc/src/main/java/net/jangaroo/jooc/Declaration.java +@@ -120,28 +120,6 @@ protected void writeModifiers(JsWriter out) throws IOException { + } + } - public static final String SECOND_SENTENCE = -- ""4 a _ DT DT _ 5 NMOD _ _\n"" + -- ""5 columnist _ NN NN _ 3 PRD _ _\n"" + -- ""6 for _ IN IN _ 5 NMOD _ _\n"" + -- ""7 the _ DT DT _ 9 NMOD _ _\n"" + -- ""8 Literary _ NNP NNP _ 9 NMOD _ _\n""; +- protected void writeRuntimeModifiers(JsWriter out) throws IOException { +- if (writeRuntimeModifiersUnclosed(out)) { +- out.write(""\"",""); +- } +- } - -+ toTabs(""4 a _ DT DT _ 5 NMOD _ _\n"" + -+ ""5 columnist _ NN NN _ 3 PRD _ _\n"" + -+ ""6 for _ IN IN _ 5 NMOD _ _\n"" + -+ ""7 the _ DT DT _ 9 NMOD _ _\n"" + -+ ""8 Literary _ NNP NNP _ 9 NMOD _ _\n""); -+ - public static final String THIRD_SENTENCE = -- ""9 Review _ NNP NNP _ 6 PMOD _ _\n"" + -- ""10 in _ IN IN _ 9 ADV _ _\n"" + -- ""11 London _ NNP NNP _ 10 PMOD _ _\n"" + -- ""12 . _ . . _ 3 P _ _\n""; -+ toTabs(""9 Review _ NNP NNP _ 6 PMOD _ _\n"" + -+ ""10 in _ IN IN _ 9 ADV _ _\n"" + -+ ""11 London _ NNP NNP _ 10 PMOD _ _\n"" + -+ ""12 . _ . . 
_ 3 P _ _\n""); - - public static final String TEST_TEXT = - ""\n"" + -@@ -87,8 +88,24 @@ public class PukWacDependencyCorpusReaderTest { +- protected boolean writeRuntimeModifiersUnclosed(JsWriter out) throws IOException { +- if (symModifiers.length>0) { +- out.writeSymbolWhitespace(symModifiers[0]); +- out.write('""'); +- for (int i = 0; i < symModifiers.length; i++) { +- JooSymbol modifier = symModifiers[i]; +- if (i==0) +- out.writeSymbolToken(modifier); +- else +- out.writeSymbol(modifier); +- } +- return true; +- } +- return false; +- } +- + public void analyze(Node parentNode, AnalyzeContext context) { + super.analyze(parentNode, context); + parentDeclaration = context.getScope().getDeclaration(); +diff --git a/jooc/src/main/java/net/jangaroo/jooc/DotExpr.java b/jooc/src/main/java/net/jangaroo/jooc/DotExpr.java +index d0374a329..cd92f9407 100644 +--- a/jooc/src/main/java/net/jangaroo/jooc/DotExpr.java ++++ b/jooc/src/main/java/net/jangaroo/jooc/DotExpr.java +@@ -48,10 +48,11 @@ public void analyze(Node parentNode, AnalyzeContext context) { + if (!(parentNode instanceof ApplyExpr)) { + String[] qualifiedName = getQualifiedName(arg1); + if (qualifiedName!=null) { +- String qulifiedNameStr = QualifiedIde.constructQualifiedNameStr(qualifiedName); +- Scope declaringScope = context.getScope().findScopeThatDeclares(qulifiedNameStr); +- if (declaringScope!=null && declaringScope.getDeclaration().equals(context.getCurrentPackage())) { +- this.classDeclaration.addClassInit(qulifiedNameStr); ++ String qualifiedNameStr = QualifiedIde.constructQualifiedNameStr(qualifiedName); ++ Scope declaringScope = context.getScope().findScopeThatDeclares(qualifiedNameStr); ++ if (declaringScope==null && qualifiedNameStr.indexOf('.')==-1 && Character.isUpperCase(qualifiedNameStr.charAt(0)) ++ || declaringScope!=null && declaringScope.getDeclaration().equals(context.getCurrentPackage())) { ++ this.classDeclaration.addClassInit(qualifiedNameStr); + } + } + } +diff --git a/jooc/src/main/java/net/jangaroo/jooc/FieldDeclaration.java b/jooc/src/main/java/net/jangaroo/jooc/FieldDeclaration.java +index 18d789af3..dc7db6fca 100644 +--- a/jooc/src/main/java/net/jangaroo/jooc/FieldDeclaration.java ++++ b/jooc/src/main/java/net/jangaroo/jooc/FieldDeclaration.java +@@ -35,13 +35,11 @@ public boolean isField() { + } - private static String readAll(Document doc) throws Exception { - StringBuilder sb = new StringBuilder(); -- for (String line = null; (line = doc.reader().readLine()) != null; ) -+ BufferedReader reader = doc.reader(); -+ for (String line = null; (line = reader.readLine()) != null; ) - sb.append(line).append(""\n""); - return sb.toString(); + public void generateCode(JsWriter out) throws IOException { +- out.writeSymbolWhitespace(optSymConstOrVar); +- if (!writeRuntimeModifiersUnclosed(out)) { +- out.write(""\""""); +- } +- out.writeSymbolToken(optSymConstOrVar); +- out.write(""\"",""); +- out.write('{'); ++ out.beginString(); ++ writeModifiers(out); ++ out.writeSymbol(optSymConstOrVar); ++ out.endString(); ++ out.write("",{""); + generateIdeCode(out); + if (optTypeRelation != null) + optTypeRelation.generateCode(out); +diff --git a/jooc/src/main/java/net/jangaroo/jooc/ImportDirective.java b/jooc/src/main/java/net/jangaroo/jooc/ImportDirective.java +index aa142b060..b8be3237b 100644 +--- a/jooc/src/main/java/net/jangaroo/jooc/ImportDirective.java ++++ b/jooc/src/main/java/net/jangaroo/jooc/ImportDirective.java +@@ -37,9 +37,16 @@ public void analyze(Node parentNode, AnalyzeContext context) { + 
super.analyze(parentNode, context); + if (type instanceof IdeType) { + Ide ide = ((IdeType)type).ide; +- context.getScope().declareIde(ide.getName(), this); +- // also add the fully qualified name (might be the same string for top level imports): +- context.getScope().declareIde(QualifiedIde.constructQualifiedNameStr(ide.getQualifiedName()), this); ++ String typeName = ide.getName(); ++ if (""*"".equals(typeName)) { ++ // found *-import, do not register, but add to package import list: ++ String packageName = QualifiedIde.constructQualifiedNameStr(((QualifiedIde)ide).prefix.getQualifiedName()); ++ context.getCurrentPackage().addPackageImport(packageName); ++ } else { ++ context.getScope().declareIde(typeName, this); ++ // also add the fully qualified name (might be the same string for top level imports): ++ context.getScope().declareIde(QualifiedIde.constructQualifiedNameStr(ide.getQualifiedName()), this); ++ } } + } + +diff --git a/jooc/src/main/java/net/jangaroo/jooc/JsWriter.java b/jooc/src/main/java/net/jangaroo/jooc/JsWriter.java +index 28a606519..5e99d3ffa 100644 +--- a/jooc/src/main/java/net/jangaroo/jooc/JsWriter.java ++++ b/jooc/src/main/java/net/jangaroo/jooc/JsWriter.java +@@ -33,6 +33,8 @@ public class JsWriter extends FilterWriter { + boolean inComment = false; + int nOpenBeginComments = 0; + char lastChar = ' '; ++ boolean inString = false; ++ int nOpenStrings = 0; + + public JsWriter(Writer target) { + super(target); +@@ -152,7 +154,7 @@ public void beginComment() throws IOException { + nOpenBeginComments++; + } + +- private final boolean shouldWrite() throws IOException { ++ private boolean shouldWrite() throws IOException { + boolean result = keepSource || nOpenBeginComments == 0; + if (result) { + if (nOpenBeginComments > 0 && !inComment) { +@@ -178,10 +180,65 @@ public void beginCommentWriteSymbol(JooSymbol symbol) throws IOException { + writeSymbol(symbol); + } + ++ public void beginString() throws IOException { ++ nOpenStrings++; ++ } + -+ static String toTabs(String doc) { -+ StringBuilder sb = new StringBuilder(); -+ String[] arr = doc.split(""\n""); -+ for (String line : arr) { -+ String[] cols = line.split(""\\s+""); -+ for (int i = 0; i < cols.length; ++i) { -+ sb.append(cols[i]); -+ if (i + 1 < cols.length) -+ sb.append('\t'); -+ } -+ sb.append('\n'); -+ } -+ return sb.toString(); ++ private void checkOpenString() throws IOException { ++ if (nOpenStrings > 0 && !inString) { ++ out.write('""'); ++ lastChar = '""'; ++ inString = true; + } - } -diff --git a/test/edu/ucla/sspace/wordsi/DependencyContextExtractorTest.java b/test/edu/ucla/sspace/wordsi/DependencyContextExtractorTest.java -index 6aae6fb7..8c0e843a 100644 ---- a/test/edu/ucla/sspace/wordsi/DependencyContextExtractorTest.java -+++ b/test/edu/ucla/sspace/wordsi/DependencyContextExtractorTest.java -@@ -70,7 +70,7 @@ public class DependencyContextExtractorTest { - MockWordsi wordsi = new MockWordsi(null, extractor); - - extractor.processDocument( -- new BufferedReader(new StringReader(SINGLE_PARSE)), -+ new BufferedReader(new StringReader(toTabs(SINGLE_PARSE))), - wordsi); - assertTrue(wordsi.called); - } -@@ -178,4 +178,19 @@ public boolean acceptWord(String word) { - return word.equals(""cat""); - } - } ++ } + -+ static String toTabs(String doc) { -+ StringBuilder sb = new StringBuilder(); -+ String[] arr = doc.split(""\n""); -+ for (String line : arr) { -+ String[] cols = line.split(""\\s+""); -+ for (int i = 0; i < cols.length; ++i) { -+ sb.append(cols[i]); -+ if (i + 1 < cols.length) -+ sb.append('\t'); 
-+ } -+ sb.append('\n'); -+ } -+ return sb.toString(); ++ private boolean checkCloseString() throws IOException { ++ if (inString) { ++ out.write('""'); ++ inString = false; ++ return true; + } - } -diff --git a/test/edu/ucla/sspace/wordsi/OccurrenceDependencyContextGeneratorTest.java b/test/edu/ucla/sspace/wordsi/OccurrenceDependencyContextGeneratorTest.java -index 246d2542..33b1f431 100644 ---- a/test/edu/ucla/sspace/wordsi/OccurrenceDependencyContextGeneratorTest.java -+++ b/test/edu/ucla/sspace/wordsi/OccurrenceDependencyContextGeneratorTest.java -@@ -46,18 +46,18 @@ - public class OccurrenceDependencyContextGeneratorTest { - - public static final String SINGLE_PARSE = -- ""1 Mr. _ NNP NNP _ 2 NMOD _ _\n"" + -- ""2 Holt _ NNP NNP _ 3 SBJ _ _\n"" + -- ""3 is _ VBZ VBZ _ 0 ROOT _ _\n"" + -- ""4 a _ DT DT _ 5 NMOD _ _\n"" + -- ""5 columnist _ NN NN _ 3 PRD _ _\n"" + -- ""6 for _ IN IN _ 5 NMOD _ _\n"" + -- ""7 the _ DT DT _ 9 NMOD _ _\n"" + -- ""8 Literary _ NNP NNP _ 9 NMOD _ _\n"" + -- ""9 Review _ NNP NNP _ 6 PMOD _ _\n"" + -- ""10 in _ IN IN _ 9 ADV _ _\n"" + -- ""11 London _ NNP NNP _ 10 PMOD _ _\n"" + -- ""12 . _ . . _ 3 P _ _""; -+ toTabs(""1 Mr. _ NNP NNP _ 2 NMOD _ _\n"" + -+ ""2 Holt _ NNP NNP _ 3 SBJ _ _\n"" + -+ ""3 is _ VBZ VBZ _ 0 ROOT _ _\n"" + -+ ""4 a _ DT DT _ 5 NMOD _ _\n"" + -+ ""5 columnist _ NN NN _ 3 PRD _ _\n"" + -+ ""6 for _ IN IN _ 5 NMOD _ _\n"" + -+ ""7 the _ DT DT _ 9 NMOD _ _\n"" + -+ ""8 Literary _ NNP NNP _ 9 NMOD _ _\n"" + -+ ""9 Review _ NNP NNP _ 6 PMOD _ _\n"" + -+ ""10 in _ IN IN _ 9 ADV _ _\n"" + -+ ""11 London _ NNP NNP _ 10 PMOD _ _\n"" + -+ ""12 . _ . . _ 3 P _ _""); ++ return false; ++ } ++ ++ public void endString() throws IOException { ++ Debug.assertTrue(nOpenStrings > 0, ""missing beginString() for endString()""); ++ nOpenStrings--; ++ if (nOpenStrings == 0) { ++ checkCloseString(); ++ } ++ } ++ ++ private void writeLinesInsideString(String ws) throws IOException { ++ String[] lines = ws.split(""\n"",-1); ++ for (int i = 0; i < lines.length-1; i++) { ++ String line = lines[i]; ++ if (line.length()>1) { ++ checkOpenString(); ++ write(line.substring(0,line.length()-1)); ++ write(""\\n""); ++ } ++ if(checkCloseString()) { ++ write(""+""); ++ } ++ write(""\n""); ++ } ++ String line = lines[lines.length - 1]; ++ if (line.length()>0) { ++ checkOpenString(); ++ write(line); ++ } ++ } ++ + public void writeSymbolWhitespace(JooSymbol symbol) throws IOException { + String ws = symbol.getWhitespace(); +- if (keepSource) +- write(ws); ++ if (keepSource) { ++ if (inString) { ++ writeLinesInsideString(ws); ++ } else { ++ write(ws); ++ } ++ } + else if (keepLines) + writeLines(ws); + } +@@ -192,8 +249,14 @@ protected void writeLines(String s) throws IOException { - @Test public void testOccurrence() throws Exception { - DependencyExtractor extractor = new CoNLLDependencyExtractor(); -@@ -82,22 +82,37 @@ public int getDimension(String key) { - return 0; - if (key.equals(""is"")) - return 1; -- if (key.equals(""holt"")) -+ if (key.equals(""Holt"")) - return 2; -- if (key.equals(""mr."")) -+ if (key.equals(""Mr."")) - return 3; + protected void writeLines(String s, int off, int len) throws IOException { + int pos = off; +- while ((pos = s.indexOf('\n', pos)+1) > 0 && pos < off+len+1) ++ while ((pos = s.indexOf('\n', pos)+1) > 0 && pos < off+len+1) { ++ if (inString) { ++ write(""\\n""); ++ checkCloseString(); ++ write('+'); ++ } + write('\n'); ++ } + } - if (key.equals(""for"")) - return 4; - if (key.equals(""the"")) - return 5; -- if (key.equals(""literary"")) 
-+ if (key.equals(""Literary"")) - return 6; -- if (key.equals(""review"")) -+ if (key.equals(""Review"")) - return 7; - if (key.equals(""in"")) - return 8; - return -1; + public void writeToken(String token) throws IOException { +@@ -204,6 +267,7 @@ public void writeToken(String token) throws IOException { + (lastChar == firstSymbolChar && ""=>= 0) || + (firstSymbolChar == '=' && ""=>= 0)) + write(' '); ++ checkOpenString(); + write(text); + } + } +diff --git a/jooc/src/main/java/net/jangaroo/jooc/MethodDeclaration.java b/jooc/src/main/java/net/jangaroo/jooc/MethodDeclaration.java +index 11ecafe4a..b0f442645 100644 +--- a/jooc/src/main/java/net/jangaroo/jooc/MethodDeclaration.java ++++ b/jooc/src/main/java/net/jangaroo/jooc/MethodDeclaration.java +@@ -125,26 +125,27 @@ public void generateCode(JsWriter out) throws IOException { + out.writeSymbol(symFunction); + ide.generateCode(out); + } else { +- if (!writeRuntimeModifiersUnclosed(out)) +- out.write(""\""""); +- else +- out.write("" ""); ++ out.beginString(); ++ writeModifiers(out); + String methodName = ide.getName(); +- out.write(methodName); +- out.write(""\"",""); ++ if (!isConstructor && !isStatic() && classDeclaration.isBoundMethod(methodName)) { ++ out.writeToken(""bound""); ++ } ++ out.writeToken(methodName); ++ out.endString(); ++ out.write("",""); + out.writeSymbol(symFunction); + out.writeSymbolWhitespace(ide.ide); + if (out.getKeepSource()) { +- out.write("" ""); + if (isConstructor) { + // do not name the constructor initializer function like the class, or it will be called + // instead of the constructor function generated by the runtime! So we prefix it with a ""$"". + // The name is for debugging purposes only, anyway. +- out.write(""$""+methodName); ++ out.writeToken(""$""+methodName); + } else if (ide instanceof AccessorIde) { +- out.write(((AccessorIde)ide).getFunctionName()); ++ out.writeToken(((AccessorIde)ide).getFunctionName()); + } else { +- out.write(methodName); ++ out.writeToken(methodName); } + } } -+ -+ static String toTabs(String doc) { -+ StringBuilder sb = new StringBuilder(); -+ String[] arr = doc.split(""\n""); -+ for (String line : arr) { -+ String[] cols = line.split(""\\s+""); -+ for (int i = 0; i < cols.length; ++i) { -+ sb.append(cols[i]); -+ if (i + 1 < cols.length) -+ sb.append('\t'); -+ } -+ sb.append('\n'); -+ } -+ return sb.toString(); +diff --git a/jooc/src/main/java/net/jangaroo/jooc/ObjectField.java b/jooc/src/main/java/net/jangaroo/jooc/ObjectField.java +index 3831976b5..c02ab5d6a 100644 +--- a/jooc/src/main/java/net/jangaroo/jooc/ObjectField.java ++++ b/jooc/src/main/java/net/jangaroo/jooc/ObjectField.java +@@ -34,13 +34,17 @@ public ObjectField(Expr nameExpr, JooSymbol symColon, Expr value) { + + public void analyze(Node parentNode, AnalyzeContext context) { + super.analyze(parentNode, context); +- nameExpr.analyze(this, context); ++ if (nameExpr!=null) { ++ nameExpr.analyze(this, context); + } - } -diff --git a/test/edu/ucla/sspace/wordsi/OrderingDependencyContextGeneratorTest.java b/test/edu/ucla/sspace/wordsi/OrderingDependencyContextGeneratorTest.java -index 9bd825fc..241e6ef3 100644 ---- a/test/edu/ucla/sspace/wordsi/OrderingDependencyContextGeneratorTest.java -+++ b/test/edu/ucla/sspace/wordsi/OrderingDependencyContextGeneratorTest.java -@@ -49,18 +49,18 @@ - public class OrderingDependencyContextGeneratorTest { + value.analyze(this, context); + } - public static final String SINGLE_PARSE = -- ""1 Mr. 
_ NNP NNP _ 2 NMOD _ _\n"" + -- ""2 Holt _ NNP NNP _ 3 SBJ _ _\n"" + -- ""3 is _ VBZ VBZ _ 0 ROOT _ _\n"" + -- ""4 a _ DT DT _ 5 NMOD _ _\n"" + -- ""5 columnist _ NN NN _ 3 PRD _ _\n"" + -- ""6 for _ IN IN _ 5 NMOD _ _\n"" + -- ""7 the _ DT DT _ 9 NMOD _ _\n"" + -- ""8 Literary _ NNP NNP _ 9 NMOD _ _\n"" + -- ""9 Review _ NNP NNP _ 6 PMOD _ _\n"" + -- ""10 in _ IN IN _ 9 ADV _ _\n"" + -- ""11 London _ NNP NNP _ 10 PMOD _ _\n"" + -- ""12 . _ . . _ 3 P _ _""; -+ toTabs(""1 Mr. _ NNP NNP _ 2 NMOD _ _\n"" + -+ ""2 Holt _ NNP NNP _ 3 SBJ _ _\n"" + -+ ""3 is _ VBZ VBZ _ 0 ROOT _ _\n"" + -+ ""4 a _ DT DT _ 5 NMOD _ _\n"" + -+ ""5 columnist _ NN NN _ 3 PRD _ _\n"" + -+ ""6 for _ IN IN _ 5 NMOD _ _\n"" + -+ ""7 the _ DT DT _ 9 NMOD _ _\n"" + -+ ""8 Literary _ NNP NNP _ 9 NMOD _ _\n"" + -+ ""9 Review _ NNP NNP _ 6 PMOD _ _\n"" + -+ ""10 in _ IN IN _ 9 ADV _ _\n"" + -+ ""11 London _ NNP NNP _ 10 PMOD _ _\n"" + -+ ""12 . _ . . _ 3 P _ _""); + public void generateCode(JsWriter out) throws IOException { +- nameExpr.generateCode(out); +- out.writeSymbol(symColon); ++ if (nameExpr!=null) { ++ nameExpr.generateCode(out); ++ out.writeSymbol(symColon); ++ } + value.generateCode(out); + } - @Test public void testOrdering() throws Exception { - DependencyExtractor extractor = new CoNLLDependencyExtractor(); -@@ -85,22 +85,37 @@ public int getDimension(String key) { - return 0; - if (key.equals(""is--2"")) - return 1; -- if (key.equals(""holt--3"")) -+ if (key.equals(""Holt--3"")) - return 2; -- if (key.equals(""mr.--4"")) -+ if (key.equals(""Mr.--4"")) - return 3; +diff --git a/jooc/src/main/java/net/jangaroo/jooc/PackageDeclaration.java b/jooc/src/main/java/net/jangaroo/jooc/PackageDeclaration.java +index f82eaaf8d..f0a343dd7 100644 +--- a/jooc/src/main/java/net/jangaroo/jooc/PackageDeclaration.java ++++ b/jooc/src/main/java/net/jangaroo/jooc/PackageDeclaration.java +@@ -16,6 +16,9 @@ + package net.jangaroo.jooc; - if (key.equals(""for-1"")) - return 4; - if (key.equals(""the-2"")) - return 5; -- if (key.equals(""literary-3"")) -+ if (key.equals(""Literary-3"")) - return 6; -- if (key.equals(""review-4"")) -+ if (key.equals(""Review-4"")) - return 7; - if (key.equals(""in-5"")) - return 8; - return -1; + import java.io.IOException; ++import java.util.List; ++import java.util.ArrayList; ++import java.util.Collections; + + /** + * @author Andreas Gawecki +@@ -23,6 +26,15 @@ + public class PackageDeclaration extends IdeDeclaration { + + JooSymbol symPackage; ++ private List packageImports = new ArrayList(); ++ ++ public void addPackageImport(String packageName) { ++ packageImports.add(packageName); ++ } ++ ++ public List getPackageImports() { ++ return Collections.unmodifiableList(packageImports); ++ } + + public PackageDeclaration(JooSymbol symPackage, Ide ide) { + super(new JooSymbol[0], 0, ide); +@@ -30,12 +42,12 @@ public PackageDeclaration(JooSymbol symPackage, Ide ide) { + } + + public void generateCode(JsWriter out) throws IOException { +- out.writeSymbolWhitespace(symPackage); +- out.write(""\""package ""); ++ out.beginString(); ++ out.writeSymbol(symPackage); + if (ide!=null) { + ide.generateCode(out); + } +- out.write(""\""""); ++ out.endString(); + out.write("",""); + } + +diff --git a/jooc/src/main/java/net/jangaroo/jooc/TopLevelIdeExpr.java b/jooc/src/main/java/net/jangaroo/jooc/TopLevelIdeExpr.java +index 53a2ce32c..859ba12f0 100644 +--- a/jooc/src/main/java/net/jangaroo/jooc/TopLevelIdeExpr.java ++++ b/jooc/src/main/java/net/jangaroo/jooc/TopLevelIdeExpr.java +@@ -38,7 +38,7 @@ public void 
analyze(Node parentNode, AnalyzeContext context) { + if (scope!=null) { + Scope declaringScope = scope.findScopeThatDeclares(ide); + if (declaringScope==null || declaringScope.getDeclaration() instanceof ClassDeclaration) { +- synthesizedDotExpr = new DotExpr(new ThisExpr(new JooSymbol(""this"")), new JooSymbol("".""), ide); ++ synthesizedDotExpr = new DotExpr(new ThisExpr(new JooSymbol(""this"")), new JooSymbol("".""), new Ide(ide.ide)); + synthesizedDotExpr.analyze(parentNode, context); + } + } +@@ -59,25 +59,28 @@ private boolean addThis() { + Scope declaringScope = scope.findScopeThatDeclares(ide); + if (declaringScope==null) { + // check for fully qualified ide: +- IdeExpr currentExpr = this; ++ DotExpr currentDotExpr = synthesizedDotExpr; + String ideName = ide.getName(); +- while (declaringScope==null && currentExpr.parentNode instanceof DotExpr && ((DotExpr)currentExpr.parentNode).arg2 instanceof IdeExpr) { +- currentExpr = (IdeExpr)((DotExpr)currentExpr.parentNode).arg2; +- ideName += ""."" +currentExpr.ide.getName(); ++ while (currentDotExpr.parentNode instanceof DotExpr) { ++ currentDotExpr = (DotExpr)currentDotExpr.parentNode; ++ if (!(currentDotExpr.arg2 instanceof IdeExpr)) ++ break; ++ ideName += ""."" +((IdeExpr)currentDotExpr.arg2).ide.getName(); + declaringScope = scope.findScopeThatDeclares(ideName); ++ if (declaringScope!=null) { ++ // it has been defined in the meantime or is an imported qualified identifier: ++ return false; ++ } + } +- if (declaringScope!=null) { +- return false; +- } +- boolean probablyAType = Character.isUpperCase(ide.getName().charAt(0)); ++ boolean maybeInScope = Character.isUpperCase(ide.getName().charAt(0)); + String warningMsg = ""Undeclared identifier: "" + ide.getName(); +- if (probablyAType) { +- warningMsg += "", assuming it is a top level type.""; ++ if (maybeInScope) { ++ warningMsg += "", assuming it is already in scope.""; + } else if (ASSUME_UNDECLARED_IDENTIFIERS_ARE_MEMBERS) { + warningMsg += "", assuming it is an inherited member.""; } + Jooc.warning(ide.getSymbol(), warningMsg); +- return !probablyAType && ASSUME_UNDECLARED_IDENTIFIERS_ARE_MEMBERS; ++ return !maybeInScope && ASSUME_UNDECLARED_IDENTIFIERS_ARE_MEMBERS; + } else if (declaringScope.getDeclaration() instanceof ClassDeclaration) { + MemberDeclaration memberDeclaration = (MemberDeclaration)declaringScope.getIdeDeclaration(ide); + return !memberDeclaration.isStatic() && !memberDeclaration.isConstructor(); +@@ -85,4 +88,5 @@ private boolean addThis() { } + return false; + } + -+ static String toTabs(String doc) { -+ StringBuilder sb = new StringBuilder(); -+ String[] arr = doc.split(""\n""); -+ for (String line : arr) { -+ String[] cols = line.split(""\\s+""); -+ for (int i = 0; i < cols.length; ++i) { -+ sb.append(cols[i]); -+ if (i + 1 < cols.length) -+ sb.append('\t'); -+ } -+ sb.append('\n'); -+ } -+ return sb.toString(); -+ } } -diff --git a/test/edu/ucla/sspace/wordsi/PartOfSpeechDependencyContextGeneratorTest.java b/test/edu/ucla/sspace/wordsi/PartOfSpeechDependencyContextGeneratorTest.java -index 5da76df4..6fa81d67 100644 ---- a/test/edu/ucla/sspace/wordsi/PartOfSpeechDependencyContextGeneratorTest.java -+++ b/test/edu/ucla/sspace/wordsi/PartOfSpeechDependencyContextGeneratorTest.java -@@ -49,18 +49,18 @@ - public class PartOfSpeechDependencyContextGeneratorTest { +\ No newline at end of file +diff --git a/jooc/src/main/js/joo/Class.js b/jooc/src/main/js/joo/Class.js +index eca78a3ad..c97c163db 100644 +--- a/jooc/src/main/js/joo/Class.js ++++ 
b/jooc/src/main/js/joo/Class.js +@@ -499,7 +499,7 @@ Function.prototype.bind = function(object) { + var memberName = undefined; + var modifiers; + if (typeof members==""string"") { +- modifiers = members.split("" ""); ++ modifiers = members.split(/\s+/); + for (var j=0; j get_files (); + } - public static final String SINGLE_PARSE = -- ""1 Mr. _ NNP NNP _ 2 NMOD _ _\n"" + -- ""2 Holt _ NNP NNP _ 3 SBJ _ _\n"" + -- ""3 is _ VBZ VBZ _ 0 ROOT _ _\n"" + -- ""4 a _ DT DT _ 5 NMOD _ _\n"" + -- ""5 columnist _ NN NN _ 3 PRD _ _\n"" + -- ""6 for _ IN IN _ 5 NMOD _ _\n"" + -- ""7 the _ DT DT _ 9 NMOD _ _\n"" + -- ""8 Literary _ NNP NNP _ 9 NMOD _ _\n"" + -- ""9 Review _ NNP NNP _ 6 PMOD _ _\n"" + -- ""10 in _ IN IN _ 9 ADV _ _\n"" + -- ""11 London _ NNP NNP _ 10 PMOD _ _\n"" + -- ""12 . _ . . _ 3 P _ _""; -+ toTabs(""1 Mr. _ NNP NNP _ 2 NMOD _ _\n"" + -+ ""2 Holt _ NNP NNP _ 3 SBJ _ _\n"" + -+ ""3 is _ VBZ VBZ _ 0 ROOT _ _\n"" + -+ ""4 a _ DT DT _ 5 NMOD _ _\n"" + -+ ""5 columnist _ NN NN _ 3 PRD _ _\n"" + -+ ""6 for _ IN IN _ 5 NMOD _ _\n"" + -+ ""7 the _ DT DT _ 9 NMOD _ _\n"" + -+ ""8 Literary _ NNP NNP _ 9 NMOD _ _\n"" + -+ ""9 Review _ NNP NNP _ 6 PMOD _ _\n"" + -+ ""10 in _ IN IN _ 9 ADV _ _\n"" + -+ ""11 London _ NNP NNP _ 10 PMOD _ _\n"" + -+ ""12 . _ . . _ 3 P _ _""); +- public static delegate void CallbackMarshal (Object object, void* data, Arg[] args); ++ [CCode (has_target = false)] ++ public delegate void CallbackMarshal (Object object, void* data, Arg[] args); + + public delegate void ActionCallback (Action action); + +diff --git a/vapi/packages/tracker-indexer-module-1.0/tracker-indexer-module-1.0-custom.vala b/vapi/packages/tracker-indexer-module-1.0/tracker-indexer-module-1.0-custom.vala +index 7f9752f4b3..f5cf696ead 100644 +--- a/vapi/packages/tracker-indexer-module-1.0/tracker-indexer-module-1.0-custom.vala ++++ b/vapi/packages/tracker-indexer-module-1.0/tracker-indexer-module-1.0-custom.vala +@@ -138,26 +138,26 @@ namespace Tracker { + namespace Module { + [CCode (cheader_filename = ""tracker-1.0/libtracker-indexer/tracker-module.h"")] + public delegate void FileFreeDataFunc (); +- [CCode (cheader_filename = ""tracker-1.0/libtracker-indexer/tracker-module.h"")] +- public static delegate void* FileGetDataFunc (string path); +- [CCode (cheader_filename = ""tracker-1.0/libtracker-indexer/tracker-module.h"")] +- public static delegate weak Tracker.Metadata FileGetMetadataFunc (Tracker.File file); +- [CCode (cheader_filename = ""tracker-1.0/libtracker-indexer/tracker-module.h"")] +- public static delegate weak string FileGetServiceTypeFunc (Tracker.File file); +- [CCode (cheader_filename = ""tracker-1.0/libtracker-indexer/tracker-module.h"")] +- public static delegate weak string FileGetText (Tracker.File path); +- [CCode (cheader_filename = ""tracker-1.0/libtracker-indexer/tracker-module.h"")] +- public static delegate void FileGetUriFunc (Tracker.File file, string dirname, string basename); +- [CCode (cheader_filename = ""tracker-1.0/libtracker-indexer/tracker-module.h"")] +- public static delegate bool FileIterContents (Tracker.File path); +- [CCode (cheader_filename = ""tracker-1.0/libtracker-indexer/tracker-module.h"")] +- public static delegate weak string GetDirectoriesFunc (); +- [CCode (cheader_filename = ""tracker-1.0/libtracker-indexer/tracker-module.h"")] +- public static delegate weak string GetNameFunc (); +- [CCode (cheader_filename = ""tracker-1.0/libtracker-indexer/tracker-module.h"")] +- public static delegate void Init (); +- [CCode (cheader_filename = 
""tracker-1.0/libtracker-indexer/tracker-module.h"")] +- public static delegate void Shutdown (); ++ [CCode (cheader_filename = ""tracker-1.0/libtracker-indexer/tracker-module.h"", has_target = false)] ++ public delegate void* FileGetDataFunc (string path); ++ [CCode (cheader_filename = ""tracker-1.0/libtracker-indexer/tracker-module.h"", has_target = false)] ++ public delegate weak Tracker.Metadata FileGetMetadataFunc (Tracker.File file); ++ [CCode (cheader_filename = ""tracker-1.0/libtracker-indexer/tracker-module.h"", has_target = false)] ++ public delegate weak string FileGetServiceTypeFunc (Tracker.File file); ++ [CCode (cheader_filename = ""tracker-1.0/libtracker-indexer/tracker-module.h"", has_target = false)] ++ public delegate weak string FileGetText (Tracker.File path); ++ [CCode (cheader_filename = ""tracker-1.0/libtracker-indexer/tracker-module.h"", has_target = false)] ++ public delegate void FileGetUriFunc (Tracker.File file, string dirname, string basename); ++ [CCode (cheader_filename = ""tracker-1.0/libtracker-indexer/tracker-module.h"", has_target = false)] ++ public delegate bool FileIterContents (Tracker.File path); ++ [CCode (cheader_filename = ""tracker-1.0/libtracker-indexer/tracker-module.h"", has_target = false)] ++ public delegate weak string GetDirectoriesFunc (); ++ [CCode (cheader_filename = ""tracker-1.0/libtracker-indexer/tracker-module.h"", has_target = false)] ++ public delegate weak string GetNameFunc (); ++ [CCode (cheader_filename = ""tracker-1.0/libtracker-indexer/tracker-module.h"", has_target = false)] ++ public delegate void Init (); ++ [CCode (cheader_filename = ""tracker-1.0/libtracker-indexer/tracker-module.h"", has_target = false)] ++ public delegate void Shutdown (); + + [CCode (cheader_filename = ""tracker-1.0/libtracker-indexer/tracker-module.h"")] + public static void file_free_data (void* file_data);" +dc9e9cb4cc87f132a32a00e6589d807350f0b8e0,elasticsearch,Aggregations: change to default shard_size in- terms aggregation--The default shard size in the terms aggregation now uses BucketUtils.suggestShardSideQueueSize() to set the shard size if the user does not specify it as a parameter.--Closes -6857-,p,https://github.com/elastic/elasticsearch,"diff --git a/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsParser.java b/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsParser.java +index c4b57064e80eb..c38f136dd9b29 100644 +--- a/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsParser.java ++++ b/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsParser.java +@@ -21,6 +21,7 @@ + import org.elasticsearch.common.xcontent.XContentParser; + import org.elasticsearch.search.aggregations.Aggregator; + import org.elasticsearch.search.aggregations.AggregatorFactory; ++import org.elasticsearch.search.aggregations.bucket.BucketUtils; + import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; + import org.elasticsearch.search.aggregations.support.ValuesSourceParser; + import org.elasticsearch.search.internal.SearchContext; +@@ -32,7 +33,6 @@ + */ + public class TermsParser implements Aggregator.Parser { - @Test public void testGenerate() throws Exception { - DependencyExtractor extractor = new CoNLLDependencyExtractor(); -@@ -85,22 +85,37 @@ public int getDimension(String key) { - return 0; - if (key.equals(""is-VBZ"")) - return 1; -- if (key.equals(""holt-NNP"")) -+ if (key.equals(""Holt-NNP"")) - return 2; -- if (key.equals(""mr.-NNP"")) -+ if 
(key.equals(""Mr.-NNP"")) - return 3; +- + @Override + public String type() { + return StringTerms.TYPE.name(); +@@ -41,19 +41,22 @@ public String type() { + @Override + public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException { + TermsParametersParser aggParser = new TermsParametersParser(); +- ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, StringTerms.TYPE, context) +- .scriptable(true) +- .formattable(true) +- .requiresSortedValues(true) +- .requiresUniqueValues(true) +- .build(); ++ ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, StringTerms.TYPE, context).scriptable(true).formattable(true) ++ .requiresSortedValues(true).requiresUniqueValues(true).build(); + IncludeExclude.Parser incExcParser = new IncludeExclude.Parser(aggregationName, StringTerms.TYPE, context); + aggParser.parse(aggregationName, parser, context, vsParser, incExcParser); - if (key.equals(""for-IN"")) - return 4; - if (key.equals(""the-DT"")) - return 5; -- if (key.equals(""literary-NNP"")) -+ if (key.equals(""Literary-NNP"")) - return 6; -- if (key.equals(""review-NNP"")) -+ if (key.equals(""Review-NNP"")) - return 7; - if (key.equals(""in-IN"")) - return 8; - return -1; ++ InternalOrder order = resolveOrder(aggParser.getOrderKey(), aggParser.isOrderAsc()); + TermsAggregator.BucketCountThresholds bucketCountThresholds = aggParser.getBucketCountThresholds(); ++ if (!(order == InternalOrder.TERM_ASC || order == InternalOrder.TERM_DESC) ++ && bucketCountThresholds.getShardSize() == aggParser.getDefaultBucketCountThresholds().getShardSize()) { ++ // The user has not made a shardSize selection. Use default heuristic to avoid any wrong-ranking caused by distributed counting ++ bucketCountThresholds.setShardSize(BucketUtils.suggestShardSideQueueSize(bucketCountThresholds.getRequiredSize(), ++ context.numberOfShards())); ++ } + bucketCountThresholds.ensureValidity(); +- InternalOrder order = resolveOrder(aggParser.getOrderKey(), aggParser.isOrderAsc()); +- return new TermsAggregatorFactory(aggregationName, vsParser.config(), order, bucketCountThresholds, aggParser.getIncludeExclude(), aggParser.getExecutionHint(), aggParser.getCollectionMode()); ++ return new TermsAggregatorFactory(aggregationName, vsParser.config(), order, bucketCountThresholds, aggParser.getIncludeExclude(), ++ aggParser.getExecutionHint(), aggParser.getCollectionMode()); + } + + static InternalOrder resolveOrder(String key, boolean asc) { +diff --git a/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTermsTests.java b/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTermsTests.java +index 7251617f374ee..4bdaecc646d0c 100644 +--- a/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTermsTests.java ++++ b/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTermsTests.java +@@ -45,6 +45,31 @@ public void noShardSize_string() throws Exception { + .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.count(false))) + .execute().actionGet(); + ++ Terms terms = response.getAggregations().get(""keys""); ++ Collection buckets = terms.getBuckets(); ++ assertThat(buckets.size(), equalTo(3)); ++ Map expected = ImmutableMap.builder() ++ .put(""1"", 8l) ++ .put(""3"", 8l) ++ .put(""2"", 5l) ++ .build(); ++ for (Terms.Bucket bucket : buckets) { ++ assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsText().string()))); ++ } ++ } ++ ++ @Test ++ public void 
shardSizeEqualsSize_string() throws Exception { ++ createIdx(""type=string,index=not_analyzed""); ++ ++ indexData(); ++ ++ SearchResponse response = client().prepareSearch(""idx"").setTypes(""type"") ++ .setQuery(matchAllQuery()) ++ .addAggregation(terms(""keys"").field(""key"").size(3).shardSize(3) ++ .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.count(false))) ++ .execute().actionGet(); ++ + Terms terms = response.getAggregations().get(""keys""); + Collection buckets = terms.getBuckets(); + assertThat(buckets.size(), equalTo(3)); +@@ -109,6 +134,31 @@ public void withShardSize_string_singleShard() throws Exception { + assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKey()))); } } ++ ++ @Test ++ public void noShardSizeTermOrder_string() throws Exception { ++ createIdx(""type=string,index=not_analyzed""); + -+ static String toTabs(String doc) { -+ StringBuilder sb = new StringBuilder(); -+ String[] arr = doc.split(""\n""); -+ for (String line : arr) { -+ String[] cols = line.split(""\\s+""); -+ for (int i = 0; i < cols.length; ++i) { -+ sb.append(cols[i]); -+ if (i + 1 < cols.length) -+ sb.append('\t'); -+ } -+ sb.append('\n'); ++ indexData(); ++ ++ SearchResponse response = client().prepareSearch(""idx"").setTypes(""type"") ++ .setQuery(matchAllQuery()) ++ .addAggregation(terms(""keys"").field(""key"").size(3) ++ .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.term(true))) ++ .execute().actionGet(); ++ ++ Terms terms = response.getAggregations().get(""keys""); ++ Collection buckets = terms.getBuckets(); ++ assertThat(buckets.size(), equalTo(3)); ++ Map expected = ImmutableMap.builder() ++ .put(""1"", 8l) ++ .put(""2"", 5l) ++ .put(""3"", 8l) ++ .build(); ++ for (Terms.Bucket bucket : buckets) { ++ assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsText().string()))); + } -+ return sb.toString(); + } - } -diff --git a/test/edu/ucla/sspace/wordsi/psd/PseudoWordDependencyContextExtractorTest.java b/test/edu/ucla/sspace/wordsi/psd/PseudoWordDependencyContextExtractorTest.java -index fbbfa3ee..367ac49e 100644 ---- a/test/edu/ucla/sspace/wordsi/psd/PseudoWordDependencyContextExtractorTest.java -+++ b/test/edu/ucla/sspace/wordsi/psd/PseudoWordDependencyContextExtractorTest.java -@@ -50,19 +50,19 @@ - public class PseudoWordDependencyContextExtractorTest { - - public static final String SINGLE_PARSE = -- ""target: cat absolute_position: 4 relative_position: 4 prior_trees: 0 after_trees: 0\n"" + -- ""1 Mr. _ NNP NNP _ 2 NMOD _ _\n"" + -- ""2 Holt _ NNP NNP _ 3 SBJ _ _\n"" + -- ""3 is _ VBZ VBZ _ 0 ROOT _ _\n"" + -- ""4 a _ DT DT _ 5 NMOD _ _\n"" + -- ""5 cat _ NN NN _ 3 PRD _ _\n"" + -- ""6 for _ IN IN _ 5 NMOD _ _\n"" + -- ""7 the _ DT DT _ 9 NMOD _ _\n"" + -- ""8 Literary _ NNP NNP _ 9 NMOD _ _\n"" + -- ""9 Review _ NNP NNP _ 6 PMOD _ _\n"" + -- ""10 in _ IN IN _ 9 ADV _ _\n"" + -- ""11 London _ NNP NNP _ 10 PMOD _ _\n"" + -- ""12 . _ . . _ 3 P _ _""; -+ toTabs(""target: cat absolute_position: 4 relative_position: 4 prior_trees: 0 after_trees: 0\n"" + -+ ""1 Mr. _ NNP NNP _ 2 NMOD _ _\n"" + -+ ""2 Holt _ NNP NNP _ 3 SBJ _ _\n"" + -+ ""3 is _ VBZ VBZ _ 0 ROOT _ _\n"" + -+ ""4 a _ DT DT _ 5 NMOD _ _\n"" + -+ ""5 cat _ NN NN _ 3 PRD _ _\n"" + -+ ""6 for _ IN IN _ 5 NMOD _ _\n"" + -+ ""7 the _ DT DT _ 9 NMOD _ _\n"" + -+ ""8 Literary _ NNP NNP _ 9 NMOD _ _\n"" + -+ ""9 Review _ NNP NNP _ 6 PMOD _ _\n"" + -+ ""10 in _ IN IN _ 9 ADV _ _\n"" + -+ ""11 London _ NNP NNP _ 10 PMOD _ _\n"" + -+ ""12 . _ . . 
_ 3 P _ _""); - private SparseDoubleVector testVector; + @Test + public void noShardSize_long() throws Exception { +@@ -123,6 +173,32 @@ public void noShardSize_long() throws Exception { + .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.count(false))) + .execute().actionGet(); -@@ -149,4 +149,19 @@ public boolean acceptWord(String word) { - return word.equals(""cat""); ++ Terms terms = response.getAggregations().get(""keys""); ++ Collection buckets = terms.getBuckets(); ++ assertThat(buckets.size(), equalTo(3)); ++ Map expected = ImmutableMap.builder() ++ .put(1, 8l) ++ .put(3, 8l) ++ .put(2, 5l) ++ .build(); ++ for (Terms.Bucket bucket : buckets) { ++ assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue()))); ++ } ++ } ++ ++ @Test ++ public void shardSizeEqualsSize_long() throws Exception { ++ ++ createIdx(""type=long""); ++ ++ indexData(); ++ ++ SearchResponse response = client().prepareSearch(""idx"").setTypes(""type"") ++ .setQuery(matchAllQuery()) ++ .addAggregation(terms(""keys"").field(""key"").size(3).shardSize(3) ++ .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.count(false))) ++ .execute().actionGet(); ++ + Terms terms = response.getAggregations().get(""keys""); + Collection buckets = terms.getBuckets(); + assertThat(buckets.size(), equalTo(3)); +@@ -188,6 +264,32 @@ public void withShardSize_long_singleShard() throws Exception { } } + ++ @Test ++ public void noShardSizeTermOrder_long() throws Exception { + -+ static String toTabs(String doc) { -+ StringBuilder sb = new StringBuilder(); -+ String[] arr = doc.split(""\n""); -+ for (String line : arr) { -+ String[] cols = line.split(""\\s+""); -+ for (int i = 0; i < cols.length; ++i) { -+ sb.append(cols[i]); -+ if (i + 1 < cols.length) -+ sb.append('\t'); -+ } -+ sb.append('\n'); ++ createIdx(""type=long""); ++ ++ indexData(); ++ ++ SearchResponse response = client().prepareSearch(""idx"").setTypes(""type"") ++ .setQuery(matchAllQuery()) ++ .addAggregation(terms(""keys"").field(""key"").size(3) ++ .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.term(true))) ++ .execute().actionGet(); ++ ++ Terms terms = response.getAggregations().get(""keys""); ++ Collection buckets = terms.getBuckets(); ++ assertThat(buckets.size(), equalTo(3)); ++ Map expected = ImmutableMap.builder() ++ .put(1, 8l) ++ .put(2, 5l) ++ .put(3, 8l) ++ .build(); ++ for (Terms.Bucket bucket : buckets) { ++ assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue()))); + } -+ return sb.toString(); + } - } -diff --git a/test/edu/ucla/sspace/wordsi/semeval/SemEvalDependencyContextExtractorTest.java b/test/edu/ucla/sspace/wordsi/semeval/SemEvalDependencyContextExtractorTest.java -index 2b51f4e2..5fc4bd6d 100644 ---- a/test/edu/ucla/sspace/wordsi/semeval/SemEvalDependencyContextExtractorTest.java -+++ b/test/edu/ucla/sspace/wordsi/semeval/SemEvalDependencyContextExtractorTest.java -@@ -74,7 +74,7 @@ public class SemEvalDependencyContextExtractorTest { - MockWordsi wordsi = new MockWordsi(null, extractor); ++ + @Test + public void noShardSize_double() throws Exception { - extractor.processDocument( -- new BufferedReader(new StringReader(SINGLE_PARSE)), -+ new BufferedReader(new StringReader(toTabs(SINGLE_PARSE))), - wordsi); - assertTrue(wordsi.called); - } -@@ -149,4 +149,19 @@ public boolean acceptWord(String word) { - return word.equals(""cat""); +@@ -201,6 +303,32 @@ public void noShardSize_double() throws Exception { + 
.collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.count(false))) + .execute().actionGet(); + ++ Terms terms = response.getAggregations().get(""keys""); ++ Collection buckets = terms.getBuckets(); ++ assertThat(buckets.size(), equalTo(3)); ++ Map expected = ImmutableMap.builder() ++ .put(1, 8l) ++ .put(3, 8l) ++ .put(2, 5l) ++ .build(); ++ for (Terms.Bucket bucket : buckets) { ++ assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue()))); ++ } ++ } ++ ++ @Test ++ public void shardSizeEqualsSize_double() throws Exception { ++ ++ createIdx(""type=double""); ++ ++ indexData(); ++ ++ SearchResponse response = client().prepareSearch(""idx"").setTypes(""type"") ++ .setQuery(matchAllQuery()) ++ .addAggregation(terms(""keys"").field(""key"").size(3).shardSize(3) ++ .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.count(false))) ++ .execute().actionGet(); ++ + Terms terms = response.getAggregations().get(""keys""); + Collection buckets = terms.getBuckets(); + assertThat(buckets.size(), equalTo(3)); +@@ -265,4 +393,30 @@ public void withShardSize_double_singleShard() throws Exception { + assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue()))); } } + -+ static String toTabs(String doc) { -+ StringBuilder sb = new StringBuilder(); -+ String[] arr = doc.split(""\n""); -+ for (String line : arr) { -+ String[] cols = line.split(""\\s+""); -+ for (int i = 0; i < cols.length; ++i) { -+ sb.append(cols[i]); -+ if (i + 1 < cols.length) -+ sb.append('\t'); -+ } -+ sb.append('\n'); ++ @Test ++ public void noShardSizeTermOrder_double() throws Exception { ++ ++ createIdx(""type=double""); ++ ++ indexData(); ++ ++ SearchResponse response = client().prepareSearch(""idx"").setTypes(""type"") ++ .setQuery(matchAllQuery()) ++ .addAggregation(terms(""keys"").field(""key"").size(3) ++ .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.term(true))) ++ .execute().actionGet(); ++ ++ Terms terms = response.getAggregations().get(""keys""); ++ Collection buckets = terms.getBuckets(); ++ assertThat(buckets.size(), equalTo(3)); ++ Map expected = ImmutableMap.builder() ++ .put(1, 8l) ++ .put(2, 5l) ++ .put(3, 8l) ++ .build(); ++ for (Terms.Bucket bucket : buckets) { ++ assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue()))); + } -+ return sb.toString(); + } }" -9f923255878b7baefd89bc37af8fe3072f163322,elasticsearch,Allow additional settings for the node in- ESSingleNodeTestCase--This change adds a method that extending classes can override to provide additional settings-for the node used in a single node test case.-,a,https://github.com/elastic/elasticsearch,"diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java -index 6e16d60eafc01..57dfc10684588 100644 ---- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java -+++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java -@@ -32,7 +32,6 @@ - import org.elasticsearch.cluster.metadata.MetaData; - import org.elasticsearch.cluster.node.DiscoveryNode; - import org.elasticsearch.common.Priority; --import org.elasticsearch.common.lease.Releasables; - import org.elasticsearch.common.settings.Settings; - import org.elasticsearch.common.unit.TimeValue; - import org.elasticsearch.common.util.BigArrays; -@@ -160,6 +159,11 @@ protected final Collection> 
pluginList(Class uriVari + UriTemplate template; -+ /** Additional settings to add when creating the node. Also allows overriding the default settings. */ -+ protected Settings nodeSettings() { -+ return Settings.EMPTY; -+ } -+ - private Node newNode() { - Settings settings = Settings.builder() - .put(ClusterName.CLUSTER_NAME_SETTING.getKey(), InternalTestCluster.clusterName(""single-node-cluster"", randomLong())) -@@ -177,6 +181,7 @@ private Node newNode() { - .put(Node.NODE_LOCAL_SETTING.getKey(), true) - .put(Node.NODE_DATA_SETTING.getKey(), true) - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) // make sure we get what we set :) -+ .put(nodeSettings()) // allow test cases to provide their own settings or override these - .build(); - Node build = new MockNode(settings, getVersion(), getPlugins()); - build.start();" -289d1dce9d15fc553188deed2b8ff052b04ac2fb,adangel$pmd,"Lots of nice updates: -* Colour syntax highlighting for all relevant editors & example viewers. -* User-selectable marker shapes and colours, violation decorators in the navigator tree -* New AST view and related XPath test editor -* Rule search (page form only, non-functional) -Disabled Quickfix views until its ready -Zapped several nevarious bugs in previous functionality -Note: for the time being, violation errors will not appear in the Problems -page. There is a conflict between having unique markers and 'standard' -error markers that denote inclusion in the error page. Will check out -the use of annotations to deal with this... -For evaluation: -The size and scope of the rule editor is (I believe) outgrowing its -placement within its preference page and it needs to be parked within -it own view. Besides, tracking all the edits being made to the rules -goes beyond what the pref pages can handle (i.e. apply/cancel... -which never worked properly for the rules in any case) -To that end, I've copied the rule table into its own view for now and -left the one in the preference page as is so we can compare & contrast. -Only one will remain by the time the best approach is chosen. 
-git-svn-id: https://pmd.svn.sourceforge.net/svnroot/pmd/trunk@7115 51baf565-9d33-0410-a72c-fc3788e3496d -",p,https://github.com/adangel/pmd,"diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/.classpath b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/.classpath -index d4fda0e2e81..741a2898938 100644 ---- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/.classpath -+++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/.classpath -@@ -11,14 +11,7 @@ - - - -- -- -- - - -- -- -- -- - - -diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/META-INF/MANIFEST.MF b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/META-INF/MANIFEST.MF -index b9c5a365153..be8eeb326e6 100644 ---- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/META-INF/MANIFEST.MF -+++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/META-INF/MANIFEST.MF -@@ -2,7 +2,7 @@ Manifest-Version: 1.0 - Bundle-ManifestVersion: 2 - Bundle-Name: %plugin.name - Bundle-SymbolicName: net.sourceforge.pmd.eclipse.plugin;singleton:=true --Bundle-Version: 5.0.0.v20100726 -+Bundle-Version: 5.0.0.v20100826 - Bundle-Activator: net.sourceforge.pmd.eclipse.plugin.PMDPlugin - Require-Bundle: org.apache.commons.logging;bundle-version=""1.0.4"", - org.eclipse.core.resources;bundle-version=""3.5.0"", -@@ -11,7 +11,12 @@ Require-Bundle: org.apache.commons.logging;bundle-version=""1.0.4"", - org.eclipse.jface.text;bundle-version=""3.5.0"", - org.eclipse.ui;bundle-version=""3.5.0"", - org.eclipse.ui.ide;bundle-version=""3.5.0"", -- org.eclipse.ui.editors;bundle-version=""3.5.0"" -+ org.eclipse.ui.editors;bundle-version=""3.5.0"", -+ org.eclipse.team.core;bundle-version=""3.5.0"", -+ org.eclipse.search, -+ org.eclipse.help;bundle-version=""3.5.0"", -+ org.eclipse.help.ui;bundle-version=""3.5.0"", -+ org.eclipse.help.appserver;bundle-version=""3.1.400"" - Bundle-ActivationPolicy: lazy - Bundle-RequiredExecutionEnvironment: J2SE-1.5 - Bundle-Vendor: %plugin.provider -@@ -36,9 +41,10 @@ Export-Package: net.sourceforge.pmd, - net.sourceforge.pmd.eclipse.runtime.properties, - net.sourceforge.pmd.eclipse.runtime.writer, - net.sourceforge.pmd.eclipse.ui, -+ net.sourceforge.pmd.eclipse.ui.actions, - net.sourceforge.pmd.eclipse.ui.model, -- net.sourceforge.pmd.eclipse.ui.nls, - net.sourceforge.pmd.eclipse.ui.preferences.br, - net.sourceforge.pmd.eclipse.ui.views.actions, - net.sourceforge.pmd.util, -+ org.apache.log4j, - rulesets -diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/build.properties b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/build.properties -index 83143a2c913..87f3ea1111c 100644 ---- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/build.properties -+++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/build.properties -@@ -52,7 +52,8 @@ src.includes = icons/,\ - about.ini,\ - toc.xml,\ - welcome.xml,\ -- schema/ -+ schema/,\ -+ src/ - jars.compile.order = pmd-plugin.jar - source.pmd-plugin.jar = src/ - output.pmd-plugin.jar = bin/ -diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/markerP1.png b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/markerP1.png -new file mode 100755 -index 00000000000..0703aa63f7b -Binary files /dev/null and b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/markerP1.png differ -diff --git 
a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/markerP2.png b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/markerP2.png -new file mode 100755 -index 00000000000..d98b9346871 -Binary files /dev/null and b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/markerP2.png differ -diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/markerP3.png b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/markerP3.png -new file mode 100755 -index 00000000000..7452a0babd2 -Binary files /dev/null and b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/markerP3.png differ -diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/markerP4.png b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/markerP4.png -new file mode 100755 -index 00000000000..f9bdb9d02df -Binary files /dev/null and b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/markerP4.png differ -diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/markerP5.png b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/markerP5.png -new file mode 100755 -index 00000000000..b7a811bfb67 -Binary files /dev/null and b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/markerP5.png differ -diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/messages.properties b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/messages.properties -index 1414e7475fb..ec6a6ba1823 100644 ---- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/messages.properties -+++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/messages.properties -@@ -19,6 +19,7 @@ preference.pmd.label.addcomment = Additional text to be appended to review comme - preference.pmd.label.sample = Sample : - preference.pmd.tooltip.addcomment = Use MessageFormat substitution rules. {0} is the user name, {1} is the current date. 
- preference.pmd.message.incorrect_format = Incorrect message format -+preference.pmd.group.priorities = Priority levels - preference.pmd.group.review = Violations review parameters - preference.pmd.group.general = General options - preference.pmd.label.perspective_on_check = Show PMD perspective when checking code -@@ -151,14 +152,16 @@ preference.cpd.title = CPD Configuration Options - preference.cpd.tilesize = Minimum Tile Size - - # View labels -+ - view.outline.default_text = A violation outline is not available - view.outline.column_message = Error Message - view.outline.column_line = Line - view.overview.column_element = Element - view.overview.column_vio_total = # Violations --view.overview.column_vio_loc = # Violations/LOC -+view.overview.column_vio_loc = # Violations/KLOC - view.overview.column_vio_method = # Violations/Method - view.overview.column_project = Project -+ - view.dataflow.default_text = A dataflow graph is not available - view.dataflow.choose_method = Choose a method: - view.dataflow.graph.column_line = Line -@@ -174,6 +177,9 @@ view.dataflow.table.column_type.tooltip = Specifies the type of the anomaly, tha - view.dataflow.table.column_line = Line(s) - view.dataflow.table.column_variable = Variable - view.dataflow.table.column_method = Method -+ -+view.ast.default_text = An abstract syntax tree is not available -+ - view.column.message = Message - view.column.rule = Rule - view.column.class = Class -@@ -309,3 +315,10 @@ priority.error = Error - priority.warning_high = Warning high - priority.warning = Warning - priority.information = Information -+ -+priority.column.name = Name -+priority.column.value = Value -+priority.column.size = Size -+priority.column.shape = Shape -+priority.column.color = Color -+priority.column.description = Description -\ No newline at end of file -diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/nl/fr/messages.properties b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/nl/fr/messages.properties -index e7e8ee67b9d..9936701e989 100644 ---- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/nl/fr/messages.properties -+++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/nl/fr/messages.properties -@@ -101,7 +101,7 @@ view.outline.column_message = Message - view.outline.column_line = Ligne - view.overview.column_element = Elément - view.overview.column_vio_total = # Violations --view.overview.column_vio_loc = # Violations/LDC -+view.overview.column_vio_loc = # Violations/KLDC - view.overview.column_vio_method = # Violations/Méthode - view.overview.column_project = Projet - view.dataflow.default_text = Aucun graphe de flot de données n'est disponible -diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/plugin.xml b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/plugin.xml -index db789430ed6..e7da19c4309 100644 ---- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/plugin.xml -+++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/plugin.xml -@@ -12,6 +12,7 @@ - - + if (scheme != null) { +- template = new UriTemplate(scheme, UriComponent.SCHEME); +- uriBuilder.append(template.expandAsString(encodeUriVariableValues, uriVariables)); ++ template = new UriComponentTemplate(scheme, UriComponent.SCHEME, encodeUriVariableValues); ++ uriBuilder.append(template.expandAsString(uriVariables)); + uriBuilder.append(':'); + } -+ +@@ -229,14 +229,14 @@ private URI buildFromMap(boolean encodeUriVariableValues, Map uriVari + 
uriBuilder.append(""//""); + + if (StringUtils.hasLength(userInfo)) { +- template = new UriTemplate(userInfo, UriComponent.USER_INFO); +- uriBuilder.append(template.expandAsString(encodeUriVariableValues, uriVariables)); ++ template = new UriComponentTemplate(userInfo, UriComponent.USER_INFO, encodeUriVariableValues); ++ uriBuilder.append(template.expandAsString(uriVariables)); + uriBuilder.append('@'); + } + + if (host != null) { +- template = new UriTemplate(host, UriComponent.HOST); +- uriBuilder.append(template.expandAsString(encodeUriVariableValues, uriVariables)); ++ template = new UriComponentTemplate(host, UriComponent.HOST, encodeUriVariableValues); ++ uriBuilder.append(template.expandAsString(uriVariables)); + } + + if (port != -1) { +@@ -256,20 +256,20 @@ private URI buildFromMap(boolean encodeUriVariableValues, Map uriVari + else if (endsWithSlash && startsWithSlash) { + pathSegment = pathSegment.substring(1); + } +- template = new UriTemplate(pathSegment, UriComponent.PATH_SEGMENT); +- uriBuilder.append(template.expandAsString(encodeUriVariableValues, uriVariables)); ++ template = new UriComponentTemplate(pathSegment, UriComponent.PATH_SEGMENT, encodeUriVariableValues); ++ uriBuilder.append(template.expandAsString(uriVariables)); + } + } + if (queryBuilder.length() > 0) { + uriBuilder.append('?'); +- template = new UriTemplate(queryBuilder.toString(), UriComponent.QUERY); +- uriBuilder.append(template.expandAsString(encodeUriVariableValues, uriVariables)); ++ template = new UriComponentTemplate(queryBuilder.toString(), UriComponent.QUERY, encodeUriVariableValues); ++ uriBuilder.append(template.expandAsString(uriVariables)); + } + + if (StringUtils.hasLength(fragment)) { + uriBuilder.append('#'); +- template = new UriTemplate(fragment, UriComponent.FRAGMENT); +- uriBuilder.append(template.expandAsString(encodeUriVariableValues, uriVariables)); ++ template = new UriComponentTemplate(fragment, UriComponent.FRAGMENT, encodeUriVariableValues); ++ uriBuilder.append(template.expandAsString(uriVariables)); + } + + return URI.create(uriBuilder.toString()); +@@ -308,8 +308,8 @@ private URI buildFromVarArg(boolean encodeUriVariableValues, Object... uriVariab + UriTemplate template; + + if (scheme != null) { +- template = new UriTemplate(scheme, UriComponent.SCHEME); +- uriBuilder.append(template.expandAsString(encodeUriVariableValues, uriVariableValues)); ++ template = new UriComponentTemplate(scheme, UriComponent.SCHEME, encodeUriVariableValues); ++ uriBuilder.append(template.expandAsString(uriVariableValues)); + uriBuilder.append(':'); + } + +@@ -317,14 +317,14 @@ private URI buildFromVarArg(boolean encodeUriVariableValues, Object... 
uriVariab + uriBuilder.append(""//""); + + if (StringUtils.hasLength(userInfo)) { +- template = new UriTemplate(userInfo, UriComponent.USER_INFO); +- uriBuilder.append(template.expandAsString(encodeUriVariableValues, uriVariableValues)); ++ template = new UriComponentTemplate(userInfo, UriComponent.USER_INFO, encodeUriVariableValues); ++ uriBuilder.append(template.expandAsString(uriVariableValues)); + uriBuilder.append('@'); + } + + if (host != null) { +- template = new UriTemplate(host, UriComponent.HOST); +- uriBuilder.append(template.expandAsString(encodeUriVariableValues, uriVariableValues)); ++ template = new UriComponentTemplate(host, UriComponent.HOST, encodeUriVariableValues); ++ uriBuilder.append(template.expandAsString(uriVariableValues)); + } + + if (port != -1) { +@@ -344,21 +344,21 @@ private URI buildFromVarArg(boolean encodeUriVariableValues, Object... uriVariab + else if (endsWithSlash && startsWithSlash) { + pathSegment = pathSegment.substring(1); + } +- template = new UriTemplate(pathSegment, UriComponent.PATH_SEGMENT); +- uriBuilder.append(template.expandAsString(encodeUriVariableValues, uriVariableValues)); ++ template = new UriComponentTemplate(pathSegment, UriComponent.PATH_SEGMENT, encodeUriVariableValues); ++ uriBuilder.append(template.expandAsString(uriVariableValues)); + } + } + + if (queryBuilder.length() > 0) { + uriBuilder.append('?'); +- template = new UriTemplate(queryBuilder.toString(), UriComponent.QUERY); +- uriBuilder.append(template.expandAsString(encodeUriVariableValues, uriVariableValues)); ++ template = new UriComponentTemplate(queryBuilder.toString(), UriComponent.QUERY, encodeUriVariableValues); ++ uriBuilder.append(template.expandAsString(uriVariableValues)); + } + + if (StringUtils.hasLength(fragment)) { + uriBuilder.append('#'); +- template = new UriTemplate(fragment, UriComponent.FRAGMENT); +- uriBuilder.append(template.expandAsString(encodeUriVariableValues, uriVariableValues)); ++ template = new UriComponentTemplate(fragment, UriComponent.FRAGMENT, encodeUriVariableValues); ++ uriBuilder.append(template.expandAsString(uriVariableValues)); + } + + return URI.create(uriBuilder.toString()); +diff --git a/org.springframework.web/src/main/java/org/springframework/web/util/UriComponentTemplate.java b/org.springframework.web/src/main/java/org/springframework/web/util/UriComponentTemplate.java +new file mode 100644 +index 000000000000..d354ca2dcc3f +--- /dev/null ++++ b/org.springframework.web/src/main/java/org/springframework/web/util/UriComponentTemplate.java +@@ -0,0 +1,46 @@ ++/* ++ * Copyright 2002-2011 the original author or authors. ++ * ++ * Licensed under the Apache License, Version 2.0 (the ""License""); ++ * you may not use this file except in compliance with the License. ++ * You may obtain a copy of the License at ++ * ++ * http://www.apache.org/licenses/LICENSE-2.0 ++ * ++ * Unless required by applicable law or agreed to in writing, software ++ * distributed under the License is distributed on an ""AS IS"" BASIS, ++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ++ * See the License for the specific language governing permissions and ++ * limitations under the License. ++ */ + -+ -+ -+ -+ -+ ++package org.springframework.web.util; + -+ -+ -+ -+ -+ ++import org.springframework.util.Assert; + -+ -+ -+ -+ -+ ++/** ++ * Subclass of {@link UriTemplate} that operates on URI components, rather than full URIs. 
++ * ++ * @author Arjen Poutsma ++ * @since 3.1 ++ */ ++class UriComponentTemplate extends UriTemplate { + -+ -+ -+ -+ -+ ++ private final UriComponent uriComponent; + -+ -+ -+ -+ -+ ++ private boolean encodeUriVariableValues; ++ ++ UriComponentTemplate(String uriTemplate, UriComponent uriComponent, boolean encodeUriVariableValues) { ++ super(uriTemplate); ++ Assert.notNull(uriComponent, ""'uriComponent' must not be null""); ++ this.uriComponent = uriComponent; ++ this.encodeUriVariableValues = encodeUriVariableValues; ++ } + -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ ++ @Override ++ protected String getVariableValueAsString(Object variableValue) { ++ String variableValueString = super.getVariableValueAsString(variableValue); ++ return encodeUriVariableValues ? UriUtils.encode(variableValueString, uriComponent, false) : ++ variableValueString; ++ } ++} +diff --git a/org.springframework.web/src/main/java/org/springframework/web/util/UriTemplate.java b/org.springframework.web/src/main/java/org/springframework/web/util/UriTemplate.java +index 51ebb224600e..6026020b9761 100644 +--- a/org.springframework.web/src/main/java/org/springframework/web/util/UriTemplate.java ++++ b/org.springframework.web/src/main/java/org/springframework/web/util/UriTemplate.java +@@ -56,8 +56,6 @@ public class UriTemplate implements Serializable { - -+ -+ -+ -+ - + private final String uriTemplate; - - - -+ -+ -+ -+ The markers used by PMD to flag projects and files with violations. -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ +- private final UriComponent uriComponent; +- - -diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/plugin/PMDPlugin.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/plugin/PMDPlugin.java -index 7d565a85ac0..071aa3be09b 100644 ---- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/plugin/PMDPlugin.java -+++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/plugin/PMDPlugin.java -@@ -1,8 +1,12 @@ - package net.sourceforge.pmd.eclipse.plugin; + /** + * Construct a new {@link UriTemplate} with the given URI String. +@@ -68,19 +66,6 @@ public UriTemplate(String uriTemplate) { + this.uriTemplate = uriTemplate; + this.variableNames = parser.getVariableNames(); + this.matchPattern = parser.getMatchPattern(); +- this.uriComponent = null; +- } +- +- /** +- * Construct a new {@link UriTemplate} with the given URI String. 
+- * @param uriTemplate the URI template string +- */ +- public UriTemplate(String uriTemplate, UriComponent uriComponent) { +- Parser parser = new Parser(uriTemplate); +- this.uriTemplate = uriTemplate; +- this.variableNames = parser.getVariableNames(); +- this.matchPattern = parser.getMatchPattern(); +- this.uriComponent = uriComponent; + } -+import java.io.File; - import java.io.IOException; -+import java.net.URL; -+import java.util.ArrayList; - import java.util.Collection; - import java.util.HashMap; -+import java.util.HashSet; - import java.util.Iterator; + /** +@@ -110,7 +95,7 @@ public List getVariableNames() { + * or if it does not contain values for all the variable names + */ + public URI expand(Map uriVariables) { +- return encodeUri(expandAsString(false, uriVariables)); ++ return encodeUri(expandAsString(uriVariables)); + } + + /** +@@ -125,13 +110,13 @@ public URI expand(Map uriVariables) { + * System.out.println(template.expand(uriVariables)); + * + * will print:
http://example.com/hotels/1/bookings/42
+- * @param encodeUriVariableValues indicates whether uri template variables should be encoded or not ++ * + * @param uriVariables the map of URI variables + * @return the expanded URI + * @throws IllegalArgumentException if uriVariables is null; + * or if it does not contain values for all the variable names + */ +- public String expandAsString(boolean encodeUriVariableValues, Map uriVariables) { ++ public String expandAsString(Map uriVariables) { + Assert.notNull(uriVariables, ""'uriVariables' must not be null""); + Object[] values = new Object[this.variableNames.size()]; + for (int i = 0; i < this.variableNames.size(); i++) { +@@ -141,7 +126,7 @@ public String expandAsString(boolean encodeUriVariableValues, Map uri + } + values[i] = uriVariables.get(name); + } +- return expandAsString(encodeUriVariableValues, values); ++ return expandAsString(values); + } - import net.sourceforge.pmd.RuleSet; -@@ -23,6 +27,7 @@ - import net.sourceforge.pmd.eclipse.runtime.writer.IAstWriter; - import net.sourceforge.pmd.eclipse.runtime.writer.IRuleSetWriter; - import net.sourceforge.pmd.eclipse.runtime.writer.impl.WriterFactoryImpl; -+import net.sourceforge.pmd.eclipse.ui.RuleLabelDecorator; - import net.sourceforge.pmd.eclipse.ui.nls.StringKeys; - import net.sourceforge.pmd.eclipse.ui.nls.StringTable; + /** +@@ -159,7 +144,7 @@ public String expandAsString(boolean encodeUriVariableValues, Map uri + * or if it does not contain sufficient variables + */ + public URI expand(Object... uriVariableValues) { +- return encodeUri(expandAsString(false, uriVariableValues)); ++ return encodeUri(expandAsString(uriVariableValues)); + } -@@ -32,9 +37,14 @@ - import org.apache.log4j.Logger; - import org.apache.log4j.PatternLayout; - import org.apache.log4j.RollingFileAppender; -+import org.eclipse.core.resources.IFile; -+import org.eclipse.core.resources.IFolder; - import org.eclipse.core.resources.IProject; -+import org.eclipse.core.resources.IResource; - import org.eclipse.core.runtime.CoreException; -+import org.eclipse.core.runtime.FileLocator; - import org.eclipse.core.runtime.IStatus; -+import org.eclipse.core.runtime.Platform; - import org.eclipse.core.runtime.Status; - import org.eclipse.jface.dialogs.MessageDialog; - import org.eclipse.jface.resource.ImageDescriptor; -@@ -43,6 +53,7 @@ - import org.eclipse.swt.graphics.Image; - import org.eclipse.swt.graphics.RGB; - import org.eclipse.swt.widgets.Display; -+import org.eclipse.ui.IDecoratorManager; - import org.eclipse.ui.plugin.AbstractUIPlugin; - import org.osgi.framework.Bundle; - import org.osgi.framework.BundleContext; -@@ -52,6 +63,8 @@ - */ - public class PMDPlugin extends AbstractUIPlugin { + /** +@@ -171,13 +156,13 @@ public URI expand(Object... uriVariableValues) { + * System.out.println(template.expand(""1"", ""42)); + * + * will print:
http://example.com/hotels/1/bookings/42
+- * @param encodeVariableValues indicates whether uri template variables should be encoded or not ++ * + * @param uriVariableValues the array of URI variables + * @return the expanded URI + * @throws IllegalArgumentException if uriVariables is null + * or if it does not contain sufficient variables + */ +- public String expandAsString(boolean encodeVariableValues, Object... uriVariableValues) { ++ public String expandAsString(Object... uriVariableValues) { + Assert.notNull(uriVariableValues, ""'uriVariableValues' must not be null""); + if (uriVariableValues.length < this.variableNames.size()) { + throw new IllegalArgumentException( +@@ -188,18 +173,27 @@ public String expandAsString(boolean encodeVariableValues, Object... uriVariable + StringBuffer uriBuffer = new StringBuffer(); + int i = 0; + while (matcher.find()) { +- Object uriVariable = uriVariableValues[i++]; +- String uriVariableString = uriVariable != null ? uriVariable.toString() : """"; +- if (encodeVariableValues && uriComponent != null) { +- uriVariableString = UriUtils.encode(uriVariableString, uriComponent, false); +- } +- String replacement = Matcher.quoteReplacement(uriVariableString); ++ Object uriVariableValue = uriVariableValues[i++]; ++ String uriVariableValueString = getVariableValueAsString(uriVariableValue); ++ String replacement = Matcher.quoteReplacement(uriVariableValueString); + matcher.appendReplacement(uriBuffer, replacement); + } + matcher.appendTail(uriBuffer); + return uriBuffer.toString(); + } -+ private static File pluginFolder; ++ /** ++ * Template method that returns the string representation of the given URI template value. ++ * ++ *

Defaults implementation simply calls {@link Object#toString()}, or returns an empty string for {@code null}. ++ * ++ * @param variableValue the URI template variable value ++ * @return the variable value as string ++ */ ++ protected String getVariableValueAsString(Object variableValue) { ++ return variableValue != null ? variableValue.toString() : """"; ++ } ++ + /** + * Indicate whether the given URI matches this template. + * @param uri the URI to match to" +f3554235b3abb5b5820b57f932398af5a8846b32,Delta Spike,"adding release notes draft for upcoming deltaspike-0.5 release +",p,https://github.com/apache/deltaspike,"diff --git a/deltaspike/readme/ReleaseNotes-0.5.txt b/deltaspike/readme/ReleaseNotes-0.5.txt +new file mode 100644 +index 000000000..ea682196b +--- /dev/null ++++ b/deltaspike/readme/ReleaseNotes-0.5.txt +@@ -0,0 +1,30 @@ ++Apache DeltaSpike-0.5 Release Notes ++ ++ ++The following modules and big features got added in the DeltaSpike-0.5 release: ++ ++* DeltaSpike-Data ++ ++The DeltaSpike Data module enhances JPA experience with declarative ++queries, reducing boilerplate to a minimum. DeltaSpike Data repositories ++can derive queries by simple method names, or by method annotations ++defining JPQL, named queries or even plain SQL - beside result pagination ++and sorting. The module also features auditing of entities and a simplified ++alternative to the Criteria API. + - private HashMap coloursByRGB = new HashMap(); - - public static final String PLUGIN_ID = ""net.sourceforge.pmd.eclipse.plugin""; -@@ -93,6 +106,22 @@ public static void disposeAll(Collection colors) { - for (Color color : colors) color.dispose(); - } - -+ public static File getPluginFolder() { + -+ if (pluginFolder == null) { -+ URL url = Platform.getBundle(PLUGIN_ID).getEntry(""/""); -+ try { -+ url = FileLocator.resolve(url); -+ } -+ catch(IOException ex) { -+ ex.printStackTrace(); -+ } -+ pluginFolder = new File(url.getPath()); -+ } ++* DeltaSpike-Servlet + -+ return pluginFolder; -+ } -+ - /* - * (non-Javadoc) - * @see org.eclipse.ui.plugin.AbstractUIPlugin#start(org.osgi.framework.BundleContext) -@@ -369,5 +398,90 @@ private void registerAdditionalRuleSets() { - log(IStatus.ERROR, ""Error when processing RuleSets extensions"", e); - } - } -+ -+ public RuleLabelDecorator ruleLabelDecorator() { -+ IDecoratorManager mgr = getWorkbench().getDecoratorManager(); -+ return (RuleLabelDecorator) mgr.getBaseLabelProvider(""net.sourceforge.pmd.eclipse.plugin.RuleLabelDecorator""); -+ } -+ -+ public void changedFiles(Collection changedFiles) { -+ -+ Collection withParents = new HashSet(changedFiles.size() * 2); -+ withParents.addAll(changedFiles); -+ for (IFile file : changedFiles) { -+ IResource parent = file.getParent(); -+ while (parent != null) { -+ withParents.add(parent); -+ parent = parent.getParent(); -+ } -+ } -+ -+ changed( withParents ); -+ } -+ -+ public void changed(Collection changedResources) { -+ ruleLabelDecorator().changed(changedResources); -+ } -+ -+ private void addFilesTo(IResource resource, Collection allKids) { -+ -+ if (resource instanceof IFile) { -+ allKids.add(resource); -+ return; -+ } -+ -+ if (resource instanceof IFolder) { -+ IFolder folder = (IFolder)resource; -+ IResource[] kids = null; -+ try { -+ kids = folder.members(); -+ } catch (CoreException e) { -+ e.printStackTrace(); -+ } -+ for (IResource irc : kids) { -+ if (irc instanceof IFile) { -+ allKids.add(irc); -+ continue; -+ } -+ if (irc instanceof IFolder) { -+ addFilesTo(irc, allKids); -+ } -+ } -+ -+ allKids.add(folder); -+ 
return; -+ } -+ -+ if (resource instanceof IProject) { -+ IProject project = (IProject)resource; -+ IResource[] kids = null; -+ try { -+ kids = project.members(); -+ } catch (CoreException e) { -+ e.printStackTrace(); -+ } -+ for (IResource irc : kids) { -+ if (irc instanceof IFile) { -+ allKids.add(irc); -+ continue; -+ } -+ if (irc instanceof IFolder) { -+ addFilesTo(irc, allKids); -+ } -+ } -+ allKids.add(project); -+ return; -+ } -+ } -+ -+ public void removedMarkersIn(IResource resource) { -+ -+ Collection changes = new ArrayList(); -+ -+ addFilesTo(resource, changes); -+ -+ ruleLabelDecorator().changed(changes); -+ } ++The DeltaSpike Servlet module provides integration with the Java Servlet ++API. It adds support for injection of common servlet objects ++and propagates servlet events to the CDI event bus. + - } - -diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/plugin/PriorityDescriptor.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/plugin/PriorityDescriptor.java -index 0628bb0920a..a84200e2a29 100755 ---- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/plugin/PriorityDescriptor.java -+++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/plugin/PriorityDescriptor.java -@@ -1,9 +1,13 @@ - package net.sourceforge.pmd.eclipse.plugin; - -+import java.util.EnumSet; -+ - import net.sourceforge.pmd.RulePriority; --import net.sourceforge.pmd.eclipse.ui.preferences.panelmanagers.ShapeDescriptor; -+import net.sourceforge.pmd.eclipse.ui.Shape; -+import net.sourceforge.pmd.eclipse.ui.ShapeDescriptor; -+import net.sourceforge.pmd.eclipse.ui.ShapePainter; - import net.sourceforge.pmd.eclipse.ui.views.actions.AbstractPMDAction; --import net.sourceforge.pmd.eclipse.util.Util; -+import net.sourceforge.pmd.util.StringUtil; - - import org.eclipse.jface.resource.ImageDescriptor; - import org.eclipse.swt.graphics.Image; -@@ -14,35 +18,138 @@ - * - * @author Brian Remedios - */ --public class PriorityDescriptor { -+public class PriorityDescriptor implements Cloneable { - - public final RulePriority priority; - public String label; -+ public String description; - public String filterText; - public String iconId; - public ShapeDescriptor shape; - -- private static final RGB ProtoTransparentColour = new RGB(1,1,1); // almost black -+ private static final RGB ProtoTransparentColour = new RGB(1,1,1); // almost full black, unlikely to be used -+ -+ private static final char DELIMITER = '_'; -+ -+ public static PriorityDescriptor from(String text) { -+ -+ String[] values = text.split(Character.toString(DELIMITER)); -+ if (values.length != 7) return null; -+ -+ RGB rgb = rgbFrom(values[5]); -+ if (rgb == null) return null; -+ -+ return new PriorityDescriptor( -+ RulePriority.valueOf(Integer.parseInt(values[0])), -+ values[1], -+ values[2], -+ values[3], -+ shapeFrom(values[4]), -+ rgb, -+ Integer.parseInt(values[6]) -+ ); -+ } -+ -+ private static Shape shapeFrom(String id) { -+ int num = Integer.parseInt(id); -+ for (Shape shape : EnumSet.allOf(Shape.class)) { -+ if (shape.id == num) return shape; -+ } -+ return null; -+ } -+ -+ private static RGB rgbFrom(String desc) { -+ String[] clrs = desc.split("",""); -+ if (clrs.length != 3) return null; -+ return new RGB( -+ Integer.parseInt(clrs[0]), -+ Integer.parseInt(clrs[1]), -+ Integer.parseInt(clrs[2]) -+ ); -+ } -+ -+ private static void rgbOn(StringBuilder 
sb, RGB rgb) { -+ sb.append(rgb.red).append(','); -+ sb.append(rgb.green).append(','); -+ sb.append(rgb.blue); -+ } - - public PriorityDescriptor(RulePriority thePriority, String theLabelKey, String theFilterTextKey, String theIconId, ShapeDescriptor theShape) { - priority = thePriority; - label = AbstractPMDAction.getString(theLabelKey); -+ description = ""--""; // TODO - filterText = AbstractPMDAction.getString(theFilterTextKey); - iconId = theIconId; - shape = theShape; - } - -- public PriorityDescriptor(RulePriority thePriority, String theLabelKey, String theFilterTextKey, String theIconId, Util.shape theShape, RGB theColor, int theSize) { -+ public PriorityDescriptor(RulePriority thePriority, String theLabelKey, String theFilterTextKey, String theIconId, Shape theShape, RGB theColor, int theSize) { - this(thePriority, theLabelKey, theFilterTextKey, theIconId, new ShapeDescriptor(theShape, theColor, theSize)); - } - -+ private PriorityDescriptor(RulePriority thePriority) { -+ priority = thePriority; -+ } + -+ public String storeString() { -+ StringBuilder sb = new StringBuilder(); -+ storeOn(sb); -+ return sb.toString(); -+ } -+ -+ public boolean equals(Object other) { -+ -+ if (this == other) return true; -+ if (other.getClass() != getClass()) return false; -+ -+ PriorityDescriptor otherOne = (PriorityDescriptor)other; -+ -+ return priority.equals(otherOne.priority) && -+ StringUtil.isSame(label, otherOne.label, false, false, false) && -+ shape.equals(otherOne.shape) && -+ StringUtil.isSame(description, otherOne.description, false, false, false) && -+ StringUtil.isSame(filterText, otherOne.filterText, false, false, false) && -+ StringUtil.isSame(iconId, otherOne.iconId, false, false, false); -+ } -+ -+ public int hashCode() { -+ return -+ priority.hashCode() ^ shape.hashCode() ^ -+ String.valueOf(label).hashCode() ^ -+ String.valueOf(description).hashCode() ^ -+ String.valueOf(iconId).hashCode(); -+ } -+ -+ public void storeOn(StringBuilder sb) { -+ sb.append(priority.getPriority()).append(DELIMITER); -+ sb.append(label).append(DELIMITER); -+// sb.append(description).append(DELIMITER); -+ sb.append(filterText).append(DELIMITER); -+ sb.append(iconId).append(DELIMITER); -+ sb.append(shape.shape.id).append(DELIMITER); -+ rgbOn(sb, shape.rgbColor); sb.append(DELIMITER); -+ sb.append(shape.size).append(DELIMITER); -+ } -+ - public ImageDescriptor getImageDescriptor() { - return PMDPlugin.getImageDescriptor(iconId); - } - -+ public PriorityDescriptor clone() { -+ -+ PriorityDescriptor copy = new PriorityDescriptor(priority); -+ copy.label = label; -+ copy.description = description; -+ copy.filterText = filterText; -+ copy.iconId = iconId; -+ copy.shape = shape.clone(); -+ -+ return copy; -+ } -+ - public Image getImage(Display display) { - -- return Util.newDrawnImage( -+ return ShapePainter.newDrawnImage( - display, - shape.size, - shape.size, -@@ -51,4 +158,29 @@ public Image getImage(Display display) { - shape.rgbColor //fillColour - ); - } -+ -+ public Image getImage(Display display, int maxDimension) { -+ -+ return ShapePainter.newDrawnImage( -+ display, -+ Math.min(shape.size, maxDimension), -+ Math.min(shape.size, maxDimension), -+ shape.shape, -+ ProtoTransparentColour, -+ shape.rgbColor //fillColour -+ ); -+ } -+ -+ public String toString() { -+ -+ StringBuilder sb = new StringBuilder(); -+ sb.append(""RuleDescriptor: ""); -+ sb.append(priority).append("", ""); -+ sb.append(label).append("", ""); -+ sb.append(description).append("", ""); -+ sb.append(filterText).append("", ""); -+ 
sb.append(iconId).append("", ""); -+ sb.append(shape); -+ return sb.toString(); -+ } - } -diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/plugin/UISettings.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/plugin/UISettings.java -index 4bf1bf9c715..b50f339b03a 100755 ---- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/plugin/UISettings.java -+++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/plugin/UISettings.java -@@ -1,20 +1,31 @@ - package net.sourceforge.pmd.eclipse.plugin; - -+import java.net.MalformedURLException; -+import java.net.URL; ++* DeltaSpike-BeanValidation ++ ++The main feature of the Bean Validation module is to provide ++CDI integration in to ConstraintValidators. This allows you to ++inject CDI objects, EJBs etc in to your validators. ++ ++ ++" +cd2f14637bcecec0081104d749cd1bf10f28b07d,ReactiveX-RxJava,Fixed issue -799 - Added break to possibly-infinite- loop in CompositeException.attachCallingThreadStack--,c,https://github.com/ReactiveX/RxJava,"diff --git a/rxjava-core/src/main/java/rx/util/CompositeException.java b/rxjava-core/src/main/java/rx/util/CompositeException.java +index bca5dcfbf7..439b9400b2 100644 +--- a/rxjava-core/src/main/java/rx/util/CompositeException.java ++++ b/rxjava-core/src/main/java/rx/util/CompositeException.java +@@ -18,7 +18,9 @@ import java.util.ArrayList; - import java.util.Arrays; - import java.util.Comparator; -+import java.util.EnumSet; - import java.util.HashMap; + import java.util.Collection; + import java.util.Collections; ++import java.util.HashSet; import java.util.List; - import java.util.Map; +import java.util.Set; - import net.sourceforge.pmd.RulePriority; --import net.sourceforge.pmd.eclipse.ui.PMDUiConstants; -+import net.sourceforge.pmd.eclipse.runtime.preferences.IPreferences; -+import net.sourceforge.pmd.eclipse.ui.Shape; -+import net.sourceforge.pmd.eclipse.ui.ShapeDescriptor; - import net.sourceforge.pmd.eclipse.ui.nls.StringKeys; - import net.sourceforge.pmd.eclipse.ui.nls.StringTable; --import net.sourceforge.pmd.eclipse.ui.preferences.panelmanagers.ShapeDescriptor; --import net.sourceforge.pmd.eclipse.util.Util; -+import net.sourceforge.pmd.eclipse.ui.preferences.br.PriorityDescriptorCache; + /** + * Exception that is a composite of 1 or more other exceptions. 
+@@ -84,9 +86,16 @@ private static String getStackTraceAsString(StackTraceElement[] stack) { + return s.toString(); + } -+import org.eclipse.jface.resource.ImageDescriptor; -+import org.eclipse.swt.SWT; -+import org.eclipse.swt.graphics.Image; -+import org.eclipse.swt.graphics.ImageData; -+import org.eclipse.swt.graphics.ImageLoader; - import org.eclipse.swt.graphics.RGB; -+import org.eclipse.swt.widgets.Display; +- private static void attachCallingThreadStack(Throwable e, Throwable cause) { ++ /* package-private */ static void attachCallingThreadStack(Throwable e, Throwable cause) { ++ Set seenCauses = new HashSet(); ++ + while (e.getCause() != null) { + e = e.getCause(); ++ if (seenCauses.contains(e.getCause())) { ++ break; ++ } else { ++ seenCauses.add(e.getCause()); ++ } + } + // we now have 'e' as the last in the chain + try { +@@ -98,12 +107,13 @@ private static void attachCallingThreadStack(Throwable e, Throwable cause) { + } + } - /** - * -@@ -27,19 +38,35 @@ public class UISettings { - private static Map shapesByPriority; - private static Map prioritiesByIntValue; - -+ private static final int MAX_MARKER_DIMENSION = 9; -+ - private static final Map uiDescriptorsByPriority = new HashMap(5); +- private final static class CompositeExceptionCausalChain extends RuntimeException { ++ /* package-private */ final static class CompositeExceptionCausalChain extends RuntimeException { + private static final long serialVersionUID = 3875212506787802066L; ++ /* package-private */ static String MESSAGE = ""Chain of Causes for CompositeException In Order Received =>""; + + @Override + public String getMessage() { +- return ""Chain of Causes for CompositeException In Order Received =>""; ++ return MESSAGE; + } + } + +diff --git a/rxjava-core/src/test/java/rx/util/CompositeExceptionTest.java b/rxjava-core/src/test/java/rx/util/CompositeExceptionTest.java +new file mode 100644 +index 0000000000..0e80cf0309 +--- /dev/null ++++ b/rxjava-core/src/test/java/rx/util/CompositeExceptionTest.java +@@ -0,0 +1,70 @@ ++/** ++ * Copyright 2013 Netflix, Inc. ++ * ++ * Licensed under the Apache License, Version 2.0 (the ""License""); ++ * you may not use this file except in compliance with the License. ++ * You may obtain a copy of the License at ++ * ++ * http://www.apache.org/licenses/LICENSE-2.0 ++ * ++ * Unless required by applicable law or agreed to in writing, software ++ * distributed under the License is distributed on an ""AS IS"" BASIS, ++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ++ * See the License for the specific language governing permissions and ++ * limitations under the License. 
++ */ ++package rx.util; + ++import static org.junit.Assert.*; + -+ public static void reloadPriorities() { -+ uiDescriptorsByPriority.clear(); -+ uiDescriptorsByPriority(); // cause a reload -+ } - -- static { -- uiDescriptorsByPriority.put(RulePriority.LOW, new PriorityDescriptor(RulePriority.LOW, StringKeys.MSGKEY_VIEW_FILTER_PRIORITY_1, StringKeys.MSGKEY_VIEW_TOOLTIP_FILTER_PRIORITY_1, PMDUiConstants.ICON_BUTTON_PRIO1, Util.shape.triangleSouthEast, new RGB( 0,0,255), 13) ); // blue -- uiDescriptorsByPriority.put(RulePriority.MEDIUM_LOW, new PriorityDescriptor(RulePriority.MEDIUM_LOW, StringKeys.MSGKEY_VIEW_FILTER_PRIORITY_2, StringKeys.MSGKEY_VIEW_TOOLTIP_FILTER_PRIORITY_2, PMDUiConstants.ICON_BUTTON_PRIO2, Util.shape.triangleDown, new RGB( 0,255,0), 13) ); // green -- uiDescriptorsByPriority.put(RulePriority.MEDIUM, new PriorityDescriptor(RulePriority.MEDIUM, StringKeys.MSGKEY_VIEW_FILTER_PRIORITY_3, StringKeys.MSGKEY_VIEW_TOOLTIP_FILTER_PRIORITY_3, PMDUiConstants.ICON_BUTTON_PRIO3, Util.shape.triangleUp, new RGB( 255,255,0), 13) ); // yellow -- uiDescriptorsByPriority.put(RulePriority.MEDIUM_HIGH, new PriorityDescriptor(RulePriority.MEDIUM_HIGH,StringKeys.MSGKEY_VIEW_FILTER_PRIORITY_4, StringKeys.MSGKEY_VIEW_TOOLTIP_FILTER_PRIORITY_4, PMDUiConstants.ICON_BUTTON_PRIO4, Util.shape.triangleNorthEast, new RGB( 255,0,255), 13) ); // purple -- uiDescriptorsByPriority.put(RulePriority.HIGH, new PriorityDescriptor(RulePriority.HIGH, StringKeys.MSGKEY_VIEW_FILTER_PRIORITY_5, StringKeys.MSGKEY_VIEW_TOOLTIP_FILTER_PRIORITY_5, PMDUiConstants.ICON_BUTTON_PRIO5, Util.shape.diamond, new RGB( 255,0,0), 13) ); // red -+ private static Map uiDescriptorsByPriority() { -+ -+ if (uiDescriptorsByPriority.isEmpty()) { -+ IPreferences preferences = PMDPlugin.getDefault().getPreferencesManager().loadPreferences(); -+ for (RulePriority rp : currentPriorities(true)) { -+ uiDescriptorsByPriority.put(rp, preferences.getPriorityDescriptor(rp)); -+ } -+ } -+ -+ return uiDescriptorsByPriority; -+ } -+ -+ public static Shape[] allShapes() { -+ return new Shape[] { Shape.circle, Shape.star, Shape.domeLeft, Shape.domeRight, Shape.diamond, Shape.square, Shape.roundedRect, Shape.minus, Shape.pipe, Shape.plus, Shape.triangleUp, Shape.triangleDown, Shape.triangleRight, Shape.triangleLeft, Shape.triangleNorthEast, Shape.triangleSouthEast, Shape.triangleSouthWest, Shape.triangleNorthWest }; - } - - public static RulePriority[] currentPriorities(boolean sortAscending) { - -- RulePriority[] priorities = uiDescriptorsByPriority.keySet().toArray(new RulePriority[uiDescriptorsByPriority.size()]); -+ RulePriority[] priorities = RulePriority.values(); - - Arrays.sort(priorities, new Comparator() { - public int compare(RulePriority rpA, RulePriority rbB) { -@@ -47,18 +74,89 @@ public int compare(RulePriority rpA, RulePriority rbB) { - } - }); - return priorities; -- } -+ } -+ -+ public static Map shapeSet(RGB color, int size) { ++import org.junit.Test; ++ ++import java.util.ArrayList; ++import java.util.List; + -+ Map shapes = new HashMap(); ++public class CompositeExceptionTest { + -+ for(Shape shape : EnumSet.allOf(Shape.class)) { -+ shapes.put(shape, new ShapeDescriptor(shape, color, size)); -+ } ++ private final Throwable ex1 = new Throwable(""Ex1""); ++ private final Throwable ex2 = new Throwable(""Ex2"", ex1); ++ private final Throwable ex3 = new Throwable(""Ex3"", ex2); + -+ return shapes; -+ } -+ -+ public static String markerFilenameFor(RulePriority priority) { -+ String fileDir = 
PMDPlugin.getPluginFolder().getAbsolutePath(); -+ return fileDir + ""/"" + relativeMarkerFilenameFor(priority); -+ } -+ -+ public static String relativeMarkerFilenameFor(RulePriority priority) { -+ return ""icons/markerP"" + priority.getPriority() + "".png""; ++ private final CompositeException compositeEx; ++ ++ public CompositeExceptionTest() { ++ List throwables = new ArrayList(); ++ throwables.add(ex1); ++ throwables.add(ex2); ++ throwables.add(ex3); ++ compositeEx = new CompositeException(throwables); + } -+ -+ private static ImageDescriptor getImageDescriptor(final String fileName) { -+ -+ URL installURL = PMDPlugin.getDefault().getBundle().getEntry(""/""); -+ try { -+ URL url = new URL(installURL, fileName); -+ return ImageDescriptor.createFromURL(url); -+ } -+ catch (MalformedURLException mue) { -+ mue.printStackTrace(); -+ return null; -+ } ++ ++ @Test ++ public void testAttachCallingThreadStackParentThenChild() { ++ CompositeException.attachCallingThreadStack(ex1, ex2); ++ assertEquals(""Ex2"", ex1.getCause().getMessage()); + } -+ -+ public static ImageDescriptor markerDescriptorFor(RulePriority priority) { -+ String path = relativeMarkerFilenameFor(priority); -+ return getImageDescriptor(path); ++ ++ @Test ++ public void testAttachCallingThreadStackChildThenParent() { ++ CompositeException.attachCallingThreadStack(ex2, ex1); ++ assertEquals(""Ex1"", ex2.getCause().getMessage()); + } -+ -+ public static Map markerImgDescriptorsByPriority() { -+ -+ RulePriority[] priorities = currentPriorities(true); -+ Map overlaysByPriority = new HashMap(priorities.length); -+ for (RulePriority priority : priorities) { -+ overlaysByPriority.put( -+ priority.getPriority(), -+ markerDescriptorFor(priority) -+ ); -+ } -+ return overlaysByPriority; ++ ++ @Test ++ public void testAttachCallingThreadStackAddComposite() { ++ CompositeException.attachCallingThreadStack(ex1, compositeEx); ++ assertEquals(""Ex2"", ex1.getCause().getMessage()); + } -+ -+ public static void createRuleMarkerIcons(Display display) { -+ -+ ImageLoader loader = new ImageLoader(); -+ -+ PriorityDescriptorCache pdc = PriorityDescriptorCache.instance; -+ -+ for (RulePriority priority : currentPriorities(true)) { -+ Image image = pdc.descriptorFor(priority).getImage(display, MAX_MARKER_DIMENSION); -+ loader.data = new ImageData[] { image.getImageData() }; -+ String fullPath = markerFilenameFor( priority ); -+ loader.save(fullPath, SWT.IMAGE_PNG); -+ -+ image.dispose(); -+ } ++ ++ @Test ++ public void testAttachCallingThreadStackAddToComposite() { ++ CompositeException.attachCallingThreadStack(compositeEx, ex1); ++ assertEquals(CompositeException.CompositeExceptionCausalChain.MESSAGE, compositeEx.getCause().getMessage()); + } -+ -+ public static String descriptionFor(RulePriority priority) { -+ return descriptorFor(priority).description; ++ ++ @Test ++ public void testAttachCallingThreadStackAddCompositeToItself() { ++ CompositeException.attachCallingThreadStack(compositeEx, compositeEx); ++ assertEquals(CompositeException.CompositeExceptionCausalChain.MESSAGE, compositeEx.getCause().getMessage()); + } - - public static PriorityDescriptor descriptorFor(RulePriority priority) { -- return uiDescriptorsByPriority.get(priority); -+ return uiDescriptorsByPriority().get(priority); - } - - public static Map shapesByPriority() { - - if (shapesByPriority != null) return shapesByPriority; - -- Map shapesByPriority = new HashMap(uiDescriptorsByPriority.size()); -- for (Map.Entry entry : uiDescriptorsByPriority.entrySet()) { -+ Map 
shapesByPriority = new HashMap(uiDescriptorsByPriority().size()); -+ for (Map.Entry entry : uiDescriptorsByPriority().entrySet()) { - shapesByPriority.put(entry.getKey(), entry.getValue().shape); - } - -@@ -68,8 +166,8 @@ public static Map shapesByPriority() { - public static RulePriority priorityFor(int value) { - - if (prioritiesByIntValue == null) { -- prioritiesByIntValue = new HashMap(uiDescriptorsByPriority.size()); -- for (Map.Entry entry : uiDescriptorsByPriority.entrySet()) { -+ prioritiesByIntValue = new HashMap(uiDescriptorsByPriority().size()); -+ for (Map.Entry entry : uiDescriptorsByPriority().entrySet()) { - prioritiesByIntValue.put(entry.getKey().getPriority(), entry.getKey()); - } - } -@@ -97,8 +195,8 @@ public static String[] getPriorityLabels() { - - public static List getPriorityIntValues() { - -- List values = new ArrayList(uiDescriptorsByPriority.size()); -- for (RulePriority priority : uiDescriptorsByPriority.keySet()) { -+ List values = new ArrayList(); -+ for (RulePriority priority : RulePriority.values()) { - values.add(priority.getPriority()); - } - return values; -diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/PMDRuntimeConstants.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/PMDRuntimeConstants.java -index ed3d21a0617..0ace889df05 100644 ---- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/PMDRuntimeConstants.java -+++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/PMDRuntimeConstants.java -@@ -12,10 +12,18 @@ - */ - public class PMDRuntimeConstants { - -- public static final String PMD_MARKER = PMDPlugin.PLUGIN_ID + "".pmdMarker""; -+ public static final String PMD_MARKER = PMDPlugin.PLUGIN_ID + "".pmdMarker""; // obsolete -+ -+ public static final String PMD_MARKER_1 = PMDPlugin.PLUGIN_ID + "".pmdMarker1""; -+ public static final String PMD_MARKER_2 = PMDPlugin.PLUGIN_ID + "".pmdMarker2""; -+ public static final String PMD_MARKER_3 = PMDPlugin.PLUGIN_ID + "".pmdMarker3""; -+ public static final String PMD_MARKER_4 = PMDPlugin.PLUGIN_ID + "".pmdMarker4""; -+ public static final String PMD_MARKER_5 = PMDPlugin.PLUGIN_ID + "".pmdMarker5""; -+ - public static final String PMD_DFA_MARKER = PMDPlugin.PLUGIN_ID + "".pmdDFAMarker""; - public static final String PMD_TASKMARKER = PMDPlugin.PLUGIN_ID + "".pmdTaskMarker""; -- public static final String[] ALL_MARKER_TYPES = new String[] { PMD_MARKER, PMD_DFA_MARKER, PMD_TASKMARKER }; -+ public static final String[] RULE_MARKER_TYPES = new String[] { PMD_MARKER, PMD_MARKER_1, PMD_MARKER_2, PMD_MARKER_3, PMD_MARKER_4, PMD_MARKER_5 }; -+ public static final String[] ALL_MARKER_TYPES = new String[] { PMD_MARKER, PMD_DFA_MARKER, PMD_TASKMARKER, PMD_MARKER_1, PMD_MARKER_2, PMD_MARKER_3, PMD_MARKER_4, PMD_MARKER_5 }; - - public static final IntegerProperty MAX_VIOLATIONS_DESCRIPTOR = new IntegerProperty(""maxviolations"", ""Max allowable violations"", 1, Integer.MAX_VALUE-1, 1000, 0f); - -diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/builder/MarkerUtil.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/builder/MarkerUtil.java -index 786d37d065e..17af8e1dc48 100755 ---- 
a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/builder/MarkerUtil.java -+++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/builder/MarkerUtil.java -@@ -1,8 +1,10 @@ - package net.sourceforge.pmd.eclipse.runtime.builder; - - import java.util.ArrayList; -+import java.util.HashMap; - import java.util.HashSet; - import java.util.List; -+import java.util.Map; - import java.util.Set; ++} +\ No newline at end of file" +4043b1d38140d531f5f97d4f87850f168283c240,spring-framework,Workaround Javadoc bug with JDK 8 (b112+)--Remove Javadoc linkplain to ExceptionHandler-value() from-AnnotationMethodHandlerExceptionResolver to work around JDK-Javadoc bug 9007707.-,c,https://github.com/spring-projects/spring-framework,"diff --git a/spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/annotation/AnnotationMethodHandlerExceptionResolver.java b/spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/annotation/AnnotationMethodHandlerExceptionResolver.java +index 1450d8a9683a..65bba17c3830 100644 +--- a/spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/annotation/AnnotationMethodHandlerExceptionResolver.java ++++ b/spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/annotation/AnnotationMethodHandlerExceptionResolver.java +@@ -206,7 +206,7 @@ public void doWith(Method method) { - import net.sourceforge.pmd.Rule; -@@ -10,12 +12,18 @@ - import net.sourceforge.pmd.eclipse.plugin.PMDPlugin; - import net.sourceforge.pmd.eclipse.runtime.PMDRuntimeConstants; - import net.sourceforge.pmd.eclipse.ui.PMDUiConstants; -+import net.sourceforge.pmd.eclipse.ui.model.AbstractPMDRecord; -+import net.sourceforge.pmd.eclipse.ui.model.FileRecord; -+import net.sourceforge.pmd.eclipse.ui.model.MarkerRecord; -+import net.sourceforge.pmd.eclipse.ui.model.RootRecord; - import net.sourceforge.pmd.util.StringUtil; + /** + * Returns all the exception classes handled by the given method. +- *

<p>The default implementation looks for exceptions in the {@linkplain ExceptionHandler#value() annotation}, ++ * <p>
The default implementation looks for exceptions in the annotation, + * or - if that annotation element is empty - any exceptions listed in the method parameters if the method + * is annotated with {@code @ExceptionHandler}. + * @param method the method" +95ba62f83dfa05990d2165484330cdd0792064d8,elasticsearch,Translog: Implement a file system based translog- and make it the default,a,https://github.com/elastic/elasticsearch,"diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/index/translog/TranslogModule.java b/modules/elasticsearch/src/main/java/org/elasticsearch/index/translog/TranslogModule.java +index 61977707465c0..02c378097c250 100644 +--- a/modules/elasticsearch/src/main/java/org/elasticsearch/index/translog/TranslogModule.java ++++ b/modules/elasticsearch/src/main/java/org/elasticsearch/index/translog/TranslogModule.java +@@ -22,7 +22,7 @@ + import org.elasticsearch.common.inject.AbstractModule; + import org.elasticsearch.common.inject.Scopes; + import org.elasticsearch.common.settings.Settings; +-import org.elasticsearch.index.translog.memory.MemoryTranslog; ++import org.elasticsearch.index.translog.fs.FsTranslog; - import org.eclipse.core.resources.IFile; - import org.eclipse.core.resources.IMarker; -+import org.eclipse.core.resources.IMarkerDelta; - import org.eclipse.core.resources.IProject; - import org.eclipse.core.resources.IResource; -+import org.eclipse.core.resources.IResourceChangeEvent; - import org.eclipse.core.resources.IResourceVisitor; - import org.eclipse.core.resources.IWorkspaceRoot; - import org.eclipse.core.runtime.CoreException; -@@ -28,6 +36,8 @@ public class MarkerUtil { + /** + * @author kimchy (shay.banon) +@@ -41,7 +41,7 @@ public TranslogModule(Settings settings) { - public static final IMarker[] EMPTY_MARKERS = new IMarker[0]; - -+ private static Map rulesByName; -+ - private MarkerUtil() { } - - public static boolean hasAnyRuleMarkers(IResource resource) throws CoreException { -@@ -42,15 +52,17 @@ public boolean visit(IResource resource) { - - if (resource instanceof IFile) { - -- IMarker[] ruleMarkers = null; -- try { -- ruleMarkers = resource.findMarkers(PMDRuntimeConstants.PMD_MARKER, true, IResource.DEPTH_INFINITE); -- } catch (CoreException ex) { -- // what do to? -- } -- if (ruleMarkers.length > 0) { -- foundOne[0] = true; -- return false; -+ for (String markerType : PMDRuntimeConstants.RULE_MARKER_TYPES) { -+ IMarker[] ruleMarkers = null; -+ try { -+ ruleMarkers = resource.findMarkers(markerType, true, IResource.DEPTH_INFINITE); -+ } catch (CoreException ex) { -+ // what do to? 
-+ } -+ if (ruleMarkers.length > 0) { -+ foundOne[0] = true; -+ return false; -+ } - } - } - -@@ -92,7 +104,7 @@ public static String ruleNameFor(IMarker marker) { - } - - public static int rulePriorityFor(IMarker marker) throws CoreException { -- return ((Integer)marker.getAttribute(PMDUiConstants.KEY_MARKERATT_PRIORITY)).intValue(); -+ return (Integer)marker.getAttribute(PMDUiConstants.KEY_MARKERATT_PRIORITY); - } - - public static int deleteViolationsOf(String ruleName, IResource resource) { -@@ -120,6 +132,17 @@ public static int deleteViolationsOf(String ruleName, IResource resource) { - } + @Override protected void configure() { + bind(Translog.class) +- .to(settings.getAsClass(TranslogSettings.TYPE, MemoryTranslog.class)) ++ .to(settings.getAsClass(TranslogSettings.TYPE, FsTranslog.class)) + .in(Scopes.SINGLETON); } - -+ public static List markerDeltasIn(IResourceChangeEvent event) { -+ -+ List deltas = new ArrayList(); -+ for (String markerType : PMDRuntimeConstants.RULE_MARKER_TYPES) { -+ IMarkerDelta[] deltaArray = event.findMarkerDeltas(markerType, true); -+ for (IMarkerDelta delta : deltaArray) deltas.add(delta); -+ } -+ -+ return deltas; -+ } -+ - public static List rulesFor(IMarker[] markers) { - - List rules = new ArrayList(markers.length); -@@ -165,6 +188,7 @@ public static void deleteMarkersIn(IResource resource, String[] markerTypes) thr - for (String markerType : markerTypes) { - resource.deleteMarkers(markerType, true, IResource.DEPTH_INFINITE); - } -+ PMDPlugin.getDefault().removedMarkersIn(resource); - } + }" +bb85cacf9358f9ed289e72a416e78625fcab18a4,Delta Spike,"DELTASPIKE-289 fix WindowScoped context test on jbossas + +I recently removed the which did lead to a different +component Id for the outputValue. fixed now. +",p,https://github.com/apache/deltaspike,"diff --git a/deltaspike/modules/jsf/impl/src/test/java/org/apache/deltaspike/test/jsf/impl/scope/window/WindowScopedContextTest.java b/deltaspike/modules/jsf/impl/src/test/java/org/apache/deltaspike/test/jsf/impl/scope/window/WindowScopedContextTest.java +index c57b16c48..a6e97cb28 100644 +--- a/deltaspike/modules/jsf/impl/src/test/java/org/apache/deltaspike/test/jsf/impl/scope/window/WindowScopedContextTest.java ++++ b/deltaspike/modules/jsf/impl/src/test/java/org/apache/deltaspike/test/jsf/impl/scope/window/WindowScopedContextTest.java +@@ -20,6 +20,7 @@ - public static IMarker[] findAllMarkers(IResource resource) throws CoreException { -@@ -189,4 +213,61 @@ public static IMarker[] findMarkers(IResource resource, String[] markerTypes) th - return markerList.toArray(markerArray); - } -+ public static Set priorityRangeOf(IResource resource, String[] markerTypes, int sizeLimit) throws CoreException { -+ -+ Set priorityLevels = new HashSet(sizeLimit); -+ -+ for (String markerType : markerTypes) { -+ for (IMarker marker : resource.findMarkers(markerType, true, IResource.DEPTH_INFINITE)) { -+ priorityLevels.add( rulePriorityFor(marker) ); -+ if (priorityLevels.size() == sizeLimit) return priorityLevels; -+ } -+ } -+ -+ return priorityLevels; -+ } -+ -+ -+ private static void gatherRuleNames() { -+ -+ rulesByName = new HashMap(); -+ Set ruleSets = PMDPlugin.getDefault().getRuleSetManager().getRegisteredRuleSets(); -+ for (RuleSet rs : ruleSets) { -+ for (Rule rule : rs.getRules()) { -+ rulesByName.put(rule.getName(), rule); -+ } -+ } -+ } -+ -+ private static Rule ruleFrom(IMarker marker) { -+ String ruleName = marker.getAttribute(PMDRuntimeConstants.KEY_MARKERATT_RULENAME, """"); -+ if (StringUtil.isEmpty(ruleName)) 
return null; //printValues(marker); -+ return rulesByName.get(ruleName); -+ } -+ -+ public static Set allMarkedFiles(RootRecord root) { -+ -+ gatherRuleNames(); -+ -+ Set files = new HashSet(); -+ -+ for (AbstractPMDRecord projectRecord : root.getChildren()) { -+ for (AbstractPMDRecord packageRecord : projectRecord.getChildren()) { -+ for (AbstractPMDRecord fileRecord : packageRecord.getChildren()) { -+ ((FileRecord)fileRecord).updateChildren(); -+ for (AbstractPMDRecord mRecord : fileRecord.getChildren()) { -+ MarkerRecord markerRecord = (MarkerRecord) mRecord; -+ for (IMarker marker : markerRecord.findMarkers()) { -+ Rule rule = ruleFrom(marker); -+ if (rule == null) continue; -+ files.add((IFile)fileRecord.getResource()); -+ break; -+ } -+ } -+ } -+ } -+ } -+ -+ return files; -+ } - } -diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/cmd/BaseVisitor.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/cmd/BaseVisitor.java -index b9c8f873f85..62bf20c54d8 100644 ---- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/cmd/BaseVisitor.java -+++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/cmd/BaseVisitor.java -@@ -230,17 +230,17 @@ public void setProjectProperties(IProjectProperties projectProperties) { - * the resource to process - */ - protected final void reviewResource(final IResource resource) { -- final IFile file = (IFile) resource.getAdapter(IFile.class); -+ IFile file = (IFile) resource.getAdapter(IFile.class); - if (file != null && file.getFileExtension() != null) { + import java.net.URL; ++import java.util.logging.Logger; - try { -- boolean included = this.projectProperties.isIncludeDerivedFiles() || !this.projectProperties.isIncludeDerivedFiles() && !file.isDerived(); -- log.debug(""Derived files included: "" + this.projectProperties.isIncludeDerivedFiles()); -+ boolean included = projectProperties.isIncludeDerivedFiles() || !projectProperties.isIncludeDerivedFiles() && !file.isDerived(); -+ log.debug(""Derived files included: "" + projectProperties.isIncludeDerivedFiles()); - log.debug(""file "" + file.getName() + "" is derived: "" + file.isDerived()); - log.debug(""file checked: "" + included); - - final File sourceCodeFile = file.getRawLocation().toFile(); -- if (getPmdEngine().applies(sourceCodeFile, getRuleSet()) && isFileInWorkingSet(file) && (this.projectProperties.isIncludeDerivedFiles() || !this.projectProperties.isIncludeDerivedFiles() && !file.isDerived())) { -+ if (getPmdEngine().applies(sourceCodeFile, getRuleSet()) && isFileInWorkingSet(file) && (projectProperties.isIncludeDerivedFiles() || !this.projectProperties.isIncludeDerivedFiles() && !file.isDerived())) { - subTask(""PMD checking: "" + file.getName()); - - Timer timer = new Timer(); -@@ -308,7 +308,7 @@ private boolean isFileInWorkingSet(final IFile file) throws PropertiesException - * Update markers list for the specified file - * - * @param file -- * the file for which markes are to be updated -+ * the file for which markers are to be updated - * @param context - * a PMD context - * @param fTask -@@ -324,6 +324,20 @@ private int maxAllowableViolationsFor(Rule rule) { - PMDRuntimeConstants.MAX_VIOLATIONS_DESCRIPTOR.defaultValue(); - } - -+ public static String markerTypeFor(RuleViolation violation) { + import org.apache.deltaspike.test.category.WebProfileCategory; + import 
org.apache.deltaspike.test.jsf.impl.scope.window.beans.WindowScopedBackingBean; +@@ -51,6 +52,8 @@ + @Category(WebProfileCategory.class) + public class WindowScopedContextTest + { ++ private static final Logger log = Logger.getLogger(WindowScopedContextTest.class.getName()); + -+ int priorityId = violation.getRule().getPriority().getPriority(); + @Drone + private WebDriver driver; + +@@ -79,14 +82,12 @@ public static WebArchive deploy() + public void testWindowId() throws Exception + { + System.out.println(""contextpath= "" + contextPath); +- //X +- Thread.sleep(600000L); +- +- driver.get(new URL(contextPath, ""page.xhtml"").toString()); + + //X comment this in if you like to debug the server + //X I've already reported ARQGRA-213 for it +- //X ++ //X Thread.sleep(600000L); + -+ switch (priorityId) { -+ case 1: return PMDRuntimeConstants.PMD_MARKER_1; -+ case 2: return PMDRuntimeConstants.PMD_MARKER_2; -+ case 3: return PMDRuntimeConstants.PMD_MARKER_3; -+ case 4: return PMDRuntimeConstants.PMD_MARKER_4; -+ case 5: return PMDRuntimeConstants.PMD_MARKER_5; -+ default: return PMDRuntimeConstants.PMD_MARKER; -+ } -+ } -+ - private void updateMarkers(final IFile file, final RuleContext context, final boolean fTask, final Map> accumulator) - throws CoreException, PropertiesException { - final Set markerSet = new HashSet(); -@@ -336,7 +350,7 @@ private void updateMarkers(final IFile file, final RuleContext context, final bo - - Rule rule = null; - while (iter.hasNext()) { -- final RuleViolation violation = iter.next(); -+ RuleViolation violation = iter.next(); - rule = violation.getRule(); - review.ruleName = rule.getName(); - review.lineNumber = violation.getBeginLine(); -@@ -354,8 +368,8 @@ private void updateMarkers(final IFile file, final RuleContext context, final bo - - if (count.intValue() < maxViolations) { - // Ryan Gustafson 02/16/2008 - Always use PMD_MARKER, as people get confused as to why PMD problems don't always show up on Problems view like they do when you do build. -- // markerSet.add(getMarkerInfo(violation, fTask ? PMDRuntimeConstants.PMD_TASKMARKER : PMDRuntimeConstants.PMD_MARKER)); -- markerSet.add(getMarkerInfo(violation, PMDRuntimeConstants.PMD_MARKER)); -+ // markerSet.add(getMarkerInfo(violation, fTask ? 
PMDRuntimeConstants.PMD_TASKMARKER : PMDRuntimeConstants.PMD_MARKER)); -+ markerSet.add(getMarkerInfo(violation, markerTypeFor(violation))); - /* - if (isDfaEnabled && violation.getRule().usesDFA()) { - markerSet.add(getMarkerInfo(violation, PMDRuntimeConstants.PMD_DFA_MARKER)); -@@ -518,8 +532,8 @@ private class Review { - public boolean equals(final Object obj) { - boolean result = false; - if (obj instanceof Review) { -- final Review reviewObj = (Review) obj; -- result = this.ruleName.equals(reviewObj.ruleName) && this.lineNumber == reviewObj.lineNumber; -+ Review reviewObj = (Review) obj; -+ result = ruleName.equals(reviewObj.ruleName) && lineNumber == reviewObj.lineNumber; - } - return result; - } -diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/cmd/ReviewCodeCmd.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/cmd/ReviewCodeCmd.java -index 17697b5bf88..c2bd1ee1cf6 100644 ---- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/cmd/ReviewCodeCmd.java -+++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/cmd/ReviewCodeCmd.java -@@ -36,6 +36,7 @@ - package net.sourceforge.pmd.eclipse.runtime.cmd; ++ driver.get(new URL(contextPath, ""page.xhtml"").toString()); - import java.util.ArrayList; -+import java.util.Collection; - import java.util.HashMap; - import java.util.Iterator; - import java.util.List; -@@ -47,7 +48,6 @@ - import net.sourceforge.pmd.RuleSet; - import net.sourceforge.pmd.eclipse.plugin.PMDPlugin; - import net.sourceforge.pmd.eclipse.runtime.PMDRuntimeConstants; --import net.sourceforge.pmd.eclipse.runtime.builder.MarkerUtil; - import net.sourceforge.pmd.eclipse.runtime.preferences.IPreferences; - import net.sourceforge.pmd.eclipse.runtime.properties.IProjectProperties; - import net.sourceforge.pmd.eclipse.runtime.properties.PropertiesException; -@@ -64,6 +64,7 @@ - import org.eclipse.core.resources.IResourceRuleFactory; - import org.eclipse.core.resources.IResourceVisitor; - import org.eclipse.core.resources.IWorkspace; -+import org.eclipse.core.resources.IWorkspaceRoot; - import org.eclipse.core.resources.IWorkspaceRunnable; - import org.eclipse.core.resources.ResourcesPlugin; - import org.eclipse.core.runtime.CoreException; -@@ -90,8 +91,8 @@ public class ReviewCodeCmd extends AbstractDefaultCommand { - final private List resources = new ArrayList(); - private IResourceDelta resourceDelta; - private Map> markersByFile = new HashMap>(); -- private boolean taskMarker = false; -- private boolean openPmdPerspective = false; -+ private boolean taskMarker; -+ private boolean openPmdPerspective; - private int ruleCount; - private int fileCount; - private long pmdDuration; -@@ -107,11 +108,15 @@ public class ReviewCodeCmd extends AbstractDefaultCommand { - public ReviewCodeCmd() { - super(""ReviewCode"", ""Run PMD on a list of workbench resources""); - -- this.setOutputProperties(true); -- this.setReadOnly(true); -- this.setTerminated(false); -+ setOutputProperties(true); -+ setReadOnly(true); -+ setTerminated(false); - } - -+ public Set markedFiles() { -+ return markersByFile.keySet(); -+ } -+ - /** - * @see name.herlin.command.AbstractProcessableCommand#execute() - */ -@@ -119,9 +124,9 @@ public ReviewCodeCmd() { - public void execute() throws CommandException { - log.info(""ReviewCode command starting.""); - try { -- this.fileCount = 0; -- 
this.ruleCount = 0; -- this.pmdDuration = 0; -+ fileCount = 0; -+ ruleCount = 0; -+ pmdDuration = 0; - - beginTask(""PMD checking..."", getStepCount()); - -@@ -133,7 +138,7 @@ public void execute() throws CommandException { - } + WebElement inputField = driver.findElement(By.id(""test:valueInput"")); + inputField.sendKeys(""23""); +@@ -94,7 +95,7 @@ public void testWindowId() throws Exception + WebElement button = driver.findElement(By.id(""test:saveButton"")); + button.click(); - // Appliquer les marqueurs -- final IWorkspaceRunnable action = new IWorkspaceRunnable() { -+ IWorkspaceRunnable action = new IWorkspaceRunnable() { - public void run(IProgressMonitor monitor) throws CoreException { - applyMarkers(); - } -@@ -143,7 +148,7 @@ public void run(IProgressMonitor monitor) throws CoreException { - workspace.run(action, getschedulingRule(), IWorkspace.AVOID_UPDATE, getMonitor()); - - // Switch to the PMD perspective if required -- if (this.openPmdPerspective) { -+ if (openPmdPerspective) { - Display.getDefault().asyncExec(new Runnable() { - public void run() { - switchToPmdPerspective(); -@@ -174,13 +179,15 @@ public void run() { - logInfo(""Review code command terminated. "" + ruleCount + "" rules were executed against "" + fileCount + "" files. PMD was not executed.""); - } +- Assert.assertTrue(ExpectedConditions.textToBePresentInElement(By.id(""test:valueOutput""), ""23"").apply(driver)); ++ Assert.assertTrue(ExpectedConditions.textToBePresentInElement(By.id(""valueOutput""), ""23"").apply(driver)); + + }" +a09d266b247b185054d0ab0dfdb6e8dc2e8898bc,orientdb,Minor: removed some warnings--,p,https://github.com/orientechnologies/orientdb,"diff --git a/core/src/main/java/com/orientechnologies/orient/core/index/OIndexManager.java b/core/src/main/java/com/orientechnologies/orient/core/index/OIndexManager.java +index f5fe061175a..dcf7d2ae48b 100644 +--- a/core/src/main/java/com/orientechnologies/orient/core/index/OIndexManager.java ++++ b/core/src/main/java/com/orientechnologies/orient/core/index/OIndexManager.java +@@ -32,13 +32,13 @@ public interface OIndexManager { + + public void create(); + +- public Collection getIndexes(); ++ public Collection> getIndexes(); + +- public OIndex getIndex(final String iName); ++ public OIndex getIndex(final String iName); + +- public OIndex getIndex(final ORID iRID); ++ public OIndex getIndex(final ORID iRID); + +- public OIndex createIndex(final String iName, final String iType, final OType iKeyType, final int[] iClusterIdsToIndex, ++ public OIndex createIndex(final String iName, final String iType, final OType iKeyType, final int[] iClusterIdsToIndex, + OIndexCallback iCallback, final OProgressListener iProgressListener, final boolean iAutomatic); + + public OIndexManager dropIndex(final String iIndexName); +diff --git a/core/src/main/java/com/orientechnologies/orient/core/index/OIndexManagerProxy.java b/core/src/main/java/com/orientechnologies/orient/core/index/OIndexManagerProxy.java +index d0a9eadd979..7b403ff8d30 100644 +--- a/core/src/main/java/com/orientechnologies/orient/core/index/OIndexManagerProxy.java ++++ b/core/src/main/java/com/orientechnologies/orient/core/index/OIndexManagerProxy.java +@@ -47,24 +47,24 @@ public void create() { + delegate.create(); + } + +- public Collection getIndexes() { ++ public Collection> getIndexes() { + return delegate.getIndexes(); + } + +- public OIndex getIndex(String iName) { ++ public OIndex getIndex(String iName) { + return delegate.getIndex(iName); + } + +- public OIndex getIndex(ORID iRID) { ++ public 
OIndex getIndex(ORID iRID) { + return delegate.getIndex(iRID); + } + +- public OIndex createIndex(String iName, String iType, final OType iKeyType, int[] iClusterIdsToIndex, OIndexCallback iCallback, ++ public OIndex createIndex(String iName, String iType, final OType iKeyType, int[] iClusterIdsToIndex, OIndexCallback iCallback, + OProgressListener iProgressListener, boolean iAutomatic) { + return delegate.createIndex(iName, iType, iKeyType, iClusterIdsToIndex, iCallback, iProgressListener, iAutomatic); + } + +- public OIndex getIndexInternal(final String iName) { ++ public OIndex getIndexInternal(final String iName) { + return ((OIndexManagerShared) delegate).getIndexInternal(iName); + } + +diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLSelect.java b/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLSelect.java +index d85b127e2d9..10e8f89b848 100644 +--- a/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLSelect.java ++++ b/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLSelect.java +@@ -108,9 +108,9 @@ public class OCommandExecutorSQLSelect extends OCommandExecutorSQLAbstract imple + private static final class OSearchInIndexTriple { + private OQueryOperator indexOperator; + private Object key; +- private OIndex index; ++ private OIndex index; + +- private OSearchInIndexTriple(final OQueryOperator indexOperator, final Object key, final OIndex index) { ++ private OSearchInIndexTriple(final OQueryOperator indexOperator, final Object key, final OIndex index) { + this.indexOperator = indexOperator; + this.key = key; + this.index = index; +@@ -120,7 +120,6 @@ private OSearchInIndexTriple(final OQueryOperator indexOperator, final Object ke + /** + * Compile the filter conditions only the first time. + */ +- @SuppressWarnings(""unchecked"") + public OCommandExecutorSQLSelect parse(final OCommandRequestText iRequest) { + iRequest.getDatabase().checkSecurity(ODatabaseSecurityResources.COMMAND, ORole.PERMISSION_READ); + +@@ -386,7 +385,7 @@ private boolean searchForIndexes(final List> iResultSet, final OClass + return false; + + for (OSearchInIndexTriple indexTriple : searchInIndexTriples) { +- final OIndex idx = indexTriple.index.getInternal(); ++ final OIndex idx = indexTriple.index.getInternal(); + final OQueryOperator operator = indexTriple.indexOperator; + final Object key = indexTriple.key; + +@@ -482,7 +481,7 @@ private boolean searchIndexedProperty(OClass iSchemaClass, final OSQLFilterCondi + + if (prop != null && prop.isIndexed()) { + final Object origValue = iCondition.getLeft() == iItem ? 
iCondition.getRight() : iCondition.getLeft(); +- final OIndex underlyingIndex = prop.getIndex().getUnderlying(); ++ final OIndex underlyingIndex = prop.getIndex().getUnderlying(); + + if (iCondition.getOperator() instanceof OQueryOperatorBetween) { + iSearchInIndexTriples.add(new OSearchInIndexTriple(iCondition.getOperator(), origValue, underlyingIndex)); +@@ -788,7 +787,8 @@ record = database.load(rid); + } + + private void searchInIndex() { +- final OIndex index = database.getMetadata().getIndexManager().getIndex(compiledFilter.getTargetIndex()); ++ final OIndex index = (OIndex) database.getMetadata().getIndexManager() ++ .getIndex(compiledFilter.getTargetIndex()); + if (index == null) + throw new OCommandExecutionException(""Target index '"" + compiledFilter.getTargetIndex() + ""' not found""); + +diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/IndexTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/IndexTest.java +index 280c175c157..dac497b5198 100644 +--- a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/IndexTest.java ++++ b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/IndexTest.java +@@ -706,7 +706,7 @@ public void createNotUniqueIndexOnNick() { + public void LongTypes() { + database.getMetadata().getSchema().getClass(""Profile"").createProperty(""hash"", OType.LONG).createIndex(INDEX_TYPE.UNIQUE); + +- OIndex idx = database.getMetadata().getIndexManager().getIndex(""Profile.hash""); ++ OIndex idx = (OIndex) database.getMetadata().getIndexManager().getIndex(""Profile.hash""); + + for (int i = 0; i < 5; i++) { + Profile profile = new Profile(""HashTest1"").setHash(100l + i);" +a9a93876f2ead9468fd50eba715083c9a7e8a52a,drools,JBRULES-3714 Add capability to configure- date-effective/date-expires for SpreadSheet--,a,https://github.com/kiegroup/drools,"diff --git a/drools-decisiontables/src/main/java/org/drools/decisiontable/parser/ActionType.java b/drools-decisiontables/src/main/java/org/drools/decisiontable/parser/ActionType.java +index 73cb78135c8..9fe46429cd8 100644 +--- a/drools-decisiontables/src/main/java/org/drools/decisiontable/parser/ActionType.java ++++ b/drools-decisiontables/src/main/java/org/drools/decisiontable/parser/ActionType.java +@@ -46,6 +46,8 @@ public enum Code { + ACTIVATIONGROUP( ""ACTIVATION-GROUP"", ""X"", 1 ), + AGENDAGROUP( ""AGENDA-GROUP"", ""G"", 1 ), + RULEFLOWGROUP( ""RULEFLOW-GROUP"", ""R"", 1 ), ++ DATEEFFECTIVE( ""DATE-EFFECTIVE"", ""V"", 1 ), ++ DATEEXPIRES( ""DATE-EXPIRES"", ""Z"", 1 ), + METADATA( ""METADATA"", ""@"" ); + + private String colHeader; +@@ -80,7 +82,7 @@ public int getMaxCount() { } -+ -+ PMDPlugin.getDefault().changedFiles( markedFiles() ); } - /** - * @return Returns the file markers - */ - public Map> getMarkers() { -- return this.markersByFile; -+ return markersByFile; - } +- public static final EnumSet ATTRIBUTE_CODE_SET = EnumSet.range( Code.SALIENCE, Code.RULEFLOWGROUP ); ++ public static final EnumSet ATTRIBUTE_CODE_SET = EnumSet.range( Code.SALIENCE, Code.DATEEXPIRES ); - /** -@@ -301,11 +308,11 @@ private void processResource(IResource resource) throws CommandException { - log.debug(""Visiting resource "" + resource.getName() + "" : "" + getStepCount()); - - final ResourceVisitor visitor = new ResourceVisitor(); -- visitor.setMonitor(this.getMonitor()); -+ visitor.setMonitor(getMonitor()); - visitor.setRuleSet(ruleSet); - visitor.setPmdEngine(pmdEngine); -- visitor.setAccumulator(this.markersByFile); -- 
visitor.setUseTaskMarker(this.taskMarker); -+ visitor.setAccumulator(markersByFile); -+ visitor.setUseTaskMarker(taskMarker); - visitor.setProjectProperties(properties); - resource.accept(visitor); - -@@ -337,6 +344,7 @@ private void processProject(IProject project) throws CommandException { - - final IJavaProject javaProject = JavaCore.create(project); - final IClasspathEntry[] entries = javaProject.getRawClasspath(); -+ final IWorkspaceRoot root = ResourcesPlugin.getWorkspace().getRoot(); - for (IClasspathEntry entrie : entries) { - if (entrie.getEntryKind() == IClasspathEntry.CPE_SOURCE) { - -@@ -346,9 +354,9 @@ private void processProject(IProject project) throws CommandException { - // to know if the entry is a folder or a project ! - IContainer sourceContainer = null; - try { -- sourceContainer = ResourcesPlugin.getWorkspace().getRoot().getFolder(entrie.getPath()); -+ sourceContainer = root.getFolder(entrie.getPath()); - } catch (IllegalArgumentException e) { -- sourceContainer = ResourcesPlugin.getWorkspace().getRoot().getProject(entrie.getPath().toString()); -+ sourceContainer = root.getProject(entrie.getPath().toString()); - } - if (sourceContainer == null) { - log.warn(""Source container "" + entrie.getPath() + "" for project "" + project.getName() + "" is not valid""); -@@ -384,27 +392,27 @@ private RuleSet filteredRuleSet(IProjectProperties properties) throws CommandExc - */ - private void processResourceDelta() throws CommandException { - try { -- final IProject project = this.resourceDelta.getResource().getProject(); -+ final IProject project = resourceDelta.getResource().getProject(); - final IProjectProperties properties = PMDPlugin.getDefault().loadProjectProperties(project); - -- final RuleSet ruleSet = filteredRuleSet(properties); //properties.getProjectRuleSet(); -+ RuleSet ruleSet = filteredRuleSet(properties); //properties.getProjectRuleSet(); - -- final PMDEngine pmdEngine = getPmdEngineForProject(project); -- this.setStepCount(countDeltaElement(this.resourceDelta)); -+ PMDEngine pmdEngine = getPmdEngineForProject(project); -+ setStepCount(countDeltaElement(resourceDelta)); - log.debug(""Visit of resource delta : "" + getStepCount()); - -- final DeltaVisitor visitor = new DeltaVisitor(); -- visitor.setMonitor(this.getMonitor()); -+ DeltaVisitor visitor = new DeltaVisitor(); -+ visitor.setMonitor(getMonitor()); - visitor.setRuleSet(ruleSet); - visitor.setPmdEngine(pmdEngine); -- visitor.setAccumulator(this.markersByFile); -- visitor.setUseTaskMarker(this.taskMarker); -+ visitor.setAccumulator(markersByFile); -+ visitor.setUseTaskMarker(taskMarker); - visitor.setProjectProperties(properties); -- this.resourceDelta.accept(visitor); -+ resourceDelta.accept(visitor); - -- this.ruleCount = ruleSet.getRules().size(); -- this.fileCount += visitor.getProcessedFilesCount(); -- this.pmdDuration += visitor.getActualPmdDuration(); -+ ruleCount = ruleSet.getRules().size(); -+ fileCount += visitor.getProcessedFilesCount(); -+ pmdDuration += visitor.getActualPmdDuration(); - - } catch (PropertiesException e) { - throw new CommandException(e); -@@ -423,36 +431,32 @@ private void applyMarkers() { - final Timer timer = new Timer(); - - String currentFile = """"; // for logging -+ -+ beginTask(""PMD Applying markers"", markersByFile.size()); -+ - try { -- final Set fileSet = markersByFile.keySet(); -- final Iterator i = fileSet.iterator(); -- -- beginTask(""PMD Applying markers"", fileSet.size()); -- -- while (i.hasNext() && !isCanceled()) { -- final IFile file = i.next(); -+ for (IFile 
file : markersByFile.keySet()) { -+ if (isCanceled()) break; - currentFile = file.getName(); - -- final Set markerInfoSet = markersByFile.get(file); -+ Set markerInfoSet = markersByFile.get(file); - // MarkerUtil.deleteAllMarkersIn(file); -- final Iterator j = markerInfoSet.iterator(); -- while (j.hasNext()) { -- final MarkerInfo markerInfo = j.next(); -- final IMarker marker = file.createMarker(markerInfo.getType()); -+ for (MarkerInfo markerInfo : markerInfoSet) { -+ IMarker marker = file.createMarker(markerInfo.getType()); - marker.setAttributes(markerInfo.getAttributeNames(), markerInfo.getAttributeValues()); - violationCount++; + private static final Map tag2code = new HashMap(); + static { +diff --git a/drools-decisiontables/src/main/java/org/drools/decisiontable/parser/DefaultRuleSheetListener.java b/drools-decisiontables/src/main/java/org/drools/decisiontable/parser/DefaultRuleSheetListener.java +index 1e513078472..c5ba2bb178d 100644 +--- a/drools-decisiontables/src/main/java/org/drools/decisiontable/parser/DefaultRuleSheetListener.java ++++ b/drools-decisiontables/src/main/java/org/drools/decisiontable/parser/DefaultRuleSheetListener.java +@@ -240,6 +240,12 @@ private Package buildRuleSet() { + case RULEFLOWGROUP: + ruleset.setRuleFlowGroup( value ); + break; ++ case DATEEFFECTIVE: ++ ruleset.setDateEffective( value ); ++ break; ++ case DATEEXPIRES: ++ ruleset.setDateExpires( value ); ++ break; } - - worked(1); -- } - } catch (CoreException e) { -- log.warn(""CoreException when setting marker info for file "" + currentFile + "" : "" + e.getMessage()); // TODO: -+ log.warn(""CoreException when setting marker for file "" + currentFile + "" : "" + e.getMessage()); // TODO: - // NLS - } finally { - timer.stop(); -- logInfo("""" + violationCount + "" markers applied on "" + markersByFile.size() + "" files in "" + timer.getDuration() + ""ms.""); -- log.info(""End of processing marker directives. "" + violationCount + "" violations for "" + markersByFile.size() + "" files.""); -+ int count = markersByFile.size(); -+ logInfo("""" + violationCount + "" markers applied on "" + count + "" files in "" + timer.getDuration() + ""ms.""); -+ log.info(""End of processing marker directives. 
"" + violationCount + "" violations for "" + count + "" files.""); } - - } -@@ -463,7 +467,7 @@ private void applyMarkers() { - * @param resource a project - * @return the element count - */ -- private int countResourceElement(final IResource resource) { -+ private int countResourceElement(IResource resource) { - final CountVisitor visitor = new CountVisitor(); - - try { -@@ -481,7 +485,7 @@ private int countResourceElement(final IResource resource) { - * @param delta a resource delta - * @return the element count - */ -- private int countDeltaElement(final IResourceDelta delta) { -+ private int countDeltaElement(IResourceDelta delta) { - final CountVisitor visitor = new CountVisitor(); - - try { -diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/preferences/IPreferences.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/preferences/IPreferences.java -index 94a0f132560..48177ea3a31 100644 ---- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/preferences/IPreferences.java -+++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/preferences/IPreferences.java -@@ -38,7 +38,13 @@ - - import java.util.Set; - -+import net.sourceforge.pmd.RulePriority; -+import net.sourceforge.pmd.eclipse.plugin.PriorityDescriptor; -+import net.sourceforge.pmd.eclipse.ui.Shape; -+import net.sourceforge.pmd.eclipse.ui.nls.StringKeys; -+ - import org.apache.log4j.Level; -+import org.eclipse.swt.graphics.RGB; - - /** - * This interface models the PMD Plugin preferences -@@ -61,6 +67,12 @@ public interface IPreferences { - Level LOG_LEVEL = Level.WARN; - String ACTIVE_RULES = """"; - -+ PriorityDescriptor PD_1_DEFAULT = new PriorityDescriptor(RulePriority.HIGH, StringKeys.VIEW_FILTER_PRIORITY_1, StringKeys.VIEW_TOOLTIP_FILTER_PRIORITY_1, null, Shape.diamond, new RGB( 255,0,0), 13); // red -+ PriorityDescriptor PD_2_DEFAULT = new PriorityDescriptor(RulePriority.MEDIUM_HIGH, StringKeys.VIEW_FILTER_PRIORITY_2, StringKeys.VIEW_TOOLTIP_FILTER_PRIORITY_2, null, Shape.square, new RGB( 0,255,255), 13); // yellow -+ PriorityDescriptor PD_3_DEFAULT = new PriorityDescriptor(RulePriority.MEDIUM, StringKeys.VIEW_FILTER_PRIORITY_3, StringKeys.VIEW_TOOLTIP_FILTER_PRIORITY_3, null, Shape.circle, new RGB( 0,255,0), 13); // green -+ PriorityDescriptor PD_4_DEFAULT = new PriorityDescriptor(RulePriority.MEDIUM_LOW, StringKeys.VIEW_FILTER_PRIORITY_4, StringKeys.VIEW_TOOLTIP_FILTER_PRIORITY_4, null, Shape.domeRight,new RGB( 255,0,255), 13); // purple -+ PriorityDescriptor PD_5_DEFAULT = new PriorityDescriptor(RulePriority.LOW, StringKeys.VIEW_FILTER_PRIORITY_5, StringKeys.VIEW_TOOLTIP_FILTER_PRIORITY_5, null, Shape.plus, new RGB( 0,0,255), 13); // blue -+ - boolean isActive(String rulename); - - void isActive(String ruleName, boolean flag); -@@ -107,7 +119,7 @@ public interface IPreferences { - * Get the review additional comment. This comment is a text appended to the - * review comment that is inserted into the code when a violation is reviewed. - * This string follows the MessageFormat syntax and could contain 2 variable fields. -- * The 1st fied is replaced by the current used id and the second by the current date. -+ * The 1st field is replaced by the current used id and the second by the current date. 
- */ - String getReviewAdditionalComment(); - -@@ -119,7 +131,7 @@ public interface IPreferences { - - /** - * Does the review comment should be the PMD style (// NOPMD comment) or the -- * Plugin style (// @PMD:REVIEW...) which was implemented before. -+ * plugin style (// @PMD:REVIEW...) which was implemented before. - */ - boolean isReviewPmdStyleEnabled(); - -@@ -128,6 +140,9 @@ public interface IPreferences { - */ - void setReviewPmdStyleEnabled(boolean reviewPmdStyleEnabled); - -+ void setPriorityDescriptor(RulePriority priority, PriorityDescriptor pd); -+ -+ PriorityDescriptor getPriorityDescriptor(RulePriority priority); - - // CPD Preferences - -diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/preferences/impl/PreferencesImpl.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/preferences/impl/PreferencesImpl.java -index b7a91c3074b..099d98e70d5 100644 ---- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/preferences/impl/PreferencesImpl.java -+++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/preferences/impl/PreferencesImpl.java -@@ -36,9 +36,13 @@ - - package net.sourceforge.pmd.eclipse.runtime.preferences.impl; - -+import java.util.HashMap; - import java.util.HashSet; -+import java.util.Map; - import java.util.Set; - -+import net.sourceforge.pmd.RulePriority; -+import net.sourceforge.pmd.eclipse.plugin.PriorityDescriptor; - import net.sourceforge.pmd.eclipse.runtime.preferences.IPreferences; - import net.sourceforge.pmd.eclipse.runtime.preferences.IPreferencesManager; - -@@ -63,6 +67,9 @@ class PreferencesImpl implements IPreferences { - private String logFileName; - private Level logLevel; - private Set activeRuleNames = new HashSet(); -+ -+ private Map uiDescriptorsByPriority = new HashMap(5); -+ - /** - * Is constructed from a preferences manager - * @param preferencesManager -@@ -211,4 +218,12 @@ public void setActiveRuleNames(Set ruleNames) { - activeRuleNames = ruleNames; - } - -+ public void setPriorityDescriptor(RulePriority priority, PriorityDescriptor pd) { -+ uiDescriptorsByPriority.put(priority, pd); -+ } -+ -+ public PriorityDescriptor getPriorityDescriptor(RulePriority priority) { -+ return uiDescriptorsByPriority.get(priority); -+ } -+ - } -diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/preferences/impl/PreferencesManagerImpl.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/preferences/impl/PreferencesManagerImpl.java -index 11ae3ff4972..bc3ac6c7d7a 100644 ---- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/preferences/impl/PreferencesManagerImpl.java -+++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/preferences/impl/PreferencesManagerImpl.java -@@ -48,11 +48,13 @@ - import java.util.Set; - - import net.sourceforge.pmd.Rule; -+import net.sourceforge.pmd.RulePriority; - import net.sourceforge.pmd.RuleSet; - import net.sourceforge.pmd.RuleSetFactory; - import net.sourceforge.pmd.RuleSetNotFoundException; - import net.sourceforge.pmd.eclipse.core.IRuleSetManager; - import net.sourceforge.pmd.eclipse.plugin.PMDPlugin; -+import net.sourceforge.pmd.eclipse.plugin.PriorityDescriptor; - import 
net.sourceforge.pmd.eclipse.runtime.preferences.IPreferences; - import net.sourceforge.pmd.eclipse.runtime.preferences.IPreferencesFactory; - import net.sourceforge.pmd.eclipse.runtime.preferences.IPreferencesManager; -@@ -90,7 +92,12 @@ class PreferencesManagerImpl implements IPreferencesManager { - private static final String LOG_FILENAME = PMDPlugin.PLUGIN_ID + "".log_filename""; - private static final String LOG_LEVEL = PMDPlugin.PLUGIN_ID + "".log_level""; - private static final String DISABLED_RULES = PMDPlugin.PLUGIN_ID + "".disabled_rules""; -- -+ private static final String PRIORITY_DESC_1 = PMDPlugin.PLUGIN_ID + "".priority_descriptor_1""; -+ private static final String PRIORITY_DESC_2 = PMDPlugin.PLUGIN_ID + "".priority_descriptor_2""; -+ private static final String PRIORITY_DESC_3 = PMDPlugin.PLUGIN_ID + "".priority_descriptor_3""; -+ private static final String PRIORITY_DESC_4 = PMDPlugin.PLUGIN_ID + "".priority_descriptor_4""; -+ private static final String PRIORITY_DESC_5 = PMDPlugin.PLUGIN_ID + "".priority_descriptor_5""; -+ - private static final String OLD_PREFERENCE_PREFIX = ""net.sourceforge.pmd.runtime""; - private static final String OLD_PREFERENCE_LOCATION = ""/.metadata/.plugins/org.eclipse.core.runtime/.settings/net.sourceforge.pmd.runtime.prefs""; - public static final String NEW_PREFERENCE_LOCATION = ""/.metadata/.plugins/org.eclipse.core.runtime/.settings/net.sourceforge.pmd.eclipse.plugin.prefs""; -@@ -121,6 +128,7 @@ public IPreferences loadPreferences() { - loadLogFileName(); - loadLogLevel(); - loadActiveRules(); -+ loadRulePriorityDescriptors(); +@@ -648,6 +654,12 @@ private void nextDataCell(final int row, + case CALENDARS: + this._currentRule.setCalendars( value ); + break; ++ case DATEEFFECTIVE: ++ this._currentRule.setDateEffective( value ); ++ break; ++ case DATEEXPIRES: ++ this._currentRule.setDateExpires( value ); ++ break; } - - return this.preferences; -@@ -179,6 +187,7 @@ public void storePreferences(IPreferences preferences) { - storeLogFileName(); - storeLogLevel(); - storeActiveRules(); -+ storePriorityDescriptors(); } - /** -@@ -186,10 +195,10 @@ public void storePreferences(IPreferences preferences) { - */ - public RuleSet getRuleSet() { - -- if (this.ruleSet == null) { -- this.ruleSet = getRuleSetFromStateLocation(); -+ if (ruleSet == null) { -+ ruleSet = getRuleSetFromStateLocation(); - } -- return this.ruleSet; -+ return ruleSet; +diff --git a/drools-decisiontables/src/test/java/org/drools/decisiontable/SpreadsheetCompilerUnitTest.java b/drools-decisiontables/src/test/java/org/drools/decisiontable/SpreadsheetCompilerUnitTest.java +index 1b6e45ff102..150eac13869 100644 +--- a/drools-decisiontables/src/test/java/org/drools/decisiontable/SpreadsheetCompilerUnitTest.java ++++ b/drools-decisiontables/src/test/java/org/drools/decisiontable/SpreadsheetCompilerUnitTest.java +@@ -192,6 +192,10 @@ public void testAttributesXLS() { + rule1 ) > -1 ); + assertTrue( drl.indexOf( ""calendars \""CAL1\"""", + rule1 ) > -1 ); ++ assertTrue( drl.indexOf( ""date-effective \""01-Jan-2007\"""", ++ rule1 ) > -1 ); ++ assertTrue( drl.indexOf( ""date-expires \""31-Dec-2007\"""", ++ rule1 ) > -1 ); + + int rule2 = drl.indexOf( ""rule \""N2\"""" ); + assertFalse( rule2 == -1 ); +@@ -216,6 +220,10 @@ public void testAttributesXLS() { + rule2 ) > -1 ); + assertTrue( drl.indexOf( ""calendars \""CAL2\"""", + rule2 ) > -1 ); ++ assertTrue( drl.indexOf( ""date-effective \""01-Jan-2012\"""", ++ rule2 ) > -1 ); ++ assertTrue( drl.indexOf( ""date-expires \""31-Dec-2015\"""", 
++ rule2 ) > -1 ); } - /** -@@ -282,6 +291,29 @@ private void loadActiveRules() { - this.preferences.setActiveRuleNames(asStringSet(loadPreferencesStore.getString(DISABLED_RULES), "","")); - } - -+ /** -+ * Read the priority descriptors -+ * -+ */ -+ private void loadRulePriorityDescriptors() { -+ // TODO - put into a loop -+ loadPreferencesStore.setDefault(PRIORITY_DESC_1, IPreferences.PD_1_DEFAULT.storeString()); -+ preferences.setPriorityDescriptor(RulePriority.HIGH, PriorityDescriptor.from( loadPreferencesStore.getString(PRIORITY_DESC_1) ) ); -+ -+ loadPreferencesStore.setDefault(PRIORITY_DESC_2, IPreferences.PD_2_DEFAULT.storeString()); -+ preferences.setPriorityDescriptor(RulePriority.MEDIUM_HIGH, PriorityDescriptor.from( loadPreferencesStore.getString(PRIORITY_DESC_2) ) ); -+ -+ loadPreferencesStore.setDefault(PRIORITY_DESC_3, IPreferences.PD_3_DEFAULT.storeString()); -+ preferences.setPriorityDescriptor(RulePriority.MEDIUM, PriorityDescriptor.from( loadPreferencesStore.getString(PRIORITY_DESC_3) ) ); -+ -+ loadPreferencesStore.setDefault(PRIORITY_DESC_4, IPreferences.PD_4_DEFAULT.storeString()); -+ preferences.setPriorityDescriptor(RulePriority.MEDIUM_LOW, PriorityDescriptor.from( loadPreferencesStore.getString(PRIORITY_DESC_4) ) ); -+ -+ loadPreferencesStore.setDefault(PRIORITY_DESC_5, IPreferences.PD_5_DEFAULT.storeString()); -+ preferences.setPriorityDescriptor(RulePriority.LOW, PriorityDescriptor.from( loadPreferencesStore.getString(PRIORITY_DESC_5) ) ); -+ } -+ -+ - private static Set asStringSet(String delimitedString, String delimiter) { - - String[] values = delimitedString.split(delimiter); -@@ -374,6 +406,15 @@ private void storeLogLevel() { - this.storePreferencesStore.setValue(LOG_LEVEL, this.preferences.getLogLevel().toString()); - } + @Test +diff --git a/drools-decisiontables/src/test/java/org/drools/decisiontable/parser/ActionTypeTest.java b/drools-decisiontables/src/test/java/org/drools/decisiontable/parser/ActionTypeTest.java +index cbfc6e9644a..6d7bf450be7 100644 +--- a/drools-decisiontables/src/test/java/org/drools/decisiontable/parser/ActionTypeTest.java ++++ b/drools-decisiontables/src/test/java/org/drools/decisiontable/parser/ActionTypeTest.java +@@ -156,6 +156,26 @@ public void testChooseActionType() { + type = (ActionType) actionTypeMap.get( new Integer(0) ); + assertEquals(Code.RULEFLOWGROUP, type.getCode()); -+ private void storePriorityDescriptors() { -+ // TODO put into a loop -+ storePreferencesStore.setValue(PRIORITY_DESC_1, preferences.getPriorityDescriptor(RulePriority.HIGH).storeString()); -+ storePreferencesStore.setValue(PRIORITY_DESC_2, preferences.getPriorityDescriptor(RulePriority.MEDIUM_HIGH).storeString()); -+ storePreferencesStore.setValue(PRIORITY_DESC_3, preferences.getPriorityDescriptor(RulePriority.MEDIUM).storeString()); -+ storePreferencesStore.setValue(PRIORITY_DESC_4, preferences.getPriorityDescriptor(RulePriority.MEDIUM_LOW).storeString()); -+ storePreferencesStore.setValue(PRIORITY_DESC_5, preferences.getPriorityDescriptor(RulePriority.LOW).storeString()); -+ } -+ - /** - * Get rule set from state location - */ -diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/search/RuleSearchPage.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/search/RuleSearchPage.java -new file mode 100755 -index 00000000000..3f1488e26e8 ---- /dev/null -+++ 
b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/search/RuleSearchPage.java -@@ -0,0 +1,147 @@ -+package net.sourceforge.pmd.eclipse.search; -+ -+import java.util.List; -+ -+import net.sourceforge.pmd.lang.Language; -+ -+import org.eclipse.jface.dialogs.DialogPage; -+import org.eclipse.jface.resource.ImageDescriptor; -+import org.eclipse.jface.text.TextSelection; -+import org.eclipse.search.ui.ISearchPage; -+import org.eclipse.search.ui.ISearchPageContainer; -+import org.eclipse.swt.SWT; -+import org.eclipse.swt.layout.GridData; -+import org.eclipse.swt.layout.GridLayout; -+import org.eclipse.swt.widgets.Button; -+import org.eclipse.swt.widgets.Combo; -+import org.eclipse.swt.widgets.Composite; -+import org.eclipse.swt.widgets.Group; -+import org.eclipse.swt.widgets.Label; -+import org.eclipse.swt.widgets.Text; -+ -+/** -+ * -+ * @author Brian Remedios -+ */ -+public class RuleSearchPage extends DialogPage implements ISearchPage { ++ actionTypeMap = new HashMap(); ++ ActionType.addNewActionType( actionTypeMap, ""V"", 0, 1 ); ++ type = (ActionType) actionTypeMap.get( new Integer(0) ); ++ assertEquals(Code.DATEEFFECTIVE, type.getCode()); + -+ private Text idText; -+ private Button caseSensitive; -+ -+ private String selected; ++ actionTypeMap = new HashMap(); ++ ActionType.addNewActionType( actionTypeMap, ""DATE-EFFECTIVE"", 0, 1 ); ++ type = (ActionType) actionTypeMap.get( new Integer(0) ); ++ assertEquals(Code.DATEEFFECTIVE, type.getCode()); + -+ private Button name; -+ private Button description; -+ private Button example; -+ private Button xpath; -+ private Combo language; -+ -+ public RuleSearchPage() { -+ } ++ actionTypeMap = new HashMap(); ++ ActionType.addNewActionType( actionTypeMap, ""Z"", 0, 1 ); ++ type = (ActionType) actionTypeMap.get( new Integer(0) ); ++ assertEquals(Code.DATEEXPIRES, type.getCode()); + -+ public RuleSearchPage(String title) { -+ super(title); -+ } ++ actionTypeMap = new HashMap(); ++ ActionType.addNewActionType( actionTypeMap, ""DATE-EXPIRES"", 0, 1 ); ++ type = (ActionType) actionTypeMap.get( new Integer(0) ); ++ assertEquals(Code.DATEEXPIRES, type.getCode()); + -+ public RuleSearchPage(String title, ImageDescriptor image) { -+ super(title, image); -+ } + actionTypeMap = new HashMap(); + ActionType.addNewActionType( actionTypeMap, ""@"", 0, 1 ); + type = (ActionType) actionTypeMap.get( new Integer(0) ); +diff --git a/drools-decisiontables/src/test/resources/org/drools/decisiontable/Attributes.xls b/drools-decisiontables/src/test/resources/org/drools/decisiontable/Attributes.xls +index 3159e4ffeb6..1819888649a 100644 +Binary files a/drools-decisiontables/src/test/resources/org/drools/decisiontable/Attributes.xls and b/drools-decisiontables/src/test/resources/org/drools/decisiontable/Attributes.xls differ +diff --git a/drools-templates/src/main/java/org/drools/template/model/AttributedDRLElement.java b/drools-templates/src/main/java/org/drools/template/model/AttributedDRLElement.java +index b46e25c7c45..3232fb349be 100644 +--- a/drools-templates/src/main/java/org/drools/template/model/AttributedDRLElement.java ++++ b/drools-templates/src/main/java/org/drools/template/model/AttributedDRLElement.java +@@ -105,6 +105,14 @@ public void setAutoFocus(final boolean value) { + this._attr2value.put( ""auto-focus"", Boolean.toString( value ) ); + } + ++ public void setDateEffective(final String value) { ++ this._attr2value.put( ""date-effective"", asStringLiteral( value ) ); ++ } + -+ public boolean performAction() { -+ // 
TODO Auto-generated method stub -+ return false; -+ } ++ public void setDateExpires(final String value) { ++ this._attr2value.put( ""date-expires"", asStringLiteral( value ) ); ++ } + -+ public void setContainer(ISearchPageContainer container) { -+ if (container.getSelection() instanceof TextSelection) { -+ selected = ((TextSelection) container.getSelection()).getText(); -+ } -+ } + public String getAttribute( String name ){ + return this._attr2value.get( name ).toString(); + }" +ce2995c49d111b5749a88b4de2065a3a68551386,hbase,HBASE-1136 HashFunction inadvertently destroys- some randomness; REVERTING--git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@735880 13f79535-47bb-0310-9956-ffa450edef68-,c,https://github.com/apache/hbase,"diff --git a/CHANGES.txt b/CHANGES.txt +index 7e9c80101764..970250b4eaeb 100644 +--- a/CHANGES.txt ++++ b/CHANGES.txt +@@ -3,8 +3,6 @@ Release 0.20.0 - Unreleased + INCOMPATIBLE CHANGES + + BUG FIXES +- HBASE-1136 HashFunction inadvertently destroys some randomness +- (Jonathan Ellis via Stack) + HBASE-1140 ""ant clean test"" fails (Nitay Joffe via Stack) + + IMPROVEMENTS +diff --git a/src/java/org/onelab/filter/HashFunction.java b/src/java/org/onelab/filter/HashFunction.java +index cf97c7bcaa26..a0c26964e2f6 100644 +--- a/src/java/org/onelab/filter/HashFunction.java ++++ b/src/java/org/onelab/filter/HashFunction.java +@@ -118,8 +118,7 @@ public int[] hash(Key k){ + } + int[] result = new int[nbHash]; + for (int i = 0, initval = 0; i < nbHash; i++) { +- initval = hashFunction.hash(b, initval); +- result[i] = Math.abs(initval) % maxValue; ++ initval = result[i] = Math.abs(hashFunction.hash(b, initval) % maxValue); + } + return result; + }//end hash() +diff --git a/src/test/org/onelab/test/TestFilter.java b/src/test/org/onelab/test/TestFilter.java +index 363fc9451481..6c88c1ab33f4 100644 +--- a/src/test/org/onelab/test/TestFilter.java ++++ b/src/test/org/onelab/test/TestFilter.java +@@ -274,7 +274,7 @@ public void testCountingBloomFilter() throws UnsupportedEncodingException { + bf.add(k2); + bf.add(k3); + assertTrue(bf.membershipTest(key)); +- assertFalse(bf.membershipTest(k2)); ++ assertTrue(bf.membershipTest(new StringKey(""graknyl""))); + assertFalse(bf.membershipTest(new StringKey(""xyzzy""))); + assertFalse(bf.membershipTest(new StringKey(""abcd""))); + +@@ -287,7 +287,7 @@ public void testCountingBloomFilter() throws UnsupportedEncodingException { + bf2.add(key); + bf.or(bf2); + assertTrue(bf.membershipTest(key)); +- assertTrue(bf.membershipTest(k2)); ++ assertTrue(bf.membershipTest(new StringKey(""graknyl""))); + assertFalse(bf.membershipTest(new StringKey(""xyzzy""))); + assertFalse(bf.membershipTest(new StringKey(""abcd"")));" +eb0a1bebaa1486ab4d2af4dccf40aea7f8f1d5dd,Mylyn Reviews,"Merge branch 'master' of git://git.eclipse.org/gitroot/mylyn/org.eclipse.mylyn.reviews +",p,https://github.com/eclipse-mylyn/org.eclipse.mylyn.reviews,"diff --git a/framework/org.eclipse.mylyn.reviews.core/build.properties b/framework/org.eclipse.mylyn.reviews.core/build.properties +index 9adcaaf4..21815455 100644 +--- a/framework/org.eclipse.mylyn.reviews.core/build.properties ++++ b/framework/org.eclipse.mylyn.reviews.core/build.properties +@@ -10,7 +10,9 @@ + bin.includes = .,\ + model/,\ + plugin.xml,\ +- META-INF/ ++ META-INF/,\ ++ about.html + jars.compile.order = . + source.. = src/ + output.. 
= target/classes/ ++src.includes = about.html +diff --git a/framework/org.eclipse.mylyn.reviews.feature/.settings/org.eclipse.jdt.core.prefs b/framework/org.eclipse.mylyn.reviews.feature/.settings/org.eclipse.jdt.core.prefs +deleted file mode 100644 +index bc1e8f9c..00000000 +--- a/framework/org.eclipse.mylyn.reviews.feature/.settings/org.eclipse.jdt.core.prefs ++++ /dev/null +@@ -1,357 +0,0 @@ +-#Wed Mar 02 16:00:03 PST 2011 +-eclipse.preferences.version=1 +-org.eclipse.jdt.core.codeComplete.argumentPrefixes= +-org.eclipse.jdt.core.codeComplete.argumentSuffixes= +-org.eclipse.jdt.core.codeComplete.fieldPrefixes= +-org.eclipse.jdt.core.codeComplete.fieldSuffixes= +-org.eclipse.jdt.core.codeComplete.localPrefixes= +-org.eclipse.jdt.core.codeComplete.localSuffixes= +-org.eclipse.jdt.core.codeComplete.staticFieldPrefixes= +-org.eclipse.jdt.core.codeComplete.staticFieldSuffixes= +-org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled +-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.5 +-org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve +-org.eclipse.jdt.core.compiler.compliance=1.5 +-org.eclipse.jdt.core.compiler.debug.lineNumber=generate +-org.eclipse.jdt.core.compiler.debug.localVariable=generate +-org.eclipse.jdt.core.compiler.debug.sourceFile=generate +-org.eclipse.jdt.core.compiler.problem.annotationSuperInterface=warning +-org.eclipse.jdt.core.compiler.problem.assertIdentifier=error +-org.eclipse.jdt.core.compiler.problem.autoboxing=ignore +-org.eclipse.jdt.core.compiler.problem.deprecation=warning +-org.eclipse.jdt.core.compiler.problem.deprecationInDeprecatedCode=disabled +-org.eclipse.jdt.core.compiler.problem.deprecationWhenOverridingDeprecatedMethod=enabled +-org.eclipse.jdt.core.compiler.problem.discouragedReference=warning +-org.eclipse.jdt.core.compiler.problem.emptyStatement=ignore +-org.eclipse.jdt.core.compiler.problem.enumIdentifier=error +-org.eclipse.jdt.core.compiler.problem.fallthroughCase=ignore +-org.eclipse.jdt.core.compiler.problem.fatalOptionalError=enabled +-org.eclipse.jdt.core.compiler.problem.fieldHiding=ignore +-org.eclipse.jdt.core.compiler.problem.finalParameterBound=warning +-org.eclipse.jdt.core.compiler.problem.finallyBlockNotCompletingNormally=warning +-org.eclipse.jdt.core.compiler.problem.forbiddenReference=error +-org.eclipse.jdt.core.compiler.problem.hiddenCatchBlock=warning +-org.eclipse.jdt.core.compiler.problem.incompatibleNonInheritedInterfaceMethod=warning +-org.eclipse.jdt.core.compiler.problem.incompleteEnumSwitch=ignore +-org.eclipse.jdt.core.compiler.problem.indirectStaticAccess=ignore +-org.eclipse.jdt.core.compiler.problem.localVariableHiding=ignore +-org.eclipse.jdt.core.compiler.problem.methodWithConstructorName=warning +-org.eclipse.jdt.core.compiler.problem.missingDeprecatedAnnotation=ignore +-org.eclipse.jdt.core.compiler.problem.missingOverrideAnnotation=ignore +-org.eclipse.jdt.core.compiler.problem.missingSerialVersion=warning +-org.eclipse.jdt.core.compiler.problem.noEffectAssignment=warning +-org.eclipse.jdt.core.compiler.problem.noImplicitStringConversion=warning +-org.eclipse.jdt.core.compiler.problem.nonExternalizedStringLiteral=warning +-org.eclipse.jdt.core.compiler.problem.nullReference=error +-org.eclipse.jdt.core.compiler.problem.overridingPackageDefaultMethod=warning +-org.eclipse.jdt.core.compiler.problem.parameterAssignment=ignore +-org.eclipse.jdt.core.compiler.problem.possibleAccidentalBooleanAssignment=ignore +-org.eclipse.jdt.core.compiler.problem.potentialNullReference=warning 
+-org.eclipse.jdt.core.compiler.problem.rawTypeReference=warning +-org.eclipse.jdt.core.compiler.problem.redundantNullCheck=ignore +-org.eclipse.jdt.core.compiler.problem.redundantSuperinterface=ignore +-org.eclipse.jdt.core.compiler.problem.specialParameterHidingField=disabled +-org.eclipse.jdt.core.compiler.problem.staticAccessReceiver=warning +-org.eclipse.jdt.core.compiler.problem.suppressWarnings=enabled +-org.eclipse.jdt.core.compiler.problem.syntheticAccessEmulation=ignore +-org.eclipse.jdt.core.compiler.problem.typeParameterHiding=warning +-org.eclipse.jdt.core.compiler.problem.uncheckedTypeOperation=warning +-org.eclipse.jdt.core.compiler.problem.undocumentedEmptyBlock=ignore +-org.eclipse.jdt.core.compiler.problem.unhandledWarningToken=warning +-org.eclipse.jdt.core.compiler.problem.unnecessaryElse=ignore +-org.eclipse.jdt.core.compiler.problem.unnecessaryTypeCheck=ignore +-org.eclipse.jdt.core.compiler.problem.unqualifiedFieldAccess=ignore +-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownException=ignore +-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionExemptExceptionAndThrowable=enabled +-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionIncludeDocCommentReference=enabled +-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionWhenOverriding=disabled +-org.eclipse.jdt.core.compiler.problem.unusedImport=warning +-org.eclipse.jdt.core.compiler.problem.unusedLabel=warning +-org.eclipse.jdt.core.compiler.problem.unusedLocal=warning +-org.eclipse.jdt.core.compiler.problem.unusedParameter=ignore +-org.eclipse.jdt.core.compiler.problem.unusedParameterIncludeDocCommentReference=enabled +-org.eclipse.jdt.core.compiler.problem.unusedParameterWhenImplementingAbstract=disabled +-org.eclipse.jdt.core.compiler.problem.unusedParameterWhenOverridingConcrete=disabled +-org.eclipse.jdt.core.compiler.problem.unusedPrivateMember=warning +-org.eclipse.jdt.core.compiler.problem.unusedWarningToken=warning +-org.eclipse.jdt.core.compiler.problem.varargsArgumentNeedCast=warning +-org.eclipse.jdt.core.compiler.source=1.5 +-org.eclipse.jdt.core.compiler.taskCaseSensitive=enabled +-org.eclipse.jdt.core.compiler.taskPriorities=NORMAL,HIGH,NORMAL +-org.eclipse.jdt.core.compiler.taskTags=TODO,FIXME,XXX +-org.eclipse.jdt.core.formatter.align_type_members_on_columns=false +-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression=16 +-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation=0 +-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant=16 +-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call=16 +-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation=16 +-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression=16 +-org.eclipse.jdt.core.formatter.alignment_for_assignment=0 +-org.eclipse.jdt.core.formatter.alignment_for_binary_expression=16 +-org.eclipse.jdt.core.formatter.alignment_for_compact_if=16 +-org.eclipse.jdt.core.formatter.alignment_for_conditional_expression=48 +-org.eclipse.jdt.core.formatter.alignment_for_enum_constants=0 +-org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer=16 +-org.eclipse.jdt.core.formatter.alignment_for_method_declaration=0 +-org.eclipse.jdt.core.formatter.alignment_for_multiple_fields=16 +-org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration=16 
+-org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration=16 +-org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation=80 +-org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration=16 +-org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration=16 +-org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration=16 +-org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration=16 +-org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration=16 +-org.eclipse.jdt.core.formatter.blank_lines_after_imports=1 +-org.eclipse.jdt.core.formatter.blank_lines_after_package=1 +-org.eclipse.jdt.core.formatter.blank_lines_before_field=1 +-org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration=0 +-org.eclipse.jdt.core.formatter.blank_lines_before_imports=1 +-org.eclipse.jdt.core.formatter.blank_lines_before_member_type=1 +-org.eclipse.jdt.core.formatter.blank_lines_before_method=1 +-org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk=1 +-org.eclipse.jdt.core.formatter.blank_lines_before_package=0 +-org.eclipse.jdt.core.formatter.blank_lines_between_import_groups=1 +-org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations=1 +-org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration=end_of_line +-org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration=end_of_line +-org.eclipse.jdt.core.formatter.brace_position_for_array_initializer=end_of_line +-org.eclipse.jdt.core.formatter.brace_position_for_block=end_of_line +-org.eclipse.jdt.core.formatter.brace_position_for_block_in_case=end_of_line +-org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration=end_of_line +-org.eclipse.jdt.core.formatter.brace_position_for_enum_constant=end_of_line +-org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration=end_of_line +-org.eclipse.jdt.core.formatter.brace_position_for_method_declaration=end_of_line +-org.eclipse.jdt.core.formatter.brace_position_for_switch=end_of_line +-org.eclipse.jdt.core.formatter.brace_position_for_type_declaration=end_of_line +-org.eclipse.jdt.core.formatter.comment.clear_blank_lines=false +-org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment=false +-org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment=true +-org.eclipse.jdt.core.formatter.comment.format_block_comments=false +-org.eclipse.jdt.core.formatter.comment.format_comments=true +-org.eclipse.jdt.core.formatter.comment.format_header=false +-org.eclipse.jdt.core.formatter.comment.format_html=true +-org.eclipse.jdt.core.formatter.comment.format_javadoc_comments=true +-org.eclipse.jdt.core.formatter.comment.format_line_comments=false +-org.eclipse.jdt.core.formatter.comment.format_source_code=true +-org.eclipse.jdt.core.formatter.comment.indent_parameter_description=true +-org.eclipse.jdt.core.formatter.comment.indent_root_tags=true +-org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags=insert +-org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter=insert +-org.eclipse.jdt.core.formatter.comment.line_length=120 +-org.eclipse.jdt.core.formatter.comment.new_lines_at_block_boundaries=true +-org.eclipse.jdt.core.formatter.comment.new_lines_at_javadoc_boundaries=true +-org.eclipse.jdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments=false +-org.eclipse.jdt.core.formatter.compact_else_if=true 
+-org.eclipse.jdt.core.formatter.continuation_indentation=2 +-org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer=2 +-org.eclipse.jdt.core.formatter.disabling_tag=@formatter\:off +-org.eclipse.jdt.core.formatter.enabling_tag=@formatter\:on +-org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line=false +-org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column=true +-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header=true +-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header=true +-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header=true +-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header=true +-org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases=true +-org.eclipse.jdt.core.formatter.indent_empty_lines=false +-org.eclipse.jdt.core.formatter.indent_statements_compare_to_block=true +-org.eclipse.jdt.core.formatter.indent_statements_compare_to_body=true +-org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases=true +-org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch=false +-org.eclipse.jdt.core.formatter.indentation.size=4 +-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation=insert +-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_field=insert +-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable=insert +-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_member=insert +-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_method=insert +-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_package=insert +-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter=insert +-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_type=insert +-org.eclipse.jdt.core.formatter.insert_new_line_after_label=do not insert +-org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer=do not insert +-org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing=do not insert +-org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement=do not insert +-org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer=do not insert +-org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement=do not insert +-org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement=do not insert +-org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement=do not insert +-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration=insert +-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration=insert +-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block=insert +-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant=insert +-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration=insert +-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body=insert +-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration=insert +-org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter=insert +-org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator=insert +-org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation=do not insert 
+-org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration=do not insert +-org.eclipse.jdt.core.formatter.insert_space_after_binary_operator=insert +-org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments=insert +-org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters=insert +-org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block=insert +-org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast=insert +-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert=insert +-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case=insert +-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional=insert +-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for=insert +-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement=insert +-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression=insert +-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation=insert +-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer=insert +-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters=insert +-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws=insert +-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments=insert +-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations=insert +-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments=insert +-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments=insert +-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits=insert +-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters=insert +-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws=insert +-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments=insert +-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations=insert +-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations=insert +-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference=insert +-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces=insert +-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments=insert +-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters=insert +-org.eclipse.jdt.core.formatter.insert_space_after_ellipsis=insert +-org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference=do not insert +-org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments=do not insert +-org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters=do not insert +-org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer=insert +-org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression=do not insert +-org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference=do not insert +-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation=do not insert +-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast=do not insert 
+-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch=do not insert +-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration=do not insert +-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant=do not insert +-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for=do not insert +-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if=do not insert +-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration=do not insert +-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation=do not insert +-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression=do not insert +-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch=do not insert +-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized=do not insert +-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while=do not insert +-org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator=do not insert +-org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator=do not insert +-org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional=insert +-org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard=do not insert +-org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for=insert +-org.eclipse.jdt.core.formatter.insert_space_after_unary_operator=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter=insert +-org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator=insert +-org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration=insert +-org.eclipse.jdt.core.formatter.insert_space_before_binary_operator=insert +-org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer=insert +-org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression=do not insert 
+-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert=insert +-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional=insert +-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for=insert +-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_ellipsis=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration=insert +-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration=insert +-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer=insert +-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block=insert +-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration=insert 
+-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant=insert +-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration=insert +-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration=insert +-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch=insert +-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration=insert +-org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch=insert +-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for=insert +-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if=insert +-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch=insert +-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized=insert +-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while=insert +-org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return=insert +-org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw=insert +-org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional=insert +-org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_semicolon=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for=do not insert +-org.eclipse.jdt.core.formatter.insert_space_before_unary_operator=do not insert +-org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference=do not insert +-org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer=do not insert +-org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression=do not insert +-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration=do not insert +-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration=do not insert +-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant=do not insert +-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration=do not insert 
+-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation=do not insert +-org.eclipse.jdt.core.formatter.join_lines_in_comments=true +-org.eclipse.jdt.core.formatter.join_wrapped_lines=true +-org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line=false +-org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line=false +-org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line=false +-org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line=false +-org.eclipse.jdt.core.formatter.lineSplit=120 +-org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column=true +-org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column=true +-org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body=0 +-org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve=1 +-org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line=true +-org.eclipse.jdt.core.formatter.tabulation.char=tab +-org.eclipse.jdt.core.formatter.tabulation.size=4 +-org.eclipse.jdt.core.formatter.use_on_off_tags=false +-org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations=false +-org.eclipse.jdt.core.formatter.wrap_before_binary_operator=true +-org.eclipse.jdt.core.formatter.wrap_outer_expressions_when_nested=true +diff --git a/framework/org.eclipse.mylyn.reviews.feature/.settings/org.eclipse.jdt.ui.prefs b/framework/org.eclipse.mylyn.reviews.feature/.settings/org.eclipse.jdt.ui.prefs +deleted file mode 100644 +index e2834b27..00000000 +--- a/framework/org.eclipse.mylyn.reviews.feature/.settings/org.eclipse.jdt.ui.prefs ++++ /dev/null +@@ -1,63 +0,0 @@ +-#Wed Mar 02 16:00:04 PST 2011 +-cleanup_settings_version=2 +-eclipse.preferences.version=1 +-editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true +-formatter_profile=_Mylyn based on Eclipse +-formatter_settings_version=12 +-internal.default.compliance=default +-org.eclipse.jdt.ui.exception.name=e +-org.eclipse.jdt.ui.gettersetter.use.is=true +-org.eclipse.jdt.ui.javadoc=false +-org.eclipse.jdt.ui.keywordthis=false +-org.eclipse.jdt.ui.overrideannotation=true +-org.eclipse.jdt.ui.text.custom_code_templates=